Merge branch 'release/1.1.1'

# Conflicts:
#	frontend/src/resources/css/style.css

Commit a929b1a5fc: 701 changed files with 32,246 additions and 58,543 deletions

.gitignore (vendored): 16 changes
@@ -50,18 +50,24 @@ resources/src/main/generated-resources*/
resources/src/main/resources/database/database_structure.xml

frontend/build*/
frontend/.nx*/
frontend/tmp*/
frontend/.angular*/
frontend/build_dev*/
frontend/.angular*/
frontend/dist*/
frontend/node_modules*/
frontend/src/ts/**/*.js
frontend/src/ts/**/*.js.map
frontend/src/ts/**/*.ngsummary.json
frontend/src/ts/aot*/
frontend/src/ts/page.routing.ts
frontend/tsconfig.base.json
frontend/modules/generated*/
frontend/src/ts/generated*/
frontend/modules/shared/src/lib/generated/
npm-debug.log

.eslintignore

#Sublime project files
*.sublime-project
*.sublime-workspace

config/*.ear
config/output/
@@ -1,7 +1,11 @@
#Files for Webbpm-Studio to ignore
frontend/build/
frontend/.angular/
frontend/.nx/
frontend/build_dev/
frontend/dist/
frontend/tmp/
frontend/modules/generated/
frontend/node_modules/
frontend/src/ts/page.routing.ts
frontend/src/ts/generated-sources/
@@ -18,7 +22,6 @@ resources/target/
test/
extensions/

config/
target/
themes/
@@ -26,4 +29,4 @@ themes/
.git/
.idea/
.studioignore
**.js

Dockerfile (new file): 45 additions
@@ -0,0 +1,45 @@
ARG BUILDER_IMAGE=nexus.ervu.rt-sk.ru/ervu-base/node:20.9-alpine-git
ARG RUNTIME_IMAGE=nexus.ervu.rt-sk.ru/ervu-base/ervu/jdk:17.0.13-alpine-3.21


FROM $BUILDER_IMAGE AS builder

ARG MVN_FLAGS="-T4C -Pprod --batch-mode --no-transfer-progress"

RUN echo "https://dl-cdn.alpinelinux.org/alpine/v3.18/main" >> /etc/apk/repositories \
    && echo "https://dl-cdn.alpinelinux.org/alpine/v3.18/community" >> /etc/apk/repositories \
    && apk --update --no-cache add maven openjdk17-jdk

ENV LANG=ru_RU.UTF-8
ENV LANGUAGE=ru_RU.UTF-8
ENV LC_ALL=ru_RU.UTF-8

WORKDIR /app
COPY . .

RUN mkdir -p /root/.m2 \
    # && cp config/settings.xml /root/.m2/settings.xml \
    && mvn clean ${MVN_FLAGS} \
    && mvn package ${MVN_FLAGS}

RUN cd config-data-executor \
    && mvn clean package ${MVN_FLAGS}


FROM $RUNTIME_IMAGE

RUN rm -f /etc/apk/repositories \
    && echo "https://dl-cdn.alpinelinux.org/alpine/v3.21/main" >> /etc/apk/repositories \
    && apk --update --no-cache add nginx

ENV BACKEND_URL=http://localhost:8080
ENV CONFIG_DATA_EXECUTOR_URL=http://localhost:8080/api

COPY config/nginx.conf /etc/nginx/nginx.conf
COPY --from=builder /app/frontend/dist /usr/share/nginx/html
COPY --from=builder /app/backend/target/*.jar /home/app/backend.jar
COPY --from=builder /app/config-data-executor/target/*.jar /home/app/cde.jar

EXPOSE 80

ENTRYPOINT ["java", "-jar", "/home/app/backend.jar"]
@@ -34,7 +34,7 @@ CREATE DATABASE "<your-project-db>"
    OWNER = "<your-project-main-role>";
```

NOTE: in the general case, you do not need to create a separate database for security. In the security-ds data source configuration in the standalone.xml file, use the application database as the database name.
NOTE: in the general case, you do not need to create a separate database for security. In the security-ds data source configuration in the configuration file, use the application database as the database name.

Granting the required privileges to the <your-project-security-role> role

backend/pom.xml: 132 changes
@@ -5,12 +5,36 @@
  <parent>
    <groupId>ru.micord.ervu</groupId>
    <artifactId>eks</artifactId>
    <version>1.0.0-SNAPSHOT</version>
    <version>1.1.1</version>
  </parent>
  <groupId>ru.micord.ervu.eks</groupId>
  <artifactId>backend</artifactId>
  <packaging>war</packaging>
  <dependencies>
    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-undertow</artifactId>
    </dependency>
    <dependency>
      <groupId>io.undertow</groupId>
      <artifactId>undertow-core</artifactId>
    </dependency>
    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-actuator</artifactId>
    </dependency>
    <dependency>
      <groupId>io.micrometer</groupId>
      <artifactId>micrometer-registry-prometheus</artifactId>
      <scope>runtime</scope>
    </dependency>
    <dependency>
      <groupId>org.springframework.data</groupId>
      <artifactId>spring-data-commons</artifactId>
    </dependency>
    <dependency>
      <groupId>io.jsonwebtoken</groupId>
      <artifactId>jjwt-api</artifactId>
@@ -20,6 +44,10 @@
      <artifactId>jjwt-impl</artifactId>
      <scope>runtime</scope>
    </dependency>
    <dependency>
      <groupId>ru.cg.webbpm.packages.base</groupId>
      <artifactId>backend</artifactId>
    </dependency>
    <dependency>
      <groupId>ru.micord.ervu.eks</groupId>
      <artifactId>resources</artifactId>
@@ -38,14 +66,9 @@
      <groupId>org.jooq</groupId>
      <artifactId>jooq</artifactId>
    </dependency>
    <dependency>
      <groupId>javax.servlet</groupId>
      <artifactId>javax.servlet-api</artifactId>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
@@ -131,15 +154,6 @@
      <groupId>ru.cg.webbpm.modules.jndi</groupId>
      <artifactId>jndi-inject</artifactId>
    </dependency>
    <dependency>
      <groupId>com.sun.mail</groupId>
      <artifactId>javax.mail</artifactId>
    </dependency>
    <dependency>
      <groupId>ru.cg.webbpm.modules.database</groupId>
      <artifactId>database-test</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>ru.cg.webbpm.modules</groupId>
      <artifactId>standard-annotations</artifactId>
@@ -152,10 +166,6 @@
      <groupId>ru.cg.webbpm.modules.security</groupId>
      <artifactId>security-api</artifactId>
    </dependency>
    <dependency>
      <groupId>ru.cg.webbpm.modules.security</groupId>
      <artifactId>security-esia</artifactId>
    </dependency>
    <dependency>
      <groupId>ru.cg.webbpm.modules.reporting</groupId>
      <artifactId>reporting-api</artifactId>
@@ -176,14 +186,6 @@
      <groupId>ru.cg.webbpm.modules.reporting.reporting-jasper</groupId>
      <artifactId>reporting-jasper-runtime-impl</artifactId>
    </dependency>
    <dependency>
      <groupId>ru.cg.webbpm.modules.reporting.reporting-xdoc</groupId>
      <artifactId>reporting-xdoc-impl</artifactId>
    </dependency>
    <dependency>
      <groupId>ru.cg.webbpm.modules.reporting.reporting-xdoc</groupId>
      <artifactId>reporting-xdoc-runtime-impl</artifactId>
    </dependency>
    <dependency>
      <groupId>org.liquibase</groupId>
      <artifactId>liquibase-core</artifactId>
@@ -192,14 +194,6 @@
      <groupId>ru.cg.webbpm.modules</groupId>
      <artifactId>webkit-base</artifactId>
    </dependency>
    <dependency>
      <groupId>ru.cg.webbpm.modules.security</groupId>
      <artifactId>security-db-synchronization-api</artifactId>
    </dependency>
    <dependency>
      <groupId>ru.cg.webbpm.modules.security</groupId>
      <artifactId>security-db-synchronization-ldap-impl</artifactId>
    </dependency>
    <dependency>
      <groupId>xerces</groupId>
      <artifactId>xercesImpl</artifactId>
@@ -208,30 +202,6 @@
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
    </dependency>
    <dependency>
      <groupId>ru.micord.fias</groupId>
      <artifactId>client</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.tika</groupId>
      <artifactId>tika-core</artifactId>
    </dependency>
    <dependency>
      <groupId>org.springframework.security.kerberos</groupId>
      <artifactId>spring-security-kerberos-core</artifactId>
    </dependency>
    <dependency>
      <groupId>org.springframework.security.kerberos</groupId>
      <artifactId>spring-security-kerberos-web</artifactId>
    </dependency>
    <dependency>
      <groupId>org.bouncycastle</groupId>
      <artifactId>bcprov-jdk15on</artifactId>
    </dependency>
    <dependency>
      <groupId>org.bouncycastle</groupId>
      <artifactId>bcpkix-jdk15on</artifactId>
    </dependency>
    <dependency>
      <groupId>net.javacrumbs.shedlock</groupId>
      <artifactId>shedlock-spring</artifactId>
@@ -241,11 +211,24 @@
      <artifactId>shedlock-provider-jdbc-template</artifactId>
    </dependency>
    <dependency>
      <groupId>ru.cg.webbpm.packages.base</groupId>
      <artifactId>backend</artifactId>
      <groupId>org.postgresql</groupId>
      <artifactId>postgresql</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>httpclient</artifactId>
    </dependency>
    <dependency>
      <groupId>me.snowdrop</groupId>
      <artifactId>narayana-spring-boot-starter</artifactId>
    </dependency>
    <dependency>
      <groupId>io.agroal</groupId>
      <artifactId>agroal-spring-boot-starter</artifactId>
    </dependency>
  </dependencies>
  <build>
    <finalName>${project.artifactId}</finalName>
    <plugins>
      <plugin>
        <artifactId>maven-compiler-plugin</artifactId>
@@ -274,21 +257,26 @@
        </execution>
      </executions>
    </plugin>
    <plugin>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-maven-plugin</artifactId>
      <version>2.7.18</version>
      <executions>
        <execution>
          <goals>
            <goal>repackage</goal>
          </goals>
        </execution>
      </executions>
      <configuration>
        <mainClass>ru.micord.ervu.eks.App</mainClass>
      </configuration>
    </plugin>
  </plugins>
  </build>
  <profiles>
    <profile>
      <id>studio</id>
      <build>
        <plugins>
          <plugin>
            <artifactId>maven-war-plugin</artifactId>
            <configuration>
              <warName>${project.artifactId}</warName>
            </configuration>
          </plugin>
        </plugins>
      </build>
      <dependencies>
        <dependency>
          <groupId>ru.cg.webbpm.modules.resources</groupId>
@@ -1,31 +0,0 @@
import javax.servlet.ServletContext;
import javax.servlet.ServletException;

import org.springframework.web.servlet.support.AbstractAnnotationConfigDispatcherServletInitializer;
import org.springframework.web.util.IntrospectorCleanupListener;

/**
 * This initializer creates root context and registers dispatcher servlet
 * Spring scans for initializers automatically
 */
public class WebAppInitializer extends AbstractAnnotationConfigDispatcherServletInitializer {

  public void onStartup(ServletContext servletContext) throws ServletException {
    super.onStartup(servletContext);
    servletContext.addListener(new IntrospectorCleanupListener());
  }

  protected String[] getServletMappings() {
    return new String[]{"/"};
  }

  @Override
  protected Class<?>[] getRootConfigClasses() {
    return new Class[]{AppConfig.class};
  }

  @Override
  protected Class<?>[] getServletConfigClasses() {
    return new Class[0];
  }
}

backend/src/main/java/dto/ConfigExecuteRequest.java (new file): 48 additions
@@ -0,0 +1,48 @@
package dto;

import java.time.LocalDate;
import java.util.List;

/**
 * @author Alexandr Shalaginov
 */
public class ConfigExecuteRequest {
  public List<String> ids;

  public LocalDate startDate;

  public LocalDate endDate;

  public List<String> getIds() {
    return ids;
  }

  public void setIds(List<String> ids) {
    this.ids = ids;
  }

  public LocalDate getStartDate() {
    return startDate;
  }

  public void setStartDate(LocalDate startDate) {
    this.startDate = startDate;
  }

  public LocalDate getEndDate() {
    return endDate;
  }

  public void setEndDate(LocalDate endDate) {
    this.endDate = endDate;
  }

  @Override
  public String toString() {
    return "ConfigExecuteRequest{" +
        "ids=" + ids +
        ", startDate=" + startDate +
        ", endDate=" + endDate +
        '}';
  }
}
@@ -1,12 +1,12 @@
package rpc;

import dto.ConfigExecuteRequest;
import org.springframework.beans.factory.annotation.Autowired;
import service.ConfigExecutorService;

import ru.cg.webbpm.modules.webkit.annotations.RpcCall;
import ru.cg.webbpm.modules.webkit.annotations.RpcService;
import ru.cg.webbpm.modules.webkit.beans.Behavior;
import service.ConfigExecutorService;

import java.util.List;

/**
 * @author Evgenii Malkov
@@ -14,14 +14,15 @@ import java.util.List;
@RpcService
public class ConfigExecutorRpcService extends Behavior {

  private final ConfigExecutorService configExecutorService;
  private final ConfigExecutorService configExecutorService;

  public ConfigExecutorRpcService(@Autowired ConfigExecutorService configExecutorService) {
    this.configExecutorService = configExecutorService;
  }
  public ConfigExecutorRpcService(@Autowired ConfigExecutorService configExecutorService) {
    this.configExecutorService = configExecutorService;
  }

  @RpcCall
  public void callConfigExecutor(String methodPath, List<String> ids) {
    configExecutorService.call(methodPath, ids);
  }
  @RpcCall
  public String callConfigExecutor(String methodPath, ConfigExecuteRequest configExecuteRequest,
      boolean withDate) {
    return configExecutorService.call(methodPath, configExecuteRequest, withDate);
  }
}
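Note (not part of the diff): a minimal caller-side sketch of the reworked RPC method, which now takes the new ConfigExecuteRequest DTO and returns the executor's response body. The method path string and the injected service are assumptions for illustration only.

import java.time.LocalDate;
import java.util.List;

import dto.ConfigExecuteRequest;
import rpc.ConfigExecutorRpcService;

class ConfigExecutorRpcUsageSketch {
  // Hypothetical caller (not in the commit); assumes an injected ConfigExecutorRpcService.
  String removeDraftNotices(ConfigExecutorRpcService rpcService) {
    ConfigExecuteRequest request = new ConfigExecuteRequest();
    request.setIds(List.of("42"));                   // ERVU citizen ids
    request.setStartDate(LocalDate.of(2024, 1, 1));  // only sent when withDate = true
    request.setEndDate(LocalDate.of(2024, 12, 31));

    // withDate = true posts the whole DTO; false posts only the id list
    // (see ConfigExecutorService.call in this commit).
    return rpcService.callConfigExecutor("/removeMilitaryDraftNotices", request, true);
  }
}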
@@ -1,92 +1,84 @@
import net.javacrumbs.shedlock.core.LockProvider;
import net.javacrumbs.shedlock.provider.jdbctemplate.JdbcTemplateLockProvider;
import net.javacrumbs.shedlock.spring.ScheduledLockConfiguration;
import net.javacrumbs.shedlock.spring.ScheduledLockConfigurationBuilder;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;

import javax.sql.DataSource;
import java.time.Duration;

/**
 * Root application context
 * This context imports XML configs from all the other jars, and is created by {@link WebAppInitializer}
 * NB: modules are excluded from component scan since spring-context.xml sometimes holds important parameters and / or annotations
 * @author krylov
 */
@Configuration
@ComponentScan(basePackages = {
    "service",
    "dao",
    "bpmn",
    "i18n",
    "errorhandling",
    "database",
    "security",
    "component.addresses",
    "gen",
    "ru.cg",
    "ru.micord"
})
@EnableAspectJAutoProxy(proxyTargetClass = true)
@EnableWebMvc
@EnableScheduling
public class AppConfig {

  @Value("${config.data.executor.socket.timeout:10}")
  private int socketTimeout;
  @Value("${config.data.executor.connection.timeout:10}")
  private int connectionTimeout;

  @Bean
  public PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer(){
    return new PropertySourcesPlaceholderConfigurer();
  }

  @Bean
  public ScheduledLockConfiguration taskScheduler(LockProvider lockProvider) {
    ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
    scheduler.setPoolSize(12);
    scheduler.initialize();
    return ScheduledLockConfigurationBuilder
        .withLockProvider(lockProvider)
        .withTaskScheduler(scheduler)
        .withDefaultLockAtMostFor(Duration.ofHours(4))
        .build();
  }

  @Bean
  public LockProvider lockProvider(@Qualifier("datasource") DataSource dataSource) {
    return new JdbcTemplateLockProvider(dataSource);
  }

  @Bean
  public RestTemplate restTemplate() {
    RequestConfig requestConfig = RequestConfig.custom()
        .setSocketTimeout(socketTimeout * 1000)
        .setConnectionRequestTimeout(connectionTimeout * 1000)
        .setConnectTimeout(connectionTimeout * 1000)
        .build();

    CloseableHttpClient httpClient = HttpClients.custom()
        .setDefaultRequestConfig(requestConfig)
        .build();

    HttpComponentsClientHttpRequestFactory factory = new HttpComponentsClientHttpRequestFactory(httpClient);

    return new RestTemplate(factory);
  }
}
package ru.micord.ervu.eks;

import javax.sql.DataSource;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import net.javacrumbs.shedlock.core.LockProvider;
import net.javacrumbs.shedlock.provider.jdbctemplate.JdbcTemplateLockProvider;
import net.javacrumbs.shedlock.spring.annotation.EnableSchedulerLock;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
import org.springframework.boot.autoconfigure.transaction.TransactionAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.web.client.RestTemplate;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import ru.cg.webbpm.modules.webkit.DispatcherConfig;

@Configuration
@ComponentScan(basePackages = {
    "service",
    "dao",
    "bpmn",
    "i18n",
    "errorhandling",
    "database",
    "security",
    "component.addresses",
    "gen",
    "ru.cg",
    "ru.micord",
    "com.example.ervu_eks_5"
})
@EnableAspectJAutoProxy(proxyTargetClass = true)
@EnableScheduling()
@EnableSchedulerLock(defaultLockAtMostFor = "PT4H")
@SpringBootApplication(exclude = {
    HibernateJpaAutoConfiguration.class,
    TransactionAutoConfiguration.class,
})
public class App extends DispatcherConfig {

  @Value("${config.data.executor.socket.timeout:10}")
  private int socketTimeout;
  @Value("${config.data.executor.connection.timeout:10}")
  private int connectionTimeout;

  public static void main(String[] args) {
    SpringApplication.run(App.class, args);
  }

  @Bean
  public PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() {
    return new PropertySourcesPlaceholderConfigurer();
  }

  @Bean
  public LockProvider lockProvider(DataSource dataSource) {
    return new JdbcTemplateLockProvider(dataSource);
  }

  @Bean
  public RestTemplate restTemplate() {
    RequestConfig requestConfig = RequestConfig.custom()
        .setSocketTimeout(socketTimeout * 1000)
        .setConnectionRequestTimeout(connectionTimeout * 1000)
        .setConnectTimeout(connectionTimeout * 1000)
        .build();

    CloseableHttpClient httpClient = HttpClients.custom()
        .setDefaultRequestConfig(requestConfig)
        .build();

    HttpComponentsClientHttpRequestFactory factory = new HttpComponentsClientHttpRequestFactory(httpClient);

    return new RestTemplate(factory);
  }
}
@@ -0,0 +1,34 @@
package ru.micord.ervu_eks.exception;

import org.springframework.context.support.MessageSourceAccessor;
import org.springframework.context.support.ResourceBundleMessageSource;
import ru.cg.webbpm.modules.core.runtime.api.LocalizedException;
import ru.cg.webbpm.modules.core.runtime.api.MessageBundleUtils;

public class ConfigExecutorException extends LocalizedException {

  private String forcedMessage = null;

  private static final MessageSourceAccessor MESSAGE_SOURCE = MessageBundleUtils.createAccessor(
      "i18n/exception_handler_messages"
  );;

  public ConfigExecutorException(String code, Throwable cause) {
    super(code, MESSAGE_SOURCE, cause);
  }

  public ConfigExecutorException(String code, Throwable cause, String forcedMessage) {
    super(code, MESSAGE_SOURCE, cause);
    this.forcedMessage = forcedMessage;
  }

  @Override
  public String getLocalizedMessage() {
    if (forcedMessage != null) {
      return forcedMessage;
    } else {
      return super.getLocalizedMessage();
    }
  }

}
@@ -0,0 +1,27 @@
package ru.micord.ervu_eks.handler;

import org.springframework.stereotype.Component;
import ru.micord.ervu_eks.exception.ConfigExecutorException;

import ru.cg.webbpm.modules.core.error_handling.api.ProcessedWebException;
import ru.cg.webbpm.modules.core.error_handling.api.WebExceptionHandler;

@Component
public class ConfigExecutorExceptionHandler implements WebExceptionHandler {

  @Override
  public boolean accept(Throwable throwable) {
    return throwable instanceof ConfigExecutorException;
  }

  @Override
  public double weight() {
    return 1000;
  }

  @Override
  public ProcessedWebException process(Throwable throwable) {
    return new ProcessedWebException()
        .addMessage(throwable.getMessage());
  }
}
@@ -6,8 +6,12 @@ import java.util.Arrays;
import java.util.Base64;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import dto.ConfigExecuteRequest;
import dto.ExportDataRequest;
import model.FileModel;
import org.slf4j.Logger;
@@ -17,10 +21,15 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.util.StreamUtils;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.RestTemplate;
import org.springframework.core.io.Resource;
import ru.micord.ervu_eks.exception.ConfigExecutorException;

/**
 * @author Evgenii Malkov
@@ -41,41 +50,115 @@ public class ConfigExecutorService {

  public FileModel exportData(ExportDataRequest request) {
    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);
    HttpEntity<ExportDataRequest> entity = new HttpEntity<>(request, headers);

    ResponseEntity<byte[]> response = restTemplate.exchange(
        url.concat("/").concat("downloadCSV"),
        HttpMethod.POST, entity, byte[].class
    );
    try {
      ResponseEntity<Resource> response = restTemplate.exchange(
          url.concat("/").concat("downloadCSV"), HttpMethod.POST, entity, Resource.class);
      String content = "";
      if (response.getBody() != null) {
        byte[] fileBytes = StreamUtils.copyToByteArray(response.getBody().getInputStream());
        content = Base64.getEncoder().encodeToString(fileBytes);
      }
      String fileExtension = ".csv";
      List<String> contentDisposition = response.getHeaders().get(HttpHeaders.CONTENT_DISPOSITION);
      if (contentDisposition != null && !contentDisposition.isEmpty()) {
        String disposition = contentDisposition.get(0);
        int idx = disposition.indexOf("filename=");
        if (idx != -1) {
          String fileNameFromHeader = disposition.substring(idx + 9).replace("\"", "");
          int dotIndex = fileNameFromHeader.lastIndexOf(".");
          if (dotIndex != -1) {
            fileExtension = fileNameFromHeader.substring(dotIndex);
          }
        }
      }
      String fileName =
          request.getType() + "_" + new SimpleDateFormat("dd.MM.yyyy").format(new Date())
              + fileExtension;
      FileModel fileModel = new FileModel();
      fileModel.setFileContent(content);
      fileModel.setFileExtension(fileExtension);
      fileModel.setFileName(fileName);
      return fileModel;
    }
    catch (HttpClientErrorException e) {
      if (e.getStatusCode() == HttpStatus.BAD_REQUEST) {
        Map<String, Object> responseMap = new Gson().fromJson(e.getResponseBodyAsString(),
            new TypeToken<Map<String, Object>>() {
            }.getType()
        );

    String content = Base64.getEncoder().encodeToString(response.getBody());
    FileModel fileModel = new FileModel();
    fileModel.setFileContent(content);
    fileModel.setFileExtension(".csv");
    fileModel.setFileName(
        request.getType() + "_" + new SimpleDateFormat("dd.MM.yyyy").format(new Date()) + ".csv");
    return fileModel;
        if (responseMap.get("details") instanceof Map) {
          @SuppressWarnings("unchecked")
          Map<String, String> details = (Map<String, String>) responseMap.get("details");
          String detailsStr = String.join("\n", details.values());
          throw new ConfigExecutorException("eks.error.misc", new RuntimeException(detailsStr, e), detailsStr);
        }

        throw new ConfigExecutorException((String) responseMap.get("details"), e);
      }
      else {
        throw new RuntimeException("Export data failed with error", e);
      }
    }
    catch (Exception e) {
      throw new RuntimeException("Export data failed with error", e);
    }
  }

  public void call(String methodPath, List<String> ids) {
  public String call(String methodPath, ConfigExecuteRequest configExecuteRequest,
      boolean withDate) {
    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);
    HttpEntity<List<String>> entity = new HttpEntity<>(ids, headers);
    LOGGER.info("Starts call config executor service with method: {}, for ids: {}", methodPath,
        ids
    HttpEntity<?> entity;
    if (withDate) {
      entity = new HttpEntity<>(configExecuteRequest, headers);
    }
    else {
      entity = new HttpEntity<>(configExecuteRequest.getIds(), headers);
    }
    LOGGER.info("Starts call config executor service with method: {}, for request: {}", methodPath,
        configExecuteRequest
    );
    try {
      ResponseEntity<Object> response = restTemplate.exchange(url.concat(methodPath),
          HttpMethod.POST, entity, Object.class
      ResponseEntity<String> response = restTemplate.exchange(url.concat(methodPath),
          HttpMethod.POST, entity, String.class
      );
      LOGGER.info("Method: {}, executed with status: {}, for ids:{}", methodPath,
          response.getStatusCode().value(), ids
      LOGGER.info("Method: {}, executed with status: {}, for request:{}", methodPath,
          response.getStatusCode().value(), configExecuteRequest
      );
      return response.getBody();
    }
    catch (HttpClientErrorException e) {

      if (e.getStatusCode() == HttpStatus.BAD_REQUEST) {
        Map<String, Object> responseMap = new Gson().fromJson(e.getResponseBodyAsString(),
            new TypeToken<Map<String, Object>>() {
            }.getType()
        );

        if (responseMap.get("details") instanceof Map) {
          @SuppressWarnings("unchecked")
          Map<String, String> details = (Map<String, String>) responseMap.get("details");
          String detailsStr = String.join("\n", details.values());
          throw new ConfigExecutorException("eks.error.misc", new RuntimeException(detailsStr, e), detailsStr);
        }

        throw new ConfigExecutorException((String) responseMap.get("details"), e);
      }
      else {
        throw new RuntimeException(
            String.format("Failed call config executor service method: %s for request: %s with error",
                methodPath, configExecuteRequest
            ), e);
      }
    }
    catch (Exception e) {
      throw new RuntimeException(
          String.format("Failed call config executor service method: %s for ids: %s with error",
              methodPath, ids
          String.format("Failed call config executor service method: %s for request: %s with error",
              methodPath, configExecuteRequest
          ), e);
    }
  }
@@ -0,0 +1,6 @@
error.unknown=Произошла неизвестная ошибка, обратитесь в службу технической поддержки
eks.error.argument.missing=Произошла ошибка ввода данных. Проверьте заполненность полей
eks.error.argument.invalid=Поле «ИД ЕРВУ граждан или иные сведения» заполнено неверно
eks.error.misc=Произошла неизвестная ошибка, обратитесь в службу технической поддержки
eks.error.date.empty=Пустые даты не допускаются
eks.error.date.order=Начальная дата не может быть позже конечной
@@ -0,0 +1,6 @@
error.unknown=Произошла неизвестная ошибка, обратитесь в службу технической поддержки
eks.error.argument.missing=Произошла ошибка ввода данных. Проверьте заполненность полей
eks.error.argument.invalid=Поле «ИД ЕРВУ граждан или иные сведения» заполнено неверно
eks.error.misc=Произошла неизвестная ошибка, обратитесь в службу технической поддержки
eks.error.date.empty=Пустые даты не допускаются
eks.error.date.order=Начальная дата не может быть позже конечной
@@ -3,23 +3,99 @@
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>ru.micord.ervu</groupId>
    <artifactId>eks</artifactId>
    <version>1.0.0-SNAPSHOT</version>
  </parent>
  <groupId>ru.micord.ervu.eks</groupId>
  <artifactId>config-data-executor</artifactId>
  <packaging>war</packaging>
  <version>1.0.0-SNAPSHOT</version>

  <properties>
    <maven.compiler.source>17</maven.compiler.source>
    <maven.compiler.target>17</maven.compiler.target>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <spring-boot.version>2.7.18</spring-boot.version>
  </properties>

  <dependencyManagement>
    <dependencies>
      <dependency>
        <groupId>com.amazonaws</groupId>
        <artifactId>aws-java-sdk-bom</artifactId>
        <version>1.12.770</version>
        <type>pom</type>
        <scope>import</scope>
      </dependency>
      <dependency>
        <groupId>org.springframework</groupId>
        <artifactId>spring-framework-bom</artifactId>
        <version>5.3.33</version>
        <type>pom</type>
        <scope>import</scope>
      </dependency>
      <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-test</artifactId>
        <version>${spring-boot.version}</version>
        <scope>test</scope>
      </dependency>
      <dependency>
        <groupId>org.junit.jupiter</groupId>
        <artifactId>junit-jupiter</artifactId>
        <version>5.8.2</version>
        <scope>test</scope>
      </dependency>
      <dependency>
        <groupId>org.mockito</groupId>
        <artifactId>mockito-core</artifactId>
        <version>4.6.1</version>
        <scope>test</scope>
      </dependency>
      <dependency>
        <groupId>org.mockito</groupId>
        <artifactId>mockito-junit-jupiter</artifactId>
        <version>4.6.1</version>
        <scope>test</scope>
      </dependency>
      <dependency>
        <groupId>org.assertj</groupId>
        <artifactId>assertj-core</artifactId>
        <version>3.23.1</version>
        <scope>test</scope>
      </dependency>
      <dependency>
        <groupId>com.atomikos</groupId>
        <artifactId>transactions-jta</artifactId>
        <version>6.0.0</version>
      </dependency>
      <dependency>
        <groupId>com.atomikos</groupId>
        <artifactId>transactions-jdbc</artifactId>
        <version>6.0.0</version>
      </dependency>
      <dependency>
        <groupId>org.projectlombok</groupId>
        <artifactId>lombok</artifactId>
        <version>1.18.34</version>
        <scope>provided</scope>
      </dependency>
      <dependency>
        <groupId>com.arangodb</groupId>
        <artifactId>arangodb-java-driver</artifactId>
        <version>7.7.1</version>
      </dependency>
      <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
        <version>${spring-boot.version}</version>
      </dependency>
      <dependency>
        <groupId>javax.transaction</groupId>
        <artifactId>javax.transaction-api</artifactId>
        <version>1.3</version>
      </dependency>
      <dependency>
        <groupId>org.postgresql</groupId>
        <artifactId>postgresql</artifactId>
        <version>42.7.7</version>
      </dependency>
    </dependencies>
  </dependencyManagement>

@@ -28,6 +104,31 @@
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-test</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-core</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-junit-jupiter</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.assertj</groupId>
      <artifactId>assertj-core</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-tx</artifactId>
@@ -70,11 +171,17 @@
      <groupId>com.amazonaws</groupId>
      <artifactId>aws-java-sdk-s3</artifactId>
    </dependency>
    <dependency>
      <groupId>javax.servlet</groupId>
      <artifactId>javax.servlet-api</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>jakarta.xml.bind</groupId>
      <artifactId>jakarta.xml.bind-api</artifactId>
      <version>4.0.2</version>
    </dependency>
    <dependency>
      <groupId>com.sun.xml.bind</groupId>
      <artifactId>jaxb-impl</artifactId>
      <version>4.0.5</version>
      <scope>compile</scope>
    </dependency>
  </dependencies>
  <repositories>
    <repository>
@@ -84,16 +191,8 @@
    </repository>
  </repositories>
  <build>
    <finalName>${artifactId}</finalName>
    <finalName>${project.artifactId}</finalName>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-war-plugin</artifactId>
        <version>3.1.0</version>
        <configuration>
          <failOnMissingWebXml>false</failOnMissingWebXml>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-maven-plugin</artifactId>
@@ -109,6 +208,17 @@
        </execution>
      </executions>
    </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <version>2.22.2</version>
        <configuration>
          <includes>
            <include>**/*Test.java</include>
            <include>**/*Tests.java</include>
          </includes>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>
@@ -1,8 +0,0 @@
package org.micord;

import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;

@SpringBootApplication
public class SpringBootTomcatApplication extends SpringBootServletInitializer {
}
@@ -3,7 +3,7 @@ package org.micord.config;
import com.arangodb.ArangoDB;
import com.arangodb.ArangoDBException;
import com.arangodb.ArangoDatabase;
import org.micord.models.AqlConnectionParams;
import org.micord.models.requests.AqlConnectionParams;

/**
 * @author Maksim Tereshin
@@ -1,38 +1,29 @@
package org.micord.config;

import com.atomikos.icatch.jta.UserTransactionImp;
import com.atomikos.icatch.jta.UserTransactionManager;
import javax.transaction.TransactionManager;
import javax.transaction.UserTransaction;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.jta.JtaTransactionManager;

/**
 * @author Maksim Tereshin
 */

@Configuration
@EnableTransactionManagement
public class AtomikosConfig {

  @Bean
  public UserTransaction userTransaction() throws Throwable {
    UserTransactionImp userTransactionImp = new UserTransactionImp();
    userTransactionImp.setTransactionTimeout(300);
    return userTransactionImp;
  }

  @Bean
  public TransactionManager atomikosTransactionManager() {
  @Bean(initMethod = "init", destroyMethod = "close")
  public UserTransactionManager userTransactionManager() {
    UserTransactionManager userTransactionManager = new UserTransactionManager();
    userTransactionManager.setForceShutdown(true);
    return userTransactionManager;
  }

  @Bean
  public JtaTransactionManager transactionManager() throws Throwable {
    return new JtaTransactionManager(userTransaction(), atomikosTransactionManager());
  public JtaTransactionManager transactionManager() {
    JtaTransactionManager jtaTransactionManager = new JtaTransactionManager();
    jtaTransactionManager.setTransactionManager(userTransactionManager());
    jtaTransactionManager.setUserTransaction(userTransactionManager());
    return jtaTransactionManager;
  }

}
@@ -1,7 +1,7 @@
package org.micord.config;

import com.atomikos.jdbc.AtomikosDataSourceBean;
import org.micord.models.SqlConnectionParams;
import org.micord.models.requests.SqlConnectionParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -43,7 +43,7 @@ public class DatabaseConnection {

    if (!dataSources.containsKey(database)) {
      AtomikosDataSourceBean xaDataSource = new AtomikosDataSourceBean();
      xaDataSource.setUniqueResourceName("jdbcDatasource_" + database);
      xaDataSource.setUniqueResourceName(database);
      xaDataSource.setXaDataSourceClassName(params.getJdbcXaDataSourceClassName());
      xaDataSource.setPoolSize(Integer.parseInt(params.getJdbcXaDataSourcePoolSize()));

@@ -1,38 +1,37 @@
package org.micord.config;

import org.micord.models.S3ConnectionParams;
import org.micord.models.S3Request;
import org.micord.models.requests.S3ConnectionParams;
import org.micord.models.requests.S3Request;

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpRequest;
import java.nio.charset.StandardCharsets;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Base64;

/**
 * @author Maksim Tereshin
 */
public class S3HttpConnection {

  public static HttpRequest buildHttpRequest(S3Request request, String file) throws Exception {
    S3ConnectionParams connectionParams = request.getS3ConnectionParams();
    String host = connectionParams.getHost() + ":" + connectionParams.getPort();
    String host = connectionParams.getProtocol() + "://" + connectionParams.getHost() + ":" + connectionParams.getPort();
    String s3Key = connectionParams.getS3Key();
    String s3Secret = connectionParams.getS3Secret();
    String method = connectionParams.getMethod().toUpperCase();
    String body = connectionParams.getBody();

    String resource = "/" + file;
    String resource = "/" + encodeFileName(file);
    String contentType = connectionParams.getContentType();
    String date = ZonedDateTime.now().format(DateTimeFormatter.RFC_1123_DATE_TIME);
    String date = ZonedDateTime.now(ZoneOffset.UTC).format(DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss'Z'"));

    String signature = generateSignature(method, contentType, date, resource, s3Secret);

    HttpRequest.Builder requestBuilder = HttpRequest.newBuilder()
        .uri(URI.create("http://" + host + resource))
        .uri(URI.create(host + resource))
        .header("Date", date)
        .header("Content-Type", contentType)
        .header("Authorization", "AWS " + s3Key + ":" + signature);
@@ -73,4 +72,14 @@ public class S3HttpConnection {
    return Base64.getEncoder().encodeToString(hash);
  }

  public static String encodeFileName(String path) {
    String[] parts = path.split("/");
    StringBuilder result = new StringBuilder();
    for (int i = 0; i < parts.length; i++) {
      if (i > 0) result.append("/");
      result.append(URLEncoder.encode(parts[i], StandardCharsets.UTF_8));
    }
    return result.toString();
  }

}
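Note (not part of the diff): a minimal sketch of what the new encodeFileName helper does. It percent-encodes each path segment with URLEncoder while keeping the "/" separators; since URLEncoder follows form-encoding rules, a space in a segment becomes "+". The sample file name is an assumption for illustration.

import org.micord.config.S3HttpConnection;

class EncodeFileNameSketch {
  public static void main(String[] args) {
    // Segments are encoded independently, separators are preserved.
    String encoded = S3HttpConnection.encodeFileName("reports/отчёт 2024.csv");
    System.out.println(encoded); // reports/%D0%BE%D1%82%D1%87%D1%91%D1%82+2024.csv
  }
}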
@@ -1,12 +1,7 @@
package org.micord.controller;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.List;

import org.micord.models.DownloadCSVRequest;
import org.micord.enums.ConfigType;
import org.micord.models.requests.RequestParameters;
import org.micord.service.ApiService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -18,6 +13,13 @@ import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;

/**
 * REST Controller for API operations.
 */
@@ -30,67 +32,108 @@ public class ApiController {
  @Autowired
  private ApiService apiService;

  @PostMapping("/block")
  public ResponseEntity<?> block(@RequestBody List<String> ids) throws FileNotFoundException {
    logger.debug("Starting block process for ids: {}", ids);
    apiService.process("block", ids);
    logger.debug("Finished block process for ids: {}", ids);
    return ResponseEntity.ok("");
  @PostMapping("/removeMilitaryDraftNotices")
  public ResponseEntity<?> removeMilitaryDraftNotices(@RequestBody RequestParameters request) throws SQLException, FileNotFoundException {
    List<String> ids = request.getIds();

    logger.debug("Starting removeMilitaryDraftNotices process for ids: {}", ids);
    apiService.process(ConfigType.REMOVE_MILITARY_DRAFT_NOTICES, request);
    logger.debug("Finished removeMilitaryDraftNotices process for ids: {}", ids);

    return ResponseEntity.ok("Операция \"Удаление повесток\" завершена успешно.");
  }

  @PostMapping("/deleteFiles")
  public ResponseEntity<?> deleteFiles(@RequestBody List<String> ids) throws FileNotFoundException, SQLException {

    apiService.process(ConfigType.DELETE_FILES, ids);

    return ResponseEntity.ok("Операция \"Удаление файлов\" завершена успешно.");
  }

  @PostMapping("/block")
  public ResponseEntity<?> block(@RequestBody List<String> ids) throws SQLException, FileNotFoundException {

    logger.debug("Starting block process for ids: {}", ids);
    apiService.process(ConfigType.BLOCK, ids);
    logger.debug("Finished block process for ids: {}", ids);

    return ResponseEntity.ok("Операция \"Блокировка\" завершена успешно.");
  }

  @PostMapping("/unblock")
  public ResponseEntity<?> unblock(@RequestBody List<String> ids) throws FileNotFoundException {
  public ResponseEntity<?> unblock(@RequestBody List<String> ids) throws SQLException, FileNotFoundException {

    logger.debug("Starting unblock process for ids: {}", ids);
    apiService.process("unblock", ids);
    apiService.process(ConfigType.UNBLOCK, ids);
    logger.debug("Finished unblock process for ids: {}", ids);
    return ResponseEntity.ok("");

    return ResponseEntity.ok("Операция \"Разблокировка\" завершена успешно.");
  }

  @PostMapping("/removeFromSystem")
  public ResponseEntity<?> removeFromSystem(@RequestBody List<String> ids)
      throws FileNotFoundException {
  public ResponseEntity<?> removeFromSystem(@RequestBody List<String> ids) throws SQLException, FileNotFoundException {

    logger.debug("Starting removeFromSystem process for ids: {}", ids);
    apiService.process("removeFromSystem", ids);
    apiService.process(ConfigType.REMOVE_FROM_SYSTEM, ids);
    logger.debug("Finished removeFromSystem process for ids: {}", ids);
    return ResponseEntity.ok("");

    return ResponseEntity.ok("Операция \"Удаление данных по гражданину\" завершена успешно.");
  }

  @PostMapping("/removeFromCallList")
  public ResponseEntity<?> removeFromCallList(@RequestBody List<String> ids)
      throws FileNotFoundException {
  public ResponseEntity<?> removeFromCallList(@RequestBody List<String> ids) throws SQLException, FileNotFoundException {

    logger.debug("Starting removeFromCallList process for ids: {}", ids);
    apiService.process("removeFromCallList", ids);
    apiService.process(ConfigType.REMOVE_FROM_CALL_LIST, ids);
    logger.debug("Finished removeFromCallList process for ids: {}", ids);
    return ResponseEntity.ok("");

    return ResponseEntity.ok("Операция \"Удаление из списков на вызов\" завершена успешно.");
  }

  @PostMapping("/downloadCSV")
  public ResponseEntity<Resource> downloadCSV(@RequestBody DownloadCSVRequest request)
      throws IOException {
    logger.debug("Starting downloadCSV process for request: {}", request.getType());
    if (request.getStartDate() != null && request.getEndDate() != null) {
      if (request.getStartDate().isAfter(request.getEndDate())) {
        throw new IllegalArgumentException("Start date must be before end date");
      }
    }

    File csvFile = apiService.download("downloadCSV", request);
    InputStreamResource resource = new InputStreamResource(new FileInputStream(csvFile));
  @PostMapping("/downloadCSV")
  public ResponseEntity<Resource> downloadCSV(@RequestBody RequestParameters request) throws IOException, SQLException {
    logger.debug("Starting downloadCSV process for request: {}", request.getType());

    File downloadFile = apiService.download(ConfigType.DOWNLOAD_CSV, request);
    InputStreamResource resource = new InputStreamResource(new FileInputStream(downloadFile));

    logger.debug("Finished downloadCSV process for request: {}. Sending to user...", request.getType());

    return ResponseEntity.ok()
        .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=" + csvFile.getName())
        .contentType(MediaType.parseMediaType("text/csv"))
        .contentLength(csvFile.length())
        .body(resource);
    // Determine content type based on file extension
    String fileName = downloadFile.getName();
    MediaType contentType;
    if (fileName.toLowerCase().endsWith(".zip")) {
      contentType = MediaType.APPLICATION_OCTET_STREAM;
    } else {
      contentType = MediaType.parseMediaType("text/csv; charset=UTF-8");
    }

    ResponseEntity.BodyBuilder response = ResponseEntity.ok()
        .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=" + fileName)
        .contentType(contentType)
        .contentLength(downloadFile.length());

    // Only add content encoding for CSV files
    if (!fileName.endsWith(".zip")) {
      response.header(HttpHeaders.CONTENT_ENCODING, "UTF-8");
    }

    return response.body(resource);
  }

  @GetMapping("/listDownloadTypes")
  public ResponseEntity<?> listDownloadTypes()
      throws FileNotFoundException {
    List<String> downloadCSVTypes = apiService.getDownloadTypes("downloadCSV");

  @GetMapping("/listDownloadTypes")
  public ResponseEntity<List<String>> listDownloadTypes() throws FileNotFoundException {
    logger.debug("Fetching list of download types...");

    List<String> downloadCSVTypes = apiService.getDownloadTypes(ConfigType.DOWNLOAD_CSV);

    logger.debug("Successfully retrieved download types");
    return ResponseEntity.ok(downloadCSVTypes);
  }
}
@@ -0,0 +1,23 @@
package org.micord.enums;

import lombok.Getter;

@Getter
public enum ConfigType {

  BLOCK("block"),
  DELETE_FILES("deleteFiles"),
  UNBLOCK("unblock"),
  REMOVE_FROM_SYSTEM("removeFromSystem"),
  REMOVE_FROM_CALL_LIST("removeFromCallList"),
  DOWNLOAD_CSV("downloadCSV"),
  VALIDATE_BLOCK("validateBlock"),
  REMOVE_MILITARY_DRAFT_NOTICES("removeMilitaryDraftNotices");

  private final String type;

  ConfigType(String type) {
    this.type = type;
  }

}
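Note (not part of the diff): each constant stores the raw operation string and Lombok's @Getter generates getType(). A hypothetical reverse lookup from that string back to the enum constant could be sketched like this; the helper class is not part of the commit.

import java.util.Arrays;
import java.util.Optional;

import org.micord.enums.ConfigType;

class ConfigTypeLookupSketch {
  // Hypothetical helper: resolve "removeFromCallList" -> REMOVE_FROM_CALL_LIST.
  static Optional<ConfigType> fromType(String type) {
    return Arrays.stream(ConfigType.values())
        .filter(c -> c.getType().equals(type))
        .findFirst();
  }
}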
@@ -0,0 +1,123 @@
package org.micord.exceptions;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;

import javax.naming.ServiceUnavailableException;
import java.io.FileNotFoundException;
import java.nio.file.AccessDeniedException;
import java.sql.SQLException;
import java.util.Map;
import java.util.NoSuchElementException;

@RestControllerAdvice
public class GlobalExceptionHandler {

    private static final Logger logger = LoggerFactory.getLogger(GlobalExceptionHandler.class);

    @ExceptionHandler(SQLException.class)
    public ResponseEntity<?> handleSQLException(SQLException e) {
        return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(Map.of(
                "message", "Database error occurred",
                "details", e.getMessage()
        ));
    }

    @ExceptionHandler(Exception.class)
    public ResponseEntity<?> handleGeneralException(Exception e) {
        return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(Map.of(
                "message", "Unexpected error occurred",
                "details", e.getMessage()
        ));
    }

    @ExceptionHandler(ValidationException.class)
    public ResponseEntity<?> handleValidationException(ValidationException e) {
        return ResponseEntity.badRequest().body(Map.of(
                "message", "Validation error occurred",
                "details", e.getValidationDetails()
        ));
    }

    @ExceptionHandler(IllegalStateException.class)
    public ResponseEntity<?> handleIllegalStateException(IllegalStateException e) {
        return ResponseEntity.status(HttpStatus.CONFLICT).body(Map.of(
                "message", "Operation cannot be performed due to an invalid state",
                "details", e.getMessage()
        ));
    }

    @ExceptionHandler(AccessDeniedException.class)
    public ResponseEntity<?> handleAccessDeniedException(AccessDeniedException e) {
        return ResponseEntity.status(HttpStatus.FORBIDDEN).body(Map.of(
                "message", "Access denied",
                "details", e.getMessage()
        ));
    }

    @ExceptionHandler(ServiceUnavailableException.class)
    public ResponseEntity<?> handleServiceUnavailableException(ServiceUnavailableException e) {
        return ResponseEntity.status(HttpStatus.SERVICE_UNAVAILABLE).body(Map.of(
                "message", "Service is temporarily unavailable",
                "details", e.getMessage()
        ));
    }

    @ExceptionHandler(FileNotFoundException.class)
    public ResponseEntity<?> handleFileNotFoundException(FileNotFoundException e) {
        return ResponseEntity.status(HttpStatus.NOT_FOUND).body(Map.of(
                "message", "File not found",
                "details", e.getMessage()
        ));
    }

    @ExceptionHandler(IllegalArgumentException.class)
    public ResponseEntity<?> handleIllegalArgumentException(IllegalArgumentException e) {
        return ResponseEntity.badRequest().body(Map.of(
                "message", "Invalid input provided",
                "details", e.getMessage()
        ));
    }

    @ExceptionHandler(IllegalRequestParametersException.class)
    public ResponseEntity<?> handleIllegalRequestParametersException(IllegalRequestParametersException e) {
        return ResponseEntity.badRequest().body(Map.of(
                "message", "Произошла ошибка ввода данных. Проверьте правильность заполнения полей",
                "details", e.getMessage()
        ));
    }

    @ExceptionHandler(NoSuchElementException.class)
    public ResponseEntity<?> handleNoSuchElementException(NoSuchElementException e) {
        logger.error("Resource not found: {}", e.getMessage());
        return ResponseEntity.status(HttpStatus.NOT_FOUND).body(Map.of(
                "message", "Requested resource not found",
                "details", e.getMessage()
        ));
    }

    @ExceptionHandler(RuntimeException.class)
    public ResponseEntity<?> handleRuntimeException(RuntimeException e) {
        logger.error("Unexpected error occurred: {}", e.getMessage(), e);
        return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(Map.of(
                "message", "Internal server error",
                "details", e.getMessage()
        ));
    }

    @ExceptionHandler(NoDownloadReportRecordsException.class)
    public ResponseEntity<?> handleNoDownloadReportRecordsException(NoDownloadReportRecordsException e) {
        logger.error("Unexpected error occurred: {}", e.getMessage(), e);
        return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(Map.of(
                "message", e.getMessage(),
                "details", e.getMessage()
        ));
    }

}
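Spring resolves @ExceptionHandler methods by the most specific matching type, so ValidationException and NoDownloadReportRecordsException are routed to their dedicated handlers even though both extend RuntimeException; the RuntimeException and Exception handlers only catch what nothing narrower claims. A minimal illustration of the payload shape every handler builds (illustrative only, not part of the commit):

import java.util.Map;

class ErrorBodyExample {
    public static void main(String[] args) {
        // Same two-key structure the handlers build with Map.of(...)
        Map<String, String> body = Map.of(
                "message", "Database error occurred",
                "details", "connection refused");
        System.out.println(body);
    }
}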
@@ -0,0 +1,12 @@
package org.micord.exceptions;

import lombok.Getter;

@Getter
public class IllegalRequestParametersException extends IllegalArgumentException {

    public IllegalRequestParametersException(String message) {
        super(message);
    }

}
@@ -0,0 +1,14 @@
package org.micord.exceptions;

import java.io.Serial;

public class NoDownloadReportRecordsException extends RuntimeException {

    @Serial
    private static final long serialVersionUID = 1L;

    public NoDownloadReportRecordsException(String message) {
        super(message);
    }

}
@@ -0,0 +1,17 @@
package org.micord.exceptions;

import lombok.Getter;

import java.util.Map;

@Getter
public class ValidationException extends RuntimeException {

    private final Map<String, String> validationDetails;

    public ValidationException(String message, Map<String, String> validationDetails) {
        super(message);
        this.validationDetails = validationDetails;
    }

}
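A minimal sketch of raising this exception with field-level details; the field names below are assumptions for illustration, and the GlobalExceptionHandler above turns the map into the 400 response body:

import java.util.Map;
import org.micord.exceptions.ValidationException;

class ValidationExample {
    static void rejectRequest() {
        // "ids" / "startDate" are hypothetical field names, not taken from the commit
        throw new ValidationException("Request validation failed", Map.of(
                "ids", "must not be empty",
                "startDate", "must not be after endDate"));
    }
}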
@@ -5,11 +5,7 @@ import lombok.Setter;
import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlElementWrapper;
import java.util.List;
import java.util.stream.Collectors;

/**
 * @author Maksim Tereshin
 */
@Setter
public class AqlRequest extends BaseRequest {

@@ -27,16 +23,4 @@ public class AqlRequest extends BaseRequest {
        return aqlRequestCollections;
    }

    public List<AqlRequestCollection> getReadCollections() {
        return aqlRequestCollections.stream()
                .filter(collection -> collection.getType() != null && collection.getType().contains("read"))
                .collect(Collectors.toList());
    }

    public List<AqlRequestCollection> getWriteCollections() {
        return aqlRequestCollections.stream()
                .filter(collection -> collection.getType() != null && collection.getType().contains("write"))
                .collect(Collectors.toList());
    }

}
@@ -5,14 +5,11 @@ import lombok.Setter;
import jakarta.xml.bind.annotation.XmlAttribute;
import jakarta.xml.bind.annotation.XmlValue;

/**
 * @author Maksim Tereshin
 */
@Setter
public class AqlRequestCollection {

    private String type;
    private String collectionName;
    private String collectionUrl;

    @XmlAttribute(name = "type")
    public String getType() {

@@ -20,8 +17,8 @@ public class AqlRequestCollection {
    }

    @XmlValue
    public String getCollectionName() {
        return collectionName;
    public String getCollectionUrl() {
        return collectionUrl;
    }

}
@@ -1,21 +1,16 @@
package org.micord.models;

import lombok.Getter;

import java.nio.file.attribute.FileTime;

public class CachedConfig {
    private final Requests config;
@Getter
public class CachedConfig<T> {
    private final T config;
    private final FileTime modifiedTime;

    public CachedConfig(Requests config, FileTime modifiedTime) {
    public CachedConfig(T config, FileTime modifiedTime) {
        this.config = config;
        this.modifiedTime = modifiedTime;
    }

    public Requests getConfig() {
        return config;
    }

    public FileTime getModifiedTime() {
        return modifiedTime;
    }
}
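The class now caches any config type together with the file's modification time, so the loader can skip re-parsing unchanged files. A minimal staleness check, assuming a configPath variable and the new generic signature:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import org.micord.models.CachedConfig;
import org.micord.models.requests.Requests;

class CacheCheckSketch {
    // Re-read only when the file on disk is newer than the cached copy
    static boolean isStale(CachedConfig<Requests> cached, Path configPath) throws IOException {
        FileTime onDisk = Files.getLastModifiedTime(configPath);
        return onDisk.compareTo(cached.getModifiedTime()) > 0;
    }
}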
@@ -1,13 +1,11 @@
package org.micord.models;

import lombok.Setter;
import org.micord.enums.RequestArgumentType;

import jakarta.xml.bind.annotation.XmlAttribute;
import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlElementWrapper;
import jakarta.xml.bind.annotation.XmlRootElement;
import java.util.List;
import lombok.Setter;

/**
 * @author Maksim Tereshin
@@ -1,41 +0,0 @@
package org.micord.models;

import lombok.Setter;

import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlRootElement;
import java.util.List;

/**
 * @author Maksim Tereshin
 */
@Setter
@XmlRootElement(name = "Requests")
public class Requests {

    private List<SqlRequest> sqlRequests;
    private List<AqlRequest> aqlRequests;
    private List<S3Request> s3Requests;
    private List<DownloadRequest> downloadRequests;

    @XmlElement(name = "DownloadRequest")
    public List<DownloadRequest> getDownloadRequests() {
        return downloadRequests;
    }

    @XmlElement(name = "SqlRequest")
    public List<SqlRequest> getSqlRequests() {
        return sqlRequests;
    }

    @XmlElement(name = "AqlRequest")
    public List<AqlRequest> getAqlRequests() {
        return aqlRequests;
    }

    @XmlElement(name = "S3Request")
    public List<S3Request> getS3Requests() {
        return s3Requests;
    }

}
@@ -0,0 +1,18 @@
package org.micord.models;

import java.util.List;

import lombok.AllArgsConstructor;
import lombok.Getter;

/**
 * @author Eduard Tihomirov
 */
@Getter
@AllArgsConstructor
public class SqlDownloadBuildQueryResponse {

    private String sql;
    private String paramName;
    private List<String> params;
}
@@ -0,0 +1,49 @@
package org.micord.models.requests;

import jakarta.xml.bind.annotation.XmlElement;
import lombok.Setter;

/**
 * @author Maksim Tereshin
 */
@Setter
public class AqlConnectionParams {

    private String host;
    private int port;
    private String username;
    private String password;
    private String database;
    private String collection;

    @XmlElement(name = "Host")
    public String getHost() {
        return host;
    }

    @XmlElement(name = "Port")
    public int getPort() {
        return port;
    }

    @XmlElement(name = "Username")
    public String getUsername() {
        return username;
    }

    @XmlElement(name = "Password")
    public String getPassword() {
        return password;
    }

    @XmlElement(name = "Database")
    public String getDatabase() {
        return database;
    }

    @XmlElement(name = "Collection")
    public String getCollection() {
        return collection;
    }

}
@@ -0,0 +1,27 @@
package org.micord.models.requests;

import java.util.List;

import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlElementWrapper;
import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
@XmlAccessorType(XmlAccessType.FIELD)
public class AqlRequest extends BaseRequest {

    @XmlElement(name = "AqlConnectionParams")
    private AqlConnectionParams aqlConnectionParams;

    @XmlElement(name = "AqlRequestParameters")
    private AqlRequestParameters aqlRequestParameters;

    @XmlElementWrapper(name = "AqlRequestCollections")
    @XmlElement(name = "AqlRequestCollection")
    private List<AqlRequestCollection> aqlRequestCollections;

}
@@ -0,0 +1,29 @@
package org.micord.models.requests;

import jakarta.xml.bind.annotation.XmlAttribute;
import jakarta.xml.bind.annotation.XmlValue;
import lombok.Setter;

@Setter
public class AqlRequestCollection {

    private String type;
    private String dateAttribute;
    private String collectionUrl;

    @XmlAttribute(name = "type")
    public String getType() {
        return type;
    }

    @XmlAttribute(name = "dateAttribute")
    public String getDateAttribute() {
        return dateAttribute;
    }

    @XmlValue
    public String getCollectionUrl() {
        return collectionUrl;
    }

}
@@ -0,0 +1,40 @@
package org.micord.models.requests;

import jakarta.xml.bind.annotation.XmlAttribute;
import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlRootElement;

@XmlRootElement(name = "AqlRequestParameter")
public class AqlRequestParameter {

    private String type;
    private SqlConnectionParams sqlConnectionParams;
    private String aqlRequestParameterURL;

    @XmlAttribute(name = "type")
    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    @XmlElement(name = "SqlConnectionParams")
    public SqlConnectionParams getSqlConnectionParams() {
        return sqlConnectionParams;
    }

    public void setSqlConnectionParams(SqlConnectionParams sqlConnectionParams) {
        this.sqlConnectionParams = sqlConnectionParams;
    }

    @XmlElement(name = "AqlRequestParameterURL")
    public String getAqlRequestParameterURL() {
        return aqlRequestParameterURL;
    }

    public void setAqlRequestParameterURL(String aqlRequestParameterURL) {
        this.aqlRequestParameterURL = aqlRequestParameterURL;
    }
}
@@ -0,0 +1,22 @@
package org.micord.models.requests;

import java.util.ArrayList;
import java.util.List;

import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlRootElement;

@XmlRootElement(name = "AqlRequestParameters")
public class AqlRequestParameters {

    private List<AqlRequestParameter> parameters = new ArrayList<>();

    @XmlElement(name = "AqlRequestParameter")
    public List<AqlRequestParameter> getParameters() {
        return parameters;
    }

    public void setParameters(List<AqlRequestParameter> parameters) {
        this.parameters = parameters;
    }
}
@@ -0,0 +1,28 @@
package org.micord.models.requests;

import java.util.List;

import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlElement;
import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
@XmlAccessorType(XmlAccessType.FIELD)
public abstract class BaseRequest {

    @XmlElement(name = "RequestArgument")
    private List<RequestArgument> requestArguments;

    @XmlElement(name = "RequestURL")
    private String requestURL;

    @XmlElement(name = "RequestArgumentLimit")
    private Integer requestArgumentLimit;

    @XmlElement(name = "RequestValidationRules")
    private RequestValidationRules requestValidationRules;

}
@@ -0,0 +1,29 @@
package org.micord.models.requests;

import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlSeeAlso;
import lombok.Setter;

import java.util.List;

/**
 * @author Maksim Tereshin
 */
@Setter
@XmlSeeAlso({SqlRequest.class, S3Request.class})
public abstract class Request {

    private List<RequestArgument> requestArguments;
    private String requestURL;

    @XmlElement(name = "RequestArgument")
    public List<RequestArgument> getRequestArguments() {
        return requestArguments;
    }

    @XmlElement(name = "RequestURL")
    public String getRequestURL() {
        return requestURL;
    }

}
@@ -0,0 +1,42 @@
package org.micord.models.requests;

import org.micord.enums.RequestArgumentType;

import jakarta.xml.bind.annotation.XmlAttribute;
import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlRootElement;
import lombok.Setter;

/**
 * @author Maksim Tereshin
 */
@Setter
@XmlRootElement(name = "RequestArgument")
public class RequestArgument {

    private RequestArgumentType type;
    private String requestArgumentName;
    private String requestArgumentURL;
    private SqlConnectionParams requestArgumentConnectionParams;

    @XmlAttribute(name = "type")
    public RequestArgumentType getType() {
        return type;
    }

    @XmlElement(name = "RequestArgumentName")
    public String getRequestArgumentName() {
        return requestArgumentName;
    }

    @XmlElement(name = "RequestArgumentURL")
    public String getRequestArgumentURL() {
        return requestArgumentURL;
    }

    @XmlElement(name = "RequestArgumentConnectionParams")
    public SqlConnectionParams getRequestArgumentConnectionParams() {
        return requestArgumentConnectionParams;
    }

}
@@ -0,0 +1,16 @@
package org.micord.models.requests;

import lombok.Data;

import java.time.LocalDate;
import java.util.List;

@Data
public class RequestParameters {
    private String type;
    private List<String> ids;
    private LocalDate startDate;
    private LocalDate endDate;
}
@@ -0,0 +1,25 @@
package org.micord.models.requests;

import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlAttribute;
import jakarta.xml.bind.annotation.XmlRootElement;
import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
@XmlRootElement(name = "RequestValidationRules")
@XmlAccessorType(XmlAccessType.FIELD)
public class RequestValidationRules {

    @XmlAttribute(name = "isEmptyIdsAllowed")
    private Boolean isEmptyIdsAllowed = false;

    @XmlAttribute(name = "isEmptyDatesAllowed")
    private Boolean isEmptyDatesAllowed = false;

    @XmlAttribute(name = "isIdsFormatted")
    private Boolean isIdsFormatted = true;

}
@@ -0,0 +1,35 @@
package org.micord.models.requests;

import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlRootElement;
import lombok.Getter;
import lombok.Setter;
import org.micord.models.requests.downloads.AQLDownloadRequest;
import org.micord.models.requests.downloads.SQLDownloadRequest;

import java.util.List;

@Getter
@Setter
@XmlRootElement(name = "Requests")
@XmlAccessorType(XmlAccessType.FIELD)
public class Requests {

    @XmlElement(name = "SqlRequest")
    private List<SqlRequest> sqlRequests;

    @XmlElement(name = "AqlRequest")
    private List<AqlRequest> aqlRequests;

    @XmlElement(name = "S3Request")
    private List<S3Request> s3Requests;

    @XmlElement(name = "AQLDownloadRequest")
    private List<AQLDownloadRequest> aqlDownloadRequests;

    @XmlElement(name = "SQLDownloadRequest")
    private List<SQLDownloadRequest> sqlDownloadRequests;

}
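The new Requests root aggregates all request variants for one config file. A minimal sketch of the XML shape it would unmarshal and the JAXB call that loads it; the element contents are invented placeholders, only the element names come from the annotations above:

import java.io.StringReader;
import jakarta.xml.bind.JAXBContext;
import org.micord.models.requests.Requests;

class RequestsUnmarshalSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical minimal config, not a real project file
        String xml = """
                <Requests>
                    <SqlRequest>
                        <RequestURL>select 1;</RequestURL>
                    </SqlRequest>
                </Requests>
                """;
        Requests config = (Requests) JAXBContext.newInstance(Requests.class)
                .createUnmarshaller()
                .unmarshal(new StringReader(xml));
        System.out.println(config.getSqlRequests().size());
    }
}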
@@ -0,0 +1,58 @@
package org.micord.models.requests;

import jakarta.xml.bind.annotation.XmlElement;
import lombok.Setter;

@Setter
public class S3ConnectionParams {

    private String s3Key;
    private String s3Secret;
    private String protocol;
    private String host;
    private String port;
    private String contentType;
    private String method;
    private String body;

    @XmlElement(name = "S3Key")
    public String getS3Key() {
        return s3Key;
    }

    @XmlElement(name = "S3Secret")
    public String getS3Secret() {
        return s3Secret;
    }

    @XmlElement(name = "Protocol")
    public String getProtocol() {
        return protocol;
    }

    @XmlElement(name = "Host")
    public String getHost() {
        return host;
    }

    @XmlElement(name = "Port")
    public String getPort() {
        return port;
    }

    @XmlElement(name = "ContentType")
    public String getContentType() {
        return contentType;
    }

    @XmlElement(name = "Method")
    public String getMethod() {
        return method;
    }

    @XmlElement(name = "Body")
    public String getBody() {
        return body;
    }

}
@@ -0,0 +1,17 @@
package org.micord.models.requests;

import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlElement;
import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
@XmlAccessorType(XmlAccessType.FIELD)
public class S3Request extends BaseRequest {

    @XmlElement(name = "S3ConnectionParams")
    private S3ConnectionParams s3ConnectionParams;

}
@@ -0,0 +1,65 @@
package org.micord.models.requests;

import jakarta.xml.bind.annotation.XmlElement;
import lombok.Setter;

@Setter
public class SqlConnectionParams {

    private String jdbcHost;
    private String jdbcPort;
    private String jdbcUsername;
    private String jdbcPassword;
    private String jdbcDriverClassName;
    private String jdbcXaDataSourceClassName;
    private String jdbcXaDataSourcePoolSize;
    private String jdbcDatabase;
    private String jdbcXaDataSourceBorrowConnectionTimeout;

    @XmlElement(name = "JdbcXaDataSourceBorrowConnectionTimeout")
    public String getJdbcXaDataSourceBorrowConnectionTimeout() {
        return jdbcXaDataSourceBorrowConnectionTimeout;
    }

    @XmlElement(name = "JdbcXaDataSourcePoolSize")
    public String getJdbcXaDataSourcePoolSize() {
        return jdbcXaDataSourcePoolSize;
    }

    @XmlElement(name = "JdbcHost")
    public String getJdbcHost() {
        return jdbcHost;
    }

    @XmlElement(name = "JdbcPort")
    public String getJdbcPort() {
        return jdbcPort;
    }

    @XmlElement(name = "JdbcUsername")
    public String getJdbcUsername() {
        return jdbcUsername;
    }

    @XmlElement(name = "JdbcPassword")
    public String getJdbcPassword() {
        return jdbcPassword;
    }

    @XmlElement(name = "JdbcDriverClassName")
    public String getJdbcDriverClassName() {
        return jdbcDriverClassName;
    }

    @XmlElement(name = "JdbcXaDataSourceClassName")
    public String getJdbcXaDataSourceClassName() {
        return jdbcXaDataSourceClassName;
    }

    @XmlElement(name = "JdbcDatabase")
    public String getJdbcDatabase() {
        return jdbcDatabase;
    }

}
@@ -0,0 +1,17 @@
package org.micord.models.requests;

import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlElement;
import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
@XmlAccessorType(XmlAccessType.FIELD)
public class SqlRequest extends BaseRequest {

    @XmlElement(name = "SqlConnectionParams")
    private SqlConnectionParams sqlConnectionParams;

}
@@ -0,0 +1,33 @@
package org.micord.models.requests.downloads;

import java.util.List;

import org.micord.models.requests.AqlConnectionParams;
import org.micord.models.requests.AqlRequestCollection;

import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlElementWrapper;
import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
@XmlAccessorType(XmlAccessType.FIELD)
public class AQLDownloadRequest extends BaseDownloadRequest {

    @XmlElement(name = "AqlConnectionParams")
    private AqlConnectionParams aqlConnectionParams;

    @XmlElement(name = "DownloadRequestEntitySelectorQuery")
    private String downloadRequestEntitySelectorQuery;

    @XmlElement(name = "AglDownloadLimit")
    private Integer aqlDownloadLimit;

    @XmlElementWrapper(name = "AqlRequestCollections")
    @XmlElement(name = "AqlRequestCollection")
    private List<AqlRequestCollection> aqlRequestCollections;
}
@@ -0,0 +1,18 @@
package org.micord.models.requests.downloads;

import org.micord.models.requests.BaseRequest;

import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlElement;
import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
@XmlAccessorType(XmlAccessType.FIELD)
public abstract class BaseDownloadRequest extends BaseRequest {

    @XmlElement(name = "DownloadRequestType")
    private String downloadRequestType;
}
@@ -0,0 +1,19 @@
package org.micord.models.requests.downloads;

import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlElement;
import lombok.Getter;
import lombok.Setter;
import org.micord.models.requests.SqlConnectionParams;

@Setter
@Getter
@XmlAccessorType(XmlAccessType.FIELD)
public class SQLDownloadRequest extends BaseDownloadRequest {

    @XmlElement(name = "SqlConnectionParams")
    private SqlConnectionParams sqlConnectionParams;

}
@@ -0,0 +1,39 @@
package org.micord.models.validations;

import jakarta.xml.bind.annotation.XmlAttribute;
import jakarta.xml.bind.annotation.XmlElement;
import lombok.Setter;
import org.micord.models.requests.SqlConnectionParams;

import java.util.List;

@Setter
public class ValidationRule {

    private SqlConnectionParams sqlConnectionParams;
    private String requestURL;
    private String idColumn;
    private List<String> validationColumns;

    @XmlElement(name = "RequestURL")
    public String getRequestURL() {
        return requestURL;
    }

    @XmlElement(name = "SqlConnectionParams")
    public SqlConnectionParams getSqlConnectionParams() {
        return sqlConnectionParams;
    }

    @XmlAttribute(name = "validationColumns")
    public List<String> getValidationColumns() {
        return validationColumns;
    }

    @XmlAttribute(name = "idColumn")
    public String getIdColumn() {
        return idColumn;
    }

}
@@ -0,0 +1,21 @@
package org.micord.models.validations;

import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlRootElement;
import lombok.Setter;

import java.util.List;

@Setter
@XmlRootElement(name = "ValidationRules")
public class ValidationRules {

    private List<ValidationRule> validationRules;

    @XmlElement(name = "ValidationRule")
    public List<ValidationRule> getValidationRules() {
        return validationRules;
    }

}
@@ -1,7 +1,9 @@
package org.micord.service;

import org.micord.models.*;
import org.micord.utils.ConfigLoader;
import org.micord.enums.ConfigType;
import org.micord.models.requests.downloads.*;
import org.micord.models.requests.RequestParameters;
import org.micord.models.requests.Requests;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -10,60 +12,74 @@ import org.springframework.stereotype.Service;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.*;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@Service
public class ApiService {

    private static final Logger logger = LoggerFactory.getLogger(ApiService.class);

    @Autowired
    private ConfigLoader configLoader;

    @Autowired
    private RequestService sqlAndAqlService;

    @Autowired
    private DownloadService downloadService;

    public void process(String methodName, List<String> ids) throws FileNotFoundException {
        Requests config = getConfig(methodName);
    @Autowired
    private ConfigService configService;

    @Autowired
    private ValidationService validationService;

    public void process(ConfigType methodName, List<String> ids) throws FileNotFoundException, SQLException {
        Requests config = configService.getConfig(methodName, Requests.class);
        sqlAndAqlService.processSqlAndAqlRequests(config, ids);
    }

    public File download(String methodName, DownloadCSVRequest request) throws IOException {
        Requests config = getConfig(methodName);
    public void process(ConfigType methodName, RequestParameters parameters) throws FileNotFoundException, SQLException {
        Requests config = configService.getConfig(methodName, Requests.class);
        sqlAndAqlService.processSqlAndAqlRequests(config, parameters);
    }

        String type = request.getType();
        List<String> ids = Optional.ofNullable(request.getIds())
    public File download(ConfigType methodName, RequestParameters downloadRequest) throws IOException, SQLException {
        Requests config = configService.getConfig(methodName, Requests.class);

        String type = downloadRequest.getType();
        List<String> ids = Optional.ofNullable(downloadRequest.getIds())
                .filter(list -> !list.isEmpty())
                .orElse(null);

        DownloadRequest selectedRequest = config.getDownloadRequests().stream()
        BaseDownloadRequest selectedRequest = config.getAqlDownloadRequests().stream()
                .filter(r -> r.getDownloadRequestType().equals(type))
                .findFirst()
                .map(BaseDownloadRequest.class::cast)
                .or(() -> config.getSqlDownloadRequests().stream()
                        .filter(r -> r.getDownloadRequestType().equals(type))
                        .findFirst()
                        .map(BaseDownloadRequest.class::cast))
                .orElseThrow(() -> new IllegalArgumentException("Invalid download type: " + type));

        return downloadService.download(selectedRequest, ids, request.getStartDate(), request.getEndDate());
        Map<String, Boolean> validationResults = validationService.validateDownloadRequest(selectedRequest, downloadRequest, ids);

        return downloadService.download(selectedRequest, ids, downloadRequest, validationResults);
    }

    public List<String> getDownloadTypes(String methodName) throws FileNotFoundException {
        Requests config = getConfig(methodName);
    public List<String> getDownloadTypes(ConfigType methodName) throws FileNotFoundException {
        Requests config = configService.getConfig(methodName, Requests.class);

        return config.getDownloadRequests().stream()
                .map(DownloadRequest::getDownloadRequestType)
        return Stream.concat(
                        Optional.ofNullable(config.getSqlDownloadRequests()).orElse(Collections.emptyList()).stream(),
                        Optional.ofNullable(config.getAqlDownloadRequests()).orElse(Collections.emptyList()).stream()
                )
                .map(BaseDownloadRequest::getDownloadRequestType)
                .distinct()
                .collect(Collectors.toList());
    }

    private Requests getConfig(String methodName) throws FileNotFoundException {
        logger.debug("Loading configuration for method: {}", methodName);
        Optional<Requests> optionalConfig = configLoader.loadConfigIfModified(methodName);

        if (optionalConfig.isEmpty()) {
            throw new FileNotFoundException("Configuration for method " + methodName + " could not be loaded.");
        }

        return optionalConfig.get();
    }
}
@@ -0,0 +1,33 @@
package org.micord.service;

import org.micord.enums.ConfigType;
import org.micord.utils.ConfigLoader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.FileNotFoundException;
import java.util.Optional;

@Service
public class ConfigService {

    private static final Logger logger = LoggerFactory.getLogger(ConfigService.class);

    @Autowired
    private ConfigLoader configLoader;

    public <T> T getConfig(ConfigType methodName, Class<T> configClass) throws FileNotFoundException {
        logger.debug("Loading configuration for method: {}", methodName);
        Optional<T> optionalConfig = configLoader.loadConfigIfModified(methodName, configClass);

        if (optionalConfig.isEmpty()) {
            throw new FileNotFoundException("Configuration for method " + methodName + " could not be loaded.");
        }

        return optionalConfig.get();
    }

}
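A minimal sketch of how a caller obtains a typed config through the new service; the injected field and the chosen ConfigType are assumptions for illustration, the call itself mirrors what ApiService does:

import java.io.FileNotFoundException;
import org.micord.enums.ConfigType;
import org.micord.models.requests.Requests;
import org.micord.service.ConfigService;

class ConfigUsageSketch {
    private final ConfigService configService;

    ConfigUsageSketch(ConfigService configService) {
        this.configService = configService;
    }

    Requests loadBlockConfig() throws FileNotFoundException {
        // The class token drives the generic return type
        return configService.getConfig(ConfigType.BLOCK, Requests.class);
    }
}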
@@ -1,133 +1,567 @@
package org.micord.service;

import com.arangodb.ArangoCursor;
import com.arangodb.ArangoDBException;
import com.arangodb.ArangoDatabase;
import com.arangodb.model.AqlQueryOptions;
import org.micord.config.ArangoDBConnection;
import org.micord.config.DatabaseConnection;
import org.micord.models.DownloadRequest;
import org.micord.models.RequestArgument;
import org.micord.exceptions.NoDownloadReportRecordsException;
import org.micord.models.SqlDownloadBuildQueryResponse;
import org.micord.models.requests.RequestParameters;
import org.micord.models.requests.downloads.AQLDownloadRequest;
import org.micord.models.requests.downloads.BaseDownloadRequest;
import org.micord.models.requests.downloads.SQLDownloadRequest;
import org.micord.models.requests.RequestArgument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

@Service
public class DownloadService {

    private static final Logger logger = LoggerFactory.getLogger(DownloadService.class);
    private static final String REQUEST_WITH_ADDITIONAL_ID = "requestWithAdditionalId";

    public File download(DownloadRequest selectedRequest, List<String> ids, LocalDate startDate, LocalDate endDate) {

        return processDownloadRequest(selectedRequest, ids, startDate, endDate);
    public File download(BaseDownloadRequest selectedRequest, List<String> ids, RequestParameters parameters, Map<String, Boolean> validationResults) throws SQLException {
        LocalDate startDate = parameters.getStartDate();
        LocalDate endDate = parameters.getEndDate();
        if (selectedRequest instanceof SQLDownloadRequest) {
            return processSqlDownloadRequest((SQLDownloadRequest) selectedRequest, ids, startDate, endDate, validationResults);
        } else if (selectedRequest instanceof AQLDownloadRequest) {
            return processAqlDownloadRequest((AQLDownloadRequest) selectedRequest, ids, startDate, endDate, validationResults);
        }
        throw new IllegalArgumentException("Unsupported request type: " + selectedRequest.getClass().getSimpleName());
    }
    private File processDownloadRequest(DownloadRequest request, List<String> ids, LocalDate startDate, LocalDate endDate) {
        Map<String, Object> query = buildSqlQuery(request, ids, startDate, endDate);
    private File processAqlDownloadRequest(AQLDownloadRequest request, List<String> ids, LocalDate startDate, LocalDate endDate, Map<String, Boolean> validationResults) throws SQLException {
        try {
            ArangoDatabase arangoDb = ArangoDBConnection.getConnection(request.getAqlConnectionParams());

            Boolean emptyIdsAllowed = validationResults.get(ValidationService.IS_EMPTY_IDS_ALLOWED);
            Boolean emptyDatesAllowed = validationResults.get(ValidationService.IS_EMPTY_DATES_ALLOWED);

            List<Map<String, Object>> entities = executeSelectAqlRequest(
                    arangoDb,
                    request.getDownloadRequestEntitySelectorQuery(),
                    ids,
                    startDate,
                    endDate,
                    emptyIdsAllowed,
                    emptyDatesAllowed
            );

            // decided to return an empty file instead
            // if (entities.isEmpty()) {
            //     logger.warn("No entities found for main AQL request.");
            //     throw new NoDownloadReportRecordsException(
            //             "Отчет не может быть сгенерирован. Нет записей в базе для успешной генерации."
            //     );
            // }

            return writeResultsToCsv(entities, request.getAqlDownloadLimit());

        } catch (ArangoDBException e) {
            logger.error("Error connecting to ArangoDB or executing AQL query: {}", e.getMessage(), e);
            throw new SQLException("Ошибка работы с базой данных. Попробуйте позже.", e);
        } catch (NoDownloadReportRecordsException e) {
            logger.warn("No records available for report generation: {}", e.getMessage());
            throw e;
        } catch (Exception e) {
            logger.error("Unexpected error occurred during report generation: {}", e.getMessage(), e);
            throw new RuntimeException("Произошла непредвиденная ошибка при генерации отчета.", e);
        }

        // request.getAqlRequestCollections().forEach(collection -> {
        //     String type = collection.getCollectionUrl();
        //     String entityType;
        //
        //     if (Objects.equals(type, "applications")) {
        //         entityType = "applicationId";
        //     } else {
        //         entityType = type + "Id";
        //     }
        //
        //     Object entityIds = entities.get(entityType);
        //
        //     if (entityIds instanceof String) {
        //         entityIds = Collections.singletonList((String) entityIds);
        //     }
        //
        //     String aqlQuery = buildAqlQuery(type, ids, collection.getDateAttribute(), startDate, endDate, emptyIdsAllowed, emptyDatesAllowed);
        //
        //     results.addAll(executeAqlQuery(arangoDb, aqlQuery, (List<String>) entityIds, startDate, endDate, emptyIdsAllowed, emptyDatesAllowed));
        // });

        // return writeResultsToCsv(results);
    }
    private File processSqlDownloadRequest(SQLDownloadRequest request,
            List<String> ids, LocalDate startDate, LocalDate endDate,
            Map<String, Boolean> validationResults) {
        List<File> allFiles = new ArrayList<>();
        int offset = 0;
        int pageIndex = 1;
        int limit = 600000;
        if (request.getRequestArgumentLimit() != null) {
            limit = request.getRequestArgumentLimit();
        }
        List<String[]> results = new ArrayList<>();
        try (Connection connection = DatabaseConnection.getConnection(
                request.getSqlConnectionParams())) {
            String requestURL = (String) query.get("requestURL");

            List<String[]> results = executeSqlQuery(connection, requestURL);

            File csvFile = File.createTempFile("download-", ".csv");

            try (PrintWriter writer = new PrintWriter(csvFile)) {
                for (String[] row : results) {
                    writer.println(String.join(",", row));
                request.getSqlConnectionParams())) {
            Map<String, Object> query = buildSqlQuery(request, ids, startDate, endDate,
                    validationResults, limit, offset
            );
            if (query.get(REQUEST_WITH_ADDITIONAL_ID) != null) {
                pageIndex--;
                SqlDownloadBuildQueryResponse response = (SqlDownloadBuildQueryResponse) query.get(REQUEST_WITH_ADDITIONAL_ID);
                String url = response.getSql();
                String paramName = response.getParamName();
                Collection<List<String>> pages = partitionList(response.getParams(), limit);
                url = url.replaceAll(";(?=[^;]*$)", " ") + " LIMIT " + limit + " OFFSET "
                        + offset + ";";
                for (List<String> page : pages) {
                    pageIndex++;
                    String resultSet = "(" + page.stream()
                            .map(s -> "'" + s.trim() + "'")
                            .collect(Collectors.joining(", ")) + ")";
                    url = url.replace("${" + paramName + "}",
                            resultSet
                    );
                    String newUrl = url;
                    int newOffset = offset;
                    while (true) {
                        results = new ArrayList<>(executeSqlQuery(connection, newUrl));
                        if (results.size() <= 1) {
                            pageIndex--;
                            break;
                        }
                        File file = writeSingleSqlCsvFile(results, "download-part" + pageIndex + "-", ".csv");
                        allFiles.add(file);
                        if (results.size() - 1 < limit) {
                            break;
                        }
                        String oldPagination = " OFFSET " + newOffset + ";";
                        newOffset += limit;
                        String newPagination = " OFFSET " + newOffset + ";";
                        newUrl = newUrl.replace(oldPagination, newPagination);
                        pageIndex++;
                    }
                }
            } catch (IOException e) {
                logger.error("Failed to write to CSV file", e);
            }
            else {
                String requestURL = (String) query.get("requestURL");
                while (true) {
                    results = new ArrayList<>(executeSqlQuery(connection, requestURL));
                    if (results.size() <= 1) {
                        break;
                    }
                    File file = writeSingleSqlCsvFile(results, "download-part" + pageIndex + "-", ".csv");
                    allFiles.add(file);
                    if (results.size() - 1 < limit) {
                        break;
                    }
                    String oldPagination = " OFFSET " + offset + ";";
                    offset += limit;
                    String newPagination = " OFFSET " + offset + ";";
                    requestURL = requestURL.replace(oldPagination, newPagination);
                    pageIndex++;

            return csvFile;

                }
            }
            if (allFiles.isEmpty()) {
                return writeSingleSqlCsvFile(results, "download-part 0", ".csv");
            }
            if (allFiles.size() == 1) {
                return allFiles.get(0);
            }
            else {
                return createZipArchive(allFiles, "download-");
            }
        }
        catch (SQLException | IOException e) {
            logger.error("SQL execution failed for query: {}", query, e);
            throw new RuntimeException("SQL execution failed for query " + request.getRequestURL()
                    .replaceAll("password=\\S+", "password=***"), e);
        }
        return null;
    }
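The pagination above relies on a regex that swaps the query's final semicolon for a space before appending LIMIT/OFFSET. A tiny self-contained check of that rewrite, using a made-up query string:

class PaginationRewriteSketch {
    public static void main(String[] args) {
        String sql = "select id, name from users where active = true;";
        int limit = 600000;
        int offset = 0;
        // ";(?=[^;]*$)" matches only the last semicolon, so earlier ';' characters survive
        String paginated = sql.replaceAll(";(?=[^;]*$)", " ") + " LIMIT " + limit + " OFFSET " + offset + ";";
        System.out.println(paginated);
        // -> select id, name from users where active = true  LIMIT 600000 OFFSET 0;
    }
}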
    private Map<String, Object> buildSqlQuery(DownloadRequest request, List<String> ids, LocalDate startDate, LocalDate endDate) {
    private File writeSingleSqlCsvFile(List<String[]> results, String prefix, String suffix) throws IOException {
        File csvFile = File.createTempFile(prefix, suffix);

        try (PrintWriter writer = new PrintWriter(
                new OutputStreamWriter(new FileOutputStream(csvFile), StandardCharsets.UTF_8))) {
            String lineSeparator = "\r\n";

            for (String[] row : results) {
                writer.print(formatCsvRow(row));
                writer.print(lineSeparator);
            }
        }

        return csvFile;
    }

    private List<Map<String, Object>> executeSelectAqlRequest(ArangoDatabase arangoDb,
            String downloadRequestEntitySelectorQuery,
            List<String> ids, LocalDate startDate, LocalDate endDate, Boolean emptyIdsAllowed, Boolean emptyDatesAllowed) {
        List<Map<String, Object>> results = new ArrayList<>();

        try {
            Map<String, Object> bindVars = new HashMap<>();
            if (!emptyIdsAllowed && ids != null && !ids.isEmpty()) {
                bindVars.put("ids", ids);
            }
            if (!emptyDatesAllowed) {
                if (startDate != null) {
                    bindVars.put("startDate", startDate.toString());
                }
                if (endDate != null) {
                    bindVars.put("endDate", endDate.toString());
                }
            }

            logger.info("Executing AQL query: {}\nWith bindVars: {}", downloadRequestEntitySelectorQuery, bindVars);

            AqlQueryOptions aqlQueryOptions = new AqlQueryOptions();
            try (ArangoCursor<Map> cursor = arangoDb.query(downloadRequestEntitySelectorQuery, Map.class, bindVars, aqlQueryOptions)) {
                while (cursor.hasNext()) {
                    results.add(cursor.next());
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }

        } catch (ArangoDBException e) {
            logger.error("AQL query execution failed: {}\nError: {}", downloadRequestEntitySelectorQuery, e.getMessage(), e);
        }
        return results;
        // Map<String, Object> entities = new HashMap<>();
        //
        // Map<String, Object> bindVars = new HashMap<>();
        // if (!emptyIdsAllowed && ids != null && !ids.isEmpty()) {
        //     bindVars.put("ids", ids);
        // }
        // if (!emptyDatesAllowed) {
        //     if (startDate != null) {
        //         bindVars.put("startDate", startDate.toString());
        //     }
        //     if (endDate != null) {
        //         bindVars.put("endDate", endDate.toString());
        //     }
        // }
        //
        // logger.info("Executing AQL query: {}\nWith bindVars: {}", aqlQuery, bindVars);
        //
        // AqlQueryOptions aqlQueryOptions = new AqlQueryOptions();
        //
        // try (ArangoCursor<Map> cursor = arangoDb.query(downloadRequestEntitySelectorQuery, Map.class, bindVars, aqlQueryOptions)) {
        //     while (cursor.hasNext()) {
        //         Map<String, Object> result = cursor.next();
        //
        //         for (Map.Entry<String, Object> entry : result.entrySet()) {
        //             String key = entry.getKey();
        //             Object entityValue = entry.getValue();
        //
        //             entities.put(key, entityValue);
        //         }
        //     }
        // }
        // catch (Exception e) {
        //     logger.error("Failed to execute AQL url", e);
        // }
        //
        // return entities;
    }
    private String buildAqlQuery(String collectionName, List<String> ids, String dateAttribute, LocalDate startDate, LocalDate endDate, Boolean emptyIdsAllowed, Boolean emptyDatesAllowed) {
        StringBuilder queryBuilder = new StringBuilder();
        queryBuilder.append("FOR doc IN ").append(collectionName).append(" ");

        List<String> conditions = new ArrayList<>();
        if (!emptyIdsAllowed && ids != null && !ids.isEmpty()) {
            conditions.add("doc._key IN @ids");
        }
        if (!emptyDatesAllowed && dateAttribute != null) {
            if (startDate != null) {
                conditions.add("doc." + dateAttribute + " >= @startDate");
            }
            if (endDate != null) {
                conditions.add("doc." + dateAttribute + " <= @endDate");
            }
        }

        if (!conditions.isEmpty()) {
            queryBuilder.append("FILTER ").append(String.join(" AND ", conditions)).append(" ");
        }

        queryBuilder.append("RETURN doc");

        return queryBuilder.toString();
    }

    private List<Map<String, Object>> executeAqlQuery(ArangoDatabase arangoDb, String aqlQuery, List<String> ids, LocalDate startDate, LocalDate endDate, Boolean emptyIdsAllowed, Boolean emptyDatesAllowed) {
        List<Map<String, Object>> results = new ArrayList<>();

        try {
            Map<String, Object> bindVars = new HashMap<>();
            if (!emptyIdsAllowed && ids != null && !ids.isEmpty()) {
                bindVars.put("ids", ids);
            }
            if (!emptyDatesAllowed) {
                if (startDate != null) {
                    bindVars.put("startDate", startDate.toString());
                }
                if (endDate != null) {
                    bindVars.put("endDate", endDate.toString());
                }
            }

            logger.info("Executing AQL query: {}\nWith bindVars: {}", aqlQuery, bindVars);

            AqlQueryOptions aqlQueryOptions = new AqlQueryOptions();
            try (ArangoCursor<Map> cursor = arangoDb.query(aqlQuery, Map.class, bindVars, aqlQueryOptions)) {
                while (cursor.hasNext()) {
                    results.add(cursor.next());
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }

        } catch (ArangoDBException e) {
            logger.error("AQL query execution failed: {}\nError: {}", aqlQuery, e.getMessage(), e);
        }
        return results;
    }
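For orientation, with an assumed collection name of users, a dateAttribute of createdAt, and non-empty ids and dates (collection and attribute names are illustrative, not from the commit), buildAqlQuery would produce roughly:

FOR doc IN users FILTER doc._key IN @ids AND doc.createdAt >= @startDate AND doc.createdAt <= @endDate RETURN doc

The ids and dates themselves are never concatenated into the query string; they travel as @ids, @startDate and @endDate bind variables, which is what keeps the AQL free of injection from request input.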
    private File writeResultsToCsv(List<Map<String, Object>> results, int limit) {
        try {
            // If results fit in a single file, create one CSV
            if (results.size() <= limit) {
                return writeSingleCsvFile(results, "arango-download-", ".csv");
            }

            // Otherwise, create multiple CSV files and zip them
            List<File> csvFiles = new ArrayList<>();
            int fileIndex = 1;

            for (int i = 0; i < results.size(); i += limit) {
                int endIndex = Math.min(i + limit, results.size());
                List<Map<String, Object>> chunk = results.subList(i, endIndex);

                File csvFile = writeSingleCsvFile(chunk, "arango-download-part" + fileIndex + "-", ".csv");
                if (csvFile != null) {
                    csvFiles.add(csvFile);
                    fileIndex++;
                }
            }

            // Create ZIP archive
            return createZipArchive(csvFiles, "arango-download-");

        } catch (IOException e) {
            logger.error("Failed to write results to CSV", e);
            return null;
        }
    }

    private File writeSingleCsvFile(List<Map<String, Object>> results, String prefix, String suffix) throws IOException {
        File csvFile = File.createTempFile(prefix, suffix);

        try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(csvFile), StandardCharsets.UTF_8))) {
            if (!results.isEmpty()) {
                List<String> headers = new ArrayList<>(results.get(0).keySet());
                writer.write(String.join(",", headers));
                writer.newLine();

                for (Map<String, Object> row : results) {
                    List<String> rowValues = headers.stream()
                            .map(header -> formatCsvField(row.get(header)))
                            .collect(Collectors.toList());
                    writer.write(String.join(",", rowValues));
                    writer.newLine();
                }
            }
        }
        return csvFile;
    }

    private File createZipArchive(List<File> files, String prefix) throws IOException {
        File zipFile = File.createTempFile(prefix, ".zip");

        try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(zipFile))) {
            for (File file : files) {
                addFileToZip(file, zos);
                // Delete temporary CSV files after adding to ZIP
                if (!file.delete()) {
                    logger.warn("Failed to delete temporary file: {}", file.getName());
                }
            }
        }

        return zipFile;
    }

    private void addFileToZip(File file, ZipOutputStream zos) throws IOException {
        try (FileInputStream fis = new FileInputStream(file)) {
            ZipEntry zipEntry = new ZipEntry(file.getName());
            zos.putNextEntry(zipEntry);

            byte[] buffer = new byte[8192];
            int length;
            while ((length = fis.read(buffer)) >= 0) {
                zos.write(buffer, 0, length);
            }

            zos.closeEntry();
        }
    }

    private String formatCsvField(Object value) {
        if (value == null) {
            return "\"\"";
        }
        String strValue = value.toString().replace("\"", "\"\"");
        return "\"" + strValue + "\"";
    }

    private String formatCsvRow(String[] row) {
        StringBuilder formattedRow = new StringBuilder();

        for (int i = 0; i < row.length; i++) {
            if (i > 0) {
                formattedRow.append(",");
            }

            formattedRow.append("\"");
            formattedRow.append(escapeCsvField(row[i]));
            formattedRow.append("\"");
        }

        return formattedRow.toString();
    }

    private String escapeCsvField(String field) {
        if (field == null) {
            return "";
        }
        return field.replace("\"", "\"\"");
    }
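Every field is wrapped in double quotes with embedded quotes doubled, which is the standard CSV escape. A quick illustration with an invented value:

class CsvEscapeSketch {
    public static void main(String[] args) {
        String value = "Ivanov \"Ivan\", Jr.";
        // Mirrors formatCsvField / escapeCsvField: double the quotes, then wrap the field
        String field = "\"" + value.replace("\"", "\"\"") + "\"";
        System.out.println(field); // "Ivanov ""Ivan"", Jr."
    }
}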
    private Map<String, Object> buildSqlQuery(SQLDownloadRequest request, List<String> ids,
            LocalDate startDate, LocalDate endDate, Map<String, Boolean> validationResults, int limit,
            int offset) {
        Boolean emptyIdsAllowed = validationResults.get(ValidationService.IS_EMPTY_IDS_ALLOWED);
        Boolean emptyDatesAllowed = validationResults.get(ValidationService.IS_EMPTY_DATES_ALLOWED);

        Map<String, Object> resultMap = new HashMap<>();
        String endpointArguments;
        String endpointArguments = "";

        String requestURL = prepareRequestURL(request, startDate, endDate);
        String requestURL = request.getRequestURL();

        if (ids == null || ids.isEmpty()) {
            resultMap.put("requestURL", requestURL
                    .replace("where id in ${endpointArguments}", ""));

            return resultMap;
        if (!emptyDatesAllowed) {
            requestURL = prepareRequestURL(request.getRequestURL(), startDate, endDate);
            requestURL = requestURL.replace("${DB}", request.getSqlConnectionParams().getJdbcDatabase());
        }

        if (requestURL.contains(":=")) {
            endpointArguments = "'{" + ids.stream()
                    .map(String::trim)
                    .collect(Collectors.joining(", ")) + "}'";
        } else {
            endpointArguments = "(" + ids.stream()
                    .map(s -> "'" + s.trim() + "'")
                    .collect(Collectors.joining(", ")) + ")";
        if (emptyIdsAllowed != null && emptyIdsAllowed) {
            requestURL = requestURL.replace("where id in ${endpointArguments}", "");
        }
        else if (ids == null || ids.isEmpty()) {
            requestURL = requestURL.replace("where id in ${endpointArguments}", "");
        }
        else {
            if (requestURL.contains(":=")) {
                endpointArguments =
                        "'{" + ids.stream().map(String::trim).collect(Collectors.joining(", ")) + "}'";
            }
            else {
                endpointArguments =
                        "(" + ids.stream().map(s -> "'" + s.trim() + "'").collect(Collectors.joining(", "))
                                + ")";
            }
            requestURL = requestURL.replace("${endpointArguments}", endpointArguments);
        }
        Map<String, List<String>> params = new HashMap<>();
        if (request.getRequestArguments() != null && !request.getRequestArguments().isEmpty()) {
            for (RequestArgument argument : request.getRequestArguments()) {

                if (argument.getRequestArgumentConnectionParams() != null) {
                    try (Connection connection = DatabaseConnection.getConnection(
                            argument.getRequestArgumentConnectionParams())) {
                            argument.getRequestArgumentConnectionParams())) {
                        String query = argument.getRequestArgumentURL();
                        List<String> result = fetchFileListFromDatabaseSQL(connection, query);

                        resultMap.put("ids", result);

                        if (result != null && !result.isEmpty()) {
                            String resultSet = "(" + result.stream()
                                    .map(s -> "'" + s.trim() + "'")
                                    .collect(Collectors.joining(", ")) + ")";

                            requestURL = requestURL.replace("${" + argument.getRequestArgumentName() + "}", resultSet);

                        query = prepareRequestURL(query, startDate, endDate);
                        query = query.replace("${endpointArguments}", endpointArguments);
                        int subOffset = 0;
                        List<String> aggregatedIds = new ArrayList<>();
                        while (true) {
                            String paginatedQuery = query.replaceAll(";(?=[^;]*$)", " ") + " LIMIT " + limit + " OFFSET " + subOffset;
                            List<String> result = fetchFileListFromDatabaseSQL(connection, paginatedQuery);
                            if (result.isEmpty()) break;
                            aggregatedIds.addAll(result);
                            if (result.size() < limit) break;
                            subOffset += limit;
                        }

                        params.put(argument.getRequestArgumentName(), aggregatedIds);
                    }
                    catch (SQLException e) {
                        logger.error("Failed to execute query for RequestArgument", e);
                        throw new RuntimeException("Error executing database query: " + argument.getRequestArgumentURL(), e);
                    }
                }
            }
        }
        if (!params.isEmpty()) {
            if (params.size() == 1) {
                Map.Entry<String, List<String>> entry = params.entrySet().iterator().next();
                String key = entry.getKey();
                List<String> value = entry.getValue();
                resultMap.put(REQUEST_WITH_ADDITIONAL_ID, new SqlDownloadBuildQueryResponse(requestURL, key, value));
                return resultMap;
            }
            else {
                for (Map.Entry<String, List<String>> entry : params.entrySet()) {
                    String resultSet = "(" + entry.getValue().stream()
                            .map(s -> "'" + s.trim() + "'")
                            .collect(Collectors.joining(", ")) + ")";
                    requestURL = requestURL.replace("${" + entry.getKey() + "}",
                            resultSet
                    );
                }
                requestURL = requestURL.replaceAll(";(?=[^;]*$)", " ") + " LIMIT " + limit + " OFFSET "
                        + offset + ";";
            }
        }
        else {
            requestURL = requestURL.replaceAll(";(?=[^;]*$)", " ") + " LIMIT " + limit + " OFFSET "
                    + offset + ";";
        }

        resultMap.put("requestURL", requestURL
                .replace("${endpointArguments}", endpointArguments));

        resultMap.put("requestURL", requestURL);
        return resultMap;
    }

    private String prepareRequestURL(DownloadRequest request, LocalDate startDate,
            LocalDate endDate) {
        String requestURL = request.getRequestURL();

    private String prepareRequestURL(String requestURL, LocalDate startDate,
            LocalDate endDate) {
        if (startDate != null) {
            requestURL = requestURL.replace("${startDate}", startDate.toString());
        }
        if (endDate != null) {
            requestURL = requestURL.replace("${endDate}", endDate.toString());
        }
        return requestURL.replace("${DB}", request.getSqlConnectionParams().getJdbcDatabase());
        return requestURL;
    }

    private List<String[]> executeSqlQuery(Connection connection, String query) throws SQLException {
@ -167,4 +601,10 @@ public class DownloadService {
|
|||
return results;
|
||||
}
|
||||
|
||||
private <T> Collection<List<T>> partitionList(List<T> list, int size) {
|
||||
return IntStream.range(0, (list.size() + size - 1) / size)
|
||||
.mapToObj(i -> list.subList(i * size, Math.min((i + 1) * size, list.size())))
|
||||
.map(ArrayList::new)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
}
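
The partitionList helper above chunks a list into fixed-size sublists. A minimal, self-contained sketch of how it behaves; the wrapper class and sample values are hypothetical, added only for illustration:

// Hypothetical standalone sketch of the partitionList helper shown above.
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

class PartitionSketch {
    static <T> Collection<List<T>> partitionList(List<T> list, int size) {
        return IntStream.range(0, (list.size() + size - 1) / size)
                .mapToObj(i -> list.subList(i * size, Math.min((i + 1) * size, list.size())))
                .map(ArrayList::new)
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        // Seven ids split into chunks of three: [[a, b, c], [d, e, f], [g]]
        System.out.println(partitionList(List.of("a", "b", "c", "d", "e", "f", "g"), 3));
    }
}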
@@ -1,5 +1,7 @@
package org.micord.service;

import java.io.FileNotFoundException;
import java.io.StringReader;
import java.net.HttpURLConnection;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;

@@ -8,9 +10,39 @@ import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.micord.config.ArangoDBConnection;
import org.micord.config.DatabaseConnection;
import org.micord.config.S3HttpConnection;
import org.micord.enums.RequestArgumentType;
import org.micord.models.requests.AqlRequest;
import org.micord.models.requests.AqlRequestParameter;
import org.micord.models.requests.BaseRequest;
import org.micord.models.requests.RequestArgument;
import org.micord.models.requests.RequestParameters;
import org.micord.models.requests.Requests;
import org.micord.models.requests.S3Request;
import org.micord.models.requests.SqlRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.InputSource;

import com.arangodb.ArangoCursor;
import com.arangodb.ArangoDBException;

@@ -18,20 +50,7 @@ import com.arangodb.ArangoDatabase;
import com.arangodb.entity.StreamTransactionEntity;
import com.arangodb.model.AqlQueryOptions;
import com.arangodb.model.StreamTransactionOptions;
import org.micord.config.ArangoDBConnection;
import org.micord.config.DatabaseConnection;
import org.micord.config.S3HttpConnection;
import org.micord.enums.RequestArgumentType;
import org.micord.models.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

/**
 * @author Maksim Tereshin
 */
@Service
public class RequestService {

@@ -40,26 +59,75 @@ public class RequestService {
@Autowired
private HttpClient httpClient;

public void processS3Requests(List<S3Request> s3Requests, List<String> ids) {
logger.debug("Starting processing of S3 requests");
if (s3Requests != null) {
s3Requests.forEach(request -> {
List<CompletableFuture<Void>> futures = ids.stream()
.map(id -> CompletableFuture.runAsync(() -> processS3Request(request, id)))
.toList();
@Autowired
private ValidationService validationService;

CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
.thenRun(() -> logger.info("Successfully processed all S3 requests."))
.exceptionally(ex -> {
logger.error("Failed to process S3 requests", ex);
return null;
});
});
private void processS3Request(S3Request request, RequestParameters parameters, Map<String, Boolean> validationResults) {
logger.info("B. Starting processing of single S3 request");
try {
List<String> files = new ArrayList<>();
List<String> ids = parameters.getIds();

if (request.getRequestArguments() != null && !request.getRequestArguments().isEmpty()) {
for (RequestArgument argument : request.getRequestArguments()) {
try (Connection connection = DatabaseConnection.getConnection(
argument.getRequestArgumentConnectionParams())) {

Map<String, Object> query = buildSqlQueryForS3(argument.getRequestArgumentURL(), parameters, validationResults);
logger.info("C. Calling query {} for ids {}: ", query.get("requestURL"), ids);
logger.debug("Starting fetching paths from database for S3 request");
long startExecTime = System.currentTimeMillis();

List<String> result = fetchFileListFromDatabaseSQL(connection, (String) query.get("requestURL"));
String formattedResult = IntStream.range(0, result.size())
.mapToObj(i -> (i + 1) + ". " + result.get(i))
.collect(Collectors.joining("\n"));

logger.info("D. Found files for query {}:\n{}", query.get("requestURL"), formattedResult);

if (result != null && !result.isEmpty()) {
files.addAll(result);
}
long endExecTime = System.currentTimeMillis();
logger.debug("Paths fetched in {} ms", endExecTime - startExecTime);
} catch (SQLException e) {
logger.error("Failed to execute query for RequestArgument: {}", argument.getRequestArgumentURL(), e);
throw new RuntimeException("Database query error for argument: " + argument.getRequestArgumentURL(), e);
}
}
}

String formattedFiles = IntStream.range(0, files.size())
.mapToObj(i -> (i + 1) + ". " + files.get(i))
.collect(Collectors.joining("\n"));

logger.info("E. Found files for ids {}:\n{}", ids, formattedFiles);

if (files.isEmpty()) {
logger.warn("No files found for S3 request {}", request);
} else {

for (String file : files) {
logger.info("F. Starting query S3 for file: {}", file);
try {
processFileForS3Request(request, file);
} catch (RuntimeException e) {
logger.error("Error processing file: {}", file, e);
throw e; // Rethrow to propagate for exception handling
}
}

}

} catch (Exception e) {
logger.error("Failed to process S3 request: {}", request, e);
throw e; // Rethrow exception to propagate to the handler
}
}

private void processS3Request(S3Request request, String id) {
logger.debug("Starting processing of S3 request for id: {}", id);
private void processS3Request(S3Request request, List<String> ids, Map<String, Boolean> validationResults) {
logger.info("B. Starting processing of single S3 request");
Boolean emptyIdsAllowed = validationResults.getOrDefault(ValidationService.IS_EMPTY_IDS_ALLOWED, false);
try {
List<String> files = new ArrayList<>();

@@ -67,87 +135,208 @@ public class RequestService {
for (RequestArgument argument : request.getRequestArguments()) {
try (Connection connection = DatabaseConnection.getConnection(
argument.getRequestArgumentConnectionParams())) {
String query = argument.getRequestArgumentURL();

String requestURL = argument.getRequestArgumentURL();
if (!emptyIdsAllowed) {
Map<String, Object> sqlQueryForS3 = buildSqlQueryForS3(argument.getRequestArgumentURL(), ids);
requestURL = (String) sqlQueryForS3.get("requestURL");
}
logger.info("C. Calling query {} for ids {}: ", requestURL, ids);
logger.debug("Starting fetching paths from database for S3 request");
long startExecTime = System.currentTimeMillis();
List<String> result = fetchFileListFromDatabaseSQL(connection, query);

List<String> result = fetchFileListFromDatabaseSQL(connection, requestURL);
String formattedResult = IntStream.range(0, result.size())
.mapToObj(i -> (i + 1) + ". " + result.get(i))
.collect(Collectors.joining("\n"));

logger.info("D. Found files for query {}:\n{}", requestURL, formattedResult);

if (result != null && !result.isEmpty()) {
files.addAll(result);
}
long endExecTime = System.currentTimeMillis();
logger.debug("Paths fetched in {} ms", endExecTime - startExecTime);
}
catch (SQLException e) {
logger.error("Failed to execute query for RequestArgument", e);
} catch (SQLException e) {
logger.error("Failed to execute query for RequestArgument: {}", argument.getRequestArgumentURL(), e);
throw new RuntimeException("Database query error for argument: " + argument.getRequestArgumentURL(), e);
}
}
}

files.forEach(file -> {
HttpRequest httpRequest;
logger.debug("Starting building HTTP request for S3 request");
long startExecTime = System.currentTimeMillis();
try {
httpRequest = S3HttpConnection.buildHttpRequest(request, file);
}
catch (Exception e) {
throw new RuntimeException(e);
}
long endExecTime = System.currentTimeMillis();
logger.debug("HTTP request built in {} ms", endExecTime - startExecTime);
String formattedFiles = IntStream.range(0, files.size())
.mapToObj(i -> (i + 1) + ". " + files.get(i))
.collect(Collectors.joining("\n"));

httpClient.sendAsync(httpRequest, HttpResponse.BodyHandlers.ofString())
.thenAccept(response -> {
if (response.statusCode() == HttpURLConnection.HTTP_NO_CONTENT
|| response.statusCode() == HttpURLConnection.HTTP_OK) {
logger.info("Successfully deleted object for ID {}", id);
}
else {
logger.error("Failed to delete object for ID {}. Response code: {}", id,
response.statusCode()
);
}
})
.exceptionally(ex -> {
logger.error("Failed to delete object for ID {}", id, ex);
return null;
});
});
logger.info("E. Found files for ids {}:\n{}", ids, formattedFiles);

}
catch (Exception e) {
logger.error("Failed to process S3 request for id: {}", id, e);
if (files.isEmpty()) {
logger.warn("No files found for S3 request {}", request);
} else {

for (String file : files) {
logger.info("F. Starting query S3 for file: {}", file);
try {
processFileForS3Request(request, file);
} catch (RuntimeException e) {
logger.error("Error processing file: {}", file, e);
throw e; // Rethrow to propagate for exception handling
}
}

}

} catch (Exception e) {
logger.error("Failed to process S3 request: {}", request, e);
throw e; // Rethrow exception to propagate to the handler
}
}

private void processFileForS3Request(S3Request request, String file) {
if (file == null || file.isBlank()) {
logger.warn("Skipping invalid file path: {}", file);
throw new RuntimeException("Invalid file path");
}

try {
logger.debug("Starting building HTTP request for file: {}", file);
long startExecTime = System.currentTimeMillis();
HttpRequest httpRequest = S3HttpConnection.buildHttpRequest(request, file);
long endExecTime = System.currentTimeMillis();
logger.debug("HTTP request built in {} ms for file: {}", endExecTime - startExecTime, file);

HttpResponse<String> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
if (response.statusCode() == HttpURLConnection.HTTP_NO_CONTENT || response.statusCode() == HttpURLConnection.HTTP_OK) {
logger.info("Successfully deleted object {}", file);
} else {
handleErrorResponse(response, file);
}
} catch (Exception e) {
logger.error("Error sending HTTP request for file: {}", file, e);
throw new RuntimeException("HTTP request error for file: " + file, e);
}
}

private void handleErrorResponse(HttpResponse<String> response, String file) {
try {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = factory.newDocumentBuilder();
InputSource is = new InputSource(new StringReader(response.body()));
Document doc = builder.parse(is);
Element root = doc.getDocumentElement();
String message = root.getElementsByTagName("Message").item(0).getTextContent();
logger.error("Failed to delete object {}. Response code: {}. Error message: {}", file, response.statusCode(), message);
throw new RuntimeException("Failed to delete object: " + file);
} catch (Exception e) {
logger.error("Failed to parse error response for file {}", file, e);
throw new RuntimeException("Error parsing HTTP response: " + response.body(), e);
}
}

@Transactional
public void processSqlAndAqlRequests(Requests config, List<String> ids) {
public void processSqlAndAqlRequests(Requests config, List<String> ids) throws SQLException, FileNotFoundException {
logger.debug("Starting transactional processing of requests");
if (config.getS3Requests() != null && !config.getS3Requests().isEmpty()) {
logger.info("A. Starting processing of S3 requests");

for (S3Request request : config.getS3Requests()) {
Map<String, Boolean> validationResults = validationService.validateRequest(request, ids);
processS3Request(request, ids, validationResults);
}
}

if (config.getSqlRequests() != null) {
for (SqlRequest request : config.getSqlRequests()) {
processSqlRequests(request, ids);
Map<String, Boolean> validationResults = validationService.validateRequest(request, ids);
processSqlRequests(request, ids, validationResults);
}
}

if (config.getAqlRequests() != null) {
for (AqlRequest request : config.getAqlRequests()) {
processAqlRequests(request, ids);
Map<String, Boolean> validationResults = validationService.validateRequest(request, ids);
processAqlRequests(request, ids, validationResults);
}
}
processS3Requests(config.getS3Requests(), ids);
}

private void processSqlRequests(SqlRequest request, List<String> ids) {
@Transactional
public void processSqlAndAqlRequests(Requests config, RequestParameters parameters) throws SQLException, FileNotFoundException {
logger.debug("Starting transactional processing of requests");
if (config.getS3Requests() != null && !config.getS3Requests().isEmpty()) {

logger.info("A. Starting processing of S3 requests");

for (S3Request request : config.getS3Requests()) {
Map<String, Boolean> validationResults = validationService.validateMilitaryNoticeRequest(request, parameters);
processS3Request(request, parameters, validationResults);
}
}

if (config.getSqlRequests() != null) {
for (SqlRequest request : config.getSqlRequests()) {
Map<String, Boolean> validationResults = validationService.validateMilitaryNoticeRequest(request, parameters);
processSqlRequests(request, parameters, validationResults);
}
}

if (config.getAqlRequests() != null) {
for (AqlRequest request : config.getAqlRequests()) {
Map<String, Boolean> validationResults = validationService.validateMilitaryNoticeRequest(request, parameters);
processAqlRequests(request, parameters.getIds(), validationResults);
}
}
}

private void processSqlRequests(SqlRequest request, List<String> ids, Map<String, Boolean> validationResults) {
logger.debug("Starting transactional processing of SQL requests");
Map<String, Object> query = buildSqlQuery(request, ids);
logger.debug("Opening connection for SQL Request: {}", request.getRequestURL());
Boolean emptyIdsAllowed = validationResults.getOrDefault(ValidationService.IS_EMPTY_IDS_ALLOWED, false);
String requestURL;
Map<String, Object> query = null;

if (emptyIdsAllowed != null && emptyIdsAllowed) {
requestURL = request.getRequestURL();
logger.info("Empty IDs allowed. Using original request URL: {}", requestURL);
} else {
query = buildSqlQuery(request, ids);
requestURL = (String) query.get("requestURL");
}

logger.debug("Opening connection for SQL Request: {}", request.getRequestURL().replaceAll("password=\\S+", "password=***"));
long startExecTime = System.currentTimeMillis();
try (Connection connection = DatabaseConnection.getConnection(request.getSqlConnectionParams())) {
executeSqlQuery(connection, requestURL);

if (query != null) {
List<String> queryIds = (List<String>) query.get("ids");
if (queryIds != null && !queryIds.isEmpty()) {
ids.addAll(queryIds);
} else {
logger.warn("No IDs found for the query");
}
}
long endExecTime = System.currentTimeMillis();
logger.debug("SQL request executed in {} ms", endExecTime - startExecTime);
logger.info("Successfully executed query {} for IDs: ({})", requestURL, String.join(", ", ids));
} catch (SQLException e) {
logger.error("SQL execution failed for query: {}", requestURL, e);
throw new RuntimeException("Error executing SQL query", e);
}
}

private void processSqlRequests(SqlRequest request, RequestParameters parameters, Map<String, Boolean> validationResults) {
logger.debug("Starting transactional processing of SQL requests");
Map<String, Object> query = buildSqlQuery(request, parameters, validationResults);
List<String> ids = parameters.getIds();
logger.debug("Opening connection for SQL Request: {}", request.getRequestURL().replaceAll("password=\\S+", "password=***"));
long startExecTime = System.currentTimeMillis();
try (Connection connection = DatabaseConnection.getConnection(
request.getSqlConnectionParams())) {
String requestURL = (String) query.get("requestURL");
executeSqlQuery(connection, requestURL);

List<String> queryIds = (List<String>) query.get("ids");
if (queryIds != null && !queryIds.isEmpty()) {
ids.addAll(queryIds);
} else {

@@ -160,66 +349,193 @@ public class RequestService {
}
catch (SQLException e) {
logger.error("SQL execution failed for query: {}", query, e);
throw new RuntimeException("Error executing SQL query", e);
}
}

private Map<String, Object> buildSqlQuery(SqlRequest request, List<String> ids) {
logger.debug("Starting building SQL query for request: {}", request.getRequestURL());
long startExecTime = System.currentTimeMillis();
Map<String, Object> resultMap = new HashMap<>();
String endpointArguments;
private Map<String, Object> buildSqlQueryForS3(String requestURL, List<String> ids) {
logger.debug("Starting building SQL query for request: {}", requestURL);
long startExecTime = System.currentTimeMillis();
Map<String, Object> resultMap = new HashMap<>();
String endpointArguments;

String requestURL = request.getRequestURL();
if (requestURL.contains(":=")) {
endpointArguments = "'{" + ids.stream()
.map(String::trim)
.collect(Collectors.joining(", ")) + "}'";
} else {
endpointArguments = "(" + ids.stream()
.map(s -> "'" + s.trim() + "'")
.collect(Collectors.joining(", ")) + ")";
}

if (requestURL.contains(":=")) {
endpointArguments = "'{" + ids.stream()
.map(String::trim)
.collect(Collectors.joining(", ")) + "}'";
} else {
endpointArguments = "(" + ids.stream()
.map(s -> "'" + s.trim() + "'")
.collect(Collectors.joining(", ")) + ")";
resultMap.put("requestURL", requestURL
.replace("${endpointArguments}", endpointArguments));

long endExecTime = System.currentTimeMillis();
logger.debug("SQL query for S3 built in {} ms", endExecTime - startExecTime);

return resultMap;
}

if (request.getRequestArguments() != null && !request.getRequestArguments().isEmpty()) {
for (RequestArgument argument : request.getRequestArguments()) {
private Map<String, Object> buildSqlQueryForS3(String url, RequestParameters parameters, Map<String, Boolean> validationResults) {
logger.debug("Starting building SQL query for request: {}", url);
long startExecTime = System.currentTimeMillis();
Map<String, Object> resultMap = new HashMap<>();

if (argument.getRequestArgumentConnectionParams() != null) {
logger.debug("Opening connection for SQL RequestArgument: {}", argument.getRequestArgumentName());
try (Connection connection = DatabaseConnection.getConnection(
argument.getRequestArgumentConnectionParams())) {
String query = argument.getRequestArgumentURL();
List<String> result = fetchFileListFromDatabaseSQL(connection, query);
Boolean isEmptyDatesAllowed = validationResults.getOrDefault(ValidationService.IS_EMPTY_DATES_ALLOWED, false);

resultMap.put("ids", result);
String requestURL = applyDateFilter(url, parameters.getStartDate(), parameters.getEndDate(), isEmptyDatesAllowed);
if (isEmptyDatesAllowed) {
logger.info("Skipping date filtering as empty dates are allowed.");
}

String finalUrl = applyEndpointArguments(requestURL, parameters.getIds());
resultMap.put("requestURL", finalUrl);

long endExecTime = System.currentTimeMillis();
logger.debug("SQL query for S3 built in {} ms", endExecTime - startExecTime);

return Collections.unmodifiableMap(resultMap);
}

private String applyDateFilter(String url, LocalDate startDate, LocalDate endDate, boolean skipFiltering) {
if (!skipFiltering) {
return prepareDatesFilterInRequestURL(url, startDate, endDate);
}
return url;
}

private String applyEndpointArguments(String requestURL, List<String> ids) {
String endpointArguments = formatEndpointArguments(requestURL, ids);
return requestURL.replace("${endpointArguments}", endpointArguments);
}

private String formatEndpointArguments(String requestURL, List<String> ids) {
if (requestURL.contains(":=")) {
return "'{" + ids.stream()
.map(String::trim)
.collect(Collectors.joining(", ")) + "}'";
} else {
return "(" + ids.stream()
.map(s -> "'" + s.trim() + "'")
.collect(Collectors.joining(", ")) + ")";
}
}
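
For reference, a small sketch of the two shapes formatEndpointArguments produces: an array-literal string when the query text contains ":=" and a quoted IN-clause otherwise. The class and sample ids below are illustrative only:

import java.util.List;
import java.util.stream.Collectors;

class EndpointArgumentsSketch {
    public static void main(String[] args) {
        List<String> ids = List.of(" id1 ", "id2");

        // Query contains ":=" -> unquoted, trimmed ids inside an array literal
        String arrayForm = "'{" + ids.stream().map(String::trim)
                .collect(Collectors.joining(", ")) + "}'";
        System.out.println(arrayForm);    // '{id1, id2}'

        // Plain SQL -> each id quoted, ready for "... where id in ${endpointArguments}"
        String inClauseForm = "(" + ids.stream().map(s -> "'" + s.trim() + "'")
                .collect(Collectors.joining(", ")) + ")";
        System.out.println(inClauseForm); // ('id1', 'id2')
    }
}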

private String prepareDatesFilterInRequestURL(String requestURL, LocalDate startDate, LocalDate endDate) {

if (startDate != null) {
requestURL = requestURL.replace("${startDate}", startDate.toString());
}

if (endDate != null) {
requestURL = requestURL.replace("${endDate}", endDate.toString());
}

return requestURL;
}

private Map<String, Object> buildSqlQuery(BaseRequest request, List<String> ids) {
logger.debug("Starting building SQL query for request: {}", request.getRequestURL().replaceAll("password=\\S+", "password=***"));
long startExecTime = System.currentTimeMillis();
Map<String, Object> resultMap = new HashMap<>();
String endpointArguments;

String requestURL = request.getRequestURL();

if (requestURL.contains(":=")) {
endpointArguments = "'{" + ids.stream()
.map(String::trim)
.collect(Collectors.joining(", ")) + "}'";
} else {
endpointArguments = "(" + ids.stream()
.map(s -> "'" + s.trim() + "'")
.collect(Collectors.joining(", ")) + ")";
}

if (request.getRequestArguments() != null && !request.getRequestArguments().isEmpty()) {
for (RequestArgument argument : request.getRequestArguments()) {

if (argument.getRequestArgumentConnectionParams() != null) {
logger.debug("Opening connection for SQL RequestArgument: {}", argument.getRequestArgumentName());
try (Connection connection = DatabaseConnection.getConnection(
argument.getRequestArgumentConnectionParams())) {
String query = argument.getRequestArgumentURL();
List<String> result = fetchFileListFromDatabaseSQL(connection, query);

resultMap.put("ids", result);

if (result != null && !result.isEmpty()) {
String resultSet = "(" + result.stream()
.map(s -> "'" + s.trim() + "'")
.collect(Collectors.joining(", ")) + ")";

requestURL = requestURL.replace("${" + argument.getRequestArgumentName() + "}", resultSet);

}

}
catch (SQLException e) {
logger.error("Failed to execute query for RequestArgument", e);
}
}
}
}

resultMap.put("requestURL", requestURL
.replace("${endpointArguments}", endpointArguments));

long endExecTime = System.currentTimeMillis();
logger.debug("SQL query built in {} ms", endExecTime - startExecTime);

return resultMap;
}

resultMap.put("requestURL", requestURL
.replace("${DB}", request.getSqlConnectionParams().getJdbcDatabase())
.replace("${endpointArguments}", endpointArguments));
private Map<String, Object> buildSqlQuery(BaseRequest request, RequestParameters parameters, Map<String, Boolean> validationResults) {
logger.debug("Starting building SQL query for request: {}", request.getRequestURL().replaceAll("password=\\S+", "password=***"));
long startExecTime = System.currentTimeMillis();
Map<String, Object> resultMap = new HashMap<>();

long endExecTime = System.currentTimeMillis();
logger.debug("SQL query built in {} ms", endExecTime - startExecTime);
Boolean isEmptyDatesAllowed = validationResults.getOrDefault(ValidationService.IS_EMPTY_DATES_ALLOWED, false);

return resultMap;
}
String requestURL = applyDateFilter(request.getRequestURL(), parameters.getStartDate(), parameters.getEndDate(), isEmptyDatesAllowed);
if (isEmptyDatesAllowed) {
logger.info("Skipping date filtering as empty dates are allowed.");
}
List<String> ids = parameters.getIds();

if (request.getRequestArguments() != null && !request.getRequestArguments().isEmpty()) {
for (RequestArgument argument : request.getRequestArguments()) {
if (argument.getRequestArgumentConnectionParams() != null) {
logger.debug("Opening connection for SQL RequestArgument: {}", argument.getRequestArgumentName());
try (Connection connection = DatabaseConnection.getConnection(argument.getRequestArgumentConnectionParams())) {
String query = argument.getRequestArgumentURL();
List<String> result = fetchFileListFromDatabaseSQL(connection, query);
resultMap.put("ids", result);

if (result != null && !result.isEmpty()) {
String resultSet = "(" + result.stream()
.map(s -> "'" + s.trim() + "'")
.collect(Collectors.joining(", ")) + ")";
requestURL = requestURL.replace("${" + argument.getRequestArgumentName() + "}", resultSet);
}
} catch (SQLException e) {
logger.error("Failed to execute query for RequestArgument", e);
}
}
}
}

String finalUrl = applyEndpointArguments(requestURL, ids);
resultMap.put("requestURL", finalUrl);

long endExecTime = System.currentTimeMillis();
logger.debug("SQL query built in {} ms", endExecTime - startExecTime);

return resultMap;
}

private boolean executeSqlQuery(Connection connection, String query) throws SQLException {

@@ -240,31 +556,53 @@ private Map<String, Object> buildSqlQuery(SqlRequest request, List<String> ids)
return results;
}

private void processAqlRequests(AqlRequest request, List<String> ids) {
private void processAqlRequests(AqlRequest request, List<String> ids, Map<String, Boolean> validationResults) {
ArangoDatabase arangoDb = ArangoDBConnection.getConnection(request.getAqlConnectionParams());

// TODO: implement for multiple request arguments
RequestArgument requestArgument = request.getRequestArguments().get(0);
List<String> aqlCollectionRead = request.getReadCollections().stream()
.map(AqlRequestCollection::getCollectionName)
.toList();
String aqlCollectionWrite = request.getWriteCollections().stream()
.map(AqlRequestCollection::getCollectionName)
.findFirst().orElseGet(null);

List<String> collectionNames = new ArrayList<>();

request.getAqlRequestCollections().forEach(collection -> {
collectionNames.add(collection.getType());
});
StreamTransactionEntity tx = null;
try {
StreamTransactionOptions options = new StreamTransactionOptions()
.writeCollections(aqlCollectionWrite)
.readCollections(aqlCollectionRead.toArray(new String[0]));
.writeCollections(collectionNames.toArray(new String[0]))
.readCollections(collectionNames.toArray(new String[0]));

tx = arangoDb.beginStreamTransaction(options);
String transactionId = tx.getId();

logger.info("Stream transaction started with ID: {}", transactionId);

Map<String, Object> entities = executeSelectAqlRequest(arangoDb, aqlCollectionWrite, requestArgument, ids, transactionId);
executeMainAqlRequest(arangoDb, aqlCollectionWrite, request.getRequestURL(), entities, transactionId);
Map<String, Object> entities = executeSelectAqlRequest(arangoDb, request, requestArgument, ids, transactionId, validationResults);

if (entities.isEmpty()) {
logger.warn("No entities found for main AQL request.");
return;
}

request.getAqlRequestCollections().forEach(collection -> {
String type = collection.getType();
String entityType;

if (Objects.equals(type, "applications")) {
entityType = "applicationId";
} else {
entityType = type + "Id";
}

Object entityIds = entities.get(entityType);

if (entityIds instanceof String) {
entityIds = Collections.singletonList((String) entityIds);
}

executeMainAqlRequest(arangoDb, entityIds, collection.getCollectionUrl(), type, transactionId);
});

arangoDb.commitStreamTransaction(transactionId);
logger.info("Stream transaction with ID {} committed successfully", transactionId);

@@ -282,20 +620,53 @@ private Map<String, Object> buildSqlQuery(SqlRequest request, List<String> ids)
}

private Map<String, Object> executeSelectAqlRequest(ArangoDatabase arangoDb,
String aqlCollectionWrite,
RequestArgument requestArgument,
List<String> ids, String transactionId) {
AqlRequest request,
RequestArgument requestArgument,
List<String> ids, String transactionId, Map<String, Boolean> validationResults) {
Map<String, Object> entities = new HashMap<>();

String url = requestArgument.getRequestArgumentURL();
RequestArgumentType type = requestArgument.getType();

if (type == RequestArgumentType.AQL) {
Boolean emptyIdsAllowed = validationResults.getOrDefault(ValidationService.IS_EMPTY_IDS_ALLOWED, false);

Map<String, Object> bindVars = new HashMap<>();
bindVars.put("ids", ids);

if (!emptyIdsAllowed) bindVars.put("ids", ids);

if (request.getAqlRequestParameters() != null) {
for (AqlRequestParameter parameter : request.getAqlRequestParameters().getParameters()) {
try (Connection connection = DatabaseConnection.getConnection(parameter.getSqlConnectionParams())) {
String sqlQuery = parameter.getAqlRequestParameterURL();
if (sqlQuery.contains("${endpointArguments}")) {
String inClause = ids.stream()
.map(id -> "'" + id + "'")
.collect(Collectors.joining(", "));
sqlQuery = sqlQuery.replace("${endpointArguments}", "(" + inClause + ")");
}

logger.info("Executing SQL query: {}", sqlQuery);
List<String> parameterValues = fetchFileListFromDatabaseSQL(connection, sqlQuery);
if (parameterValues != null && !parameterValues.isEmpty()) {
logger.info("Parameter type: {}; values: {}", parameter.getType(), parameterValues.get(0));
} else {
logger.info("No values found for parameter type: {}", parameter.getType());
}
bindVars.put(parameter.getType(), parameterValues);
} catch (SQLException e) {
logger.error("Failed to fetch parameter values for type: {}", parameter.getType(), e);
throw new RuntimeException("Failed to execute SQL query: " + e.getMessage(), e);
}
}
}

AqlQueryOptions aqlQueryOptions = new AqlQueryOptions().streamTransactionId(transactionId);

bindVars.forEach((key, value) -> {
logger.info("Key: {}; Value: {}", key, value);
});

try (ArangoCursor<Map> cursor = arangoDb.query(url, Map.class, bindVars, aqlQueryOptions)) {
while (cursor.hasNext()) {
Map<String, Object> result = cursor.next();

@@ -311,41 +682,25 @@ private Map<String, Object> buildSqlQuery(SqlRequest request, List<String> ids)
catch (Exception e) {
logger.error("Failed to execute AQL url", e);
}
} else if (type == RequestArgumentType.SQL) {
if (requestArgument.getRequestArgumentConnectionParams() != null) {
try (Connection connection = DatabaseConnection.getConnection(
requestArgument.getRequestArgumentConnectionParams())) {
String query = requestArgument.getRequestArgumentURL();
List<String> result = fetchFileListFromDatabaseSQL(connection, query);

entities.put(aqlCollectionWrite, result);
}
catch (SQLException e) {
logger.error("Failed to execute query for RequestArgument", e);
}
}
}

return entities;
}

private void executeMainAqlRequest(ArangoDatabase arangoDb, String aqlCollectionWrite, String requestURL,
Map<String, Object> entities, String transactionId) {
if (entities == null || entities.isEmpty()) {
logger.warn("No entities found for main AQL request.");
return;
}
private void executeMainAqlRequest(ArangoDatabase arangoDb, Object entityIds, String requestURL, String type, String transactionId) {

Map<String, Object> bindVars = new HashMap<>();
// TODO: verify correctness of received entities and compare keys
Object writeEntity = entities.get(aqlCollectionWrite);
bindVars.put("ids", entities);
bindVars.put("ids", entityIds);

AqlQueryOptions aqlQueryOptions = new AqlQueryOptions().streamTransactionId(transactionId);

arangoDb.query(requestURL, null, bindVars, aqlQueryOptions);

logger.info("Successfully removed {}: {}", aqlCollectionWrite, writeEntity);
try {
arangoDb.query(requestURL, null, bindVars, aqlQueryOptions);
logger.info("Successfully removed {} for ids: {}", type, entityIds);
} catch (Exception e) {
logger.error("Failed to execute AQL request: {}", e.getMessage(), e);
}
}

}
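
A condensed sketch of the stream-transaction flow used by processAqlRequests above, built only from the driver calls visible in this diff plus abortStreamTransaction, which is assumed to be the rollback counterpart on the same com.arangodb driver; the collection name and AQL text are placeholders:

import com.arangodb.ArangoDatabase;
import com.arangodb.entity.StreamTransactionEntity;
import com.arangodb.model.AqlQueryOptions;
import com.arangodb.model.StreamTransactionOptions;

import java.util.List;
import java.util.Map;

class AqlStreamTransactionSketch {
    void removeByIds(ArangoDatabase arangoDb, List<String> ids) {
        // Declare every touched collection for both read and write, as processAqlRequests does.
        StreamTransactionOptions options = new StreamTransactionOptions()
                .writeCollections("notices")   // placeholder collection name
                .readCollections("notices");
        StreamTransactionEntity tx = arangoDb.beginStreamTransaction(options);
        try {
            AqlQueryOptions queryOptions = new AqlQueryOptions().streamTransactionId(tx.getId());
            // Placeholder AQL: remove every document whose key appears in @ids.
            arangoDb.query("FOR id IN @ids REMOVE id IN notices", null,
                    Map.of("ids", ids), queryOptions);
            arangoDb.commitStreamTransaction(tx.getId());
        } catch (Exception e) {
            arangoDb.abortStreamTransaction(tx.getId()); // assumed rollback call
            throw new RuntimeException("AQL stream transaction failed", e);
        }
    }
}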
@@ -0,0 +1,213 @@
package org.micord.service;

import org.micord.config.DatabaseConnection;
import org.micord.enums.ConfigType;
import org.micord.exceptions.IllegalRequestParametersException;
import org.micord.exceptions.ValidationException;
import org.micord.models.requests.BaseRequest;
import org.micord.models.requests.RequestParameters;
import org.micord.models.requests.RequestValidationRules;
import org.micord.models.requests.downloads.BaseDownloadRequest;
import org.micord.models.validations.ValidationRule;
import org.micord.models.validations.ValidationRules;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.FileNotFoundException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.LocalDate;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.stream.Collectors;

@Service
public class ValidationService {

public static final String IS_EMPTY_DATES_ALLOWED = "isEmptyDatesAllowed";
public static final String IS_EMPTY_IDS_ALLOWED = "isEmptyIdsAllowed";

@Autowired
private ConfigService configService;

public Map<String, Map<String, Object>> validate(List<String> ids, ValidationRule rule) throws SQLException {

String query = rule.getRequestURL();
if (!query.contains("${endpointArguments}")) {
throw new IllegalArgumentException("The query must contain the placeholder '${endpointArguments}' for ID replacement.");
}

String finalQuery = query.replace("${endpointArguments}", "(" + ids.stream().map(id -> "?").collect(Collectors.joining(", ")) + ")");

try (Connection connection = DatabaseConnection.getConnection(
rule.getSqlConnectionParams());
PreparedStatement preparedStatement = connection.prepareStatement(finalQuery)) {

for (int i = 0; i < ids.size(); i++) {
preparedStatement.setObject(i + 1, UUID.fromString(ids.get(i)));
}

try (ResultSet resultSet = preparedStatement.executeQuery()) {
Map<String, Map<String, Object>> validationResults = new HashMap<>();

while (resultSet.next()) {
String id = resultSet.getString(rule.getIdColumn());
Map<String, Object> columnValues = new HashMap<>();

for (String column : rule.getValidationColumns()) {
Object value = resultSet.getObject(column);
columnValues.put(column, value);
}
validationResults.put(id, columnValues);
}

return validationResults;
} catch (SQLException e) {
throw new SQLException("Failed to execute query for ValidationRule", e);
}
}
}

public Map<String, Boolean> validateRequest(BaseRequest request, List<String> ids) throws ValidationException, SQLException, FileNotFoundException {
RequestValidationRules requestValidationRules = new RequestValidationRules();
Map<String, Boolean> emptyIdsRules = getRulesForEmptyIds(request, ids, requestValidationRules);

boolean isIdsFormatted = (request.getRequestValidationRules() != null)
? request.getRequestValidationRules().getIsIdsFormatted()
: requestValidationRules.getIsIdsFormatted();

if (isIdsFormatted) {
validateByValidationConfig(ids, BaseRequest.class);
}

return emptyIdsRules;
}

public Map<String, Boolean> validateMilitaryNoticeRequest(BaseRequest request, RequestParameters parameters) throws ValidationException, SQLException, FileNotFoundException {
RequestValidationRules requestValidationRules = new RequestValidationRules();
List<String> ids = parameters.getIds();
Map<String, Boolean> emptyIdsRules = getRulesForEmptyIds(request, ids, requestValidationRules);
Map<String, Boolean> emptyDatesRules = getRulesForEmptyDates(request, parameters);

validateByValidationConfig(ids, RequestParameters.class);

Map<String, Boolean> merged = new HashMap<>();
merged.putAll(emptyIdsRules);
merged.putAll(emptyDatesRules);

return merged;
}

public Map<String, Boolean> validateDownloadRequest(BaseDownloadRequest request, RequestParameters downloadRequest, List<String> ids) throws ValidationException {
RequestValidationRules requestValidationRules = new RequestValidationRules();
Map<String, Boolean> emptyIdsRules = getRulesForEmptyIds(request, ids, requestValidationRules);
Map<String, Boolean> emptyDatesRules = getRulesForEmptyDates(request, downloadRequest);

Map<String, Boolean> merged = new HashMap<>();
merged.putAll(emptyIdsRules);
merged.putAll(emptyDatesRules);

return merged;
}

private static Map<String, Boolean> getRulesForEmptyIds(BaseRequest request, List<String> ids, RequestValidationRules defaultRules) {

if (request.getRequestValidationRules() == null && defaultRules.getIsEmptyIdsAllowed()) {
return Map.of(ValidationService.IS_EMPTY_IDS_ALLOWED, true);
}

if (request.getRequestValidationRules() != null && request.getRequestValidationRules().getIsEmptyIdsAllowed()) {
return Map.of(ValidationService.IS_EMPTY_IDS_ALLOWED, true);
}

if (ids == null || ids.isEmpty()) {
throw new IllegalRequestParametersException("eks.error.argument.missing");
}

boolean isIdsFormatted = (request.getRequestValidationRules() != null && request.getRequestValidationRules().getIsIdsFormatted())
|| (request.getRequestValidationRules() == null && defaultRules.getIsIdsFormatted());

if (!isIdsFormatted) {
return Map.of(ValidationService.IS_EMPTY_IDS_ALLOWED, false);
}

String uuidRegex = "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";
boolean invalidIdFound = ids.stream().anyMatch(id -> !id.matches(uuidRegex));

if (!invalidIdFound) {
return Map.of(ValidationService.IS_EMPTY_IDS_ALLOWED, false);
}

throw new IllegalRequestParametersException("eks.error.argument.invalid");

}

private static <R extends BaseRequest, T extends RequestParameters> Map<String, Boolean> getRulesForEmptyDates(R request, T parameters) {
Boolean emptyDatesAllowed = isEmptyDatesAllowed(request, parameters);

if (!emptyDatesAllowed && parameters.getStartDate() != null && parameters.getEndDate() != null) {
if (parameters.getStartDate().isAfter(parameters.getEndDate())) {
throw new IllegalArgumentException("eks.error.date.order");
}
}

return Map.of(ValidationService.IS_EMPTY_DATES_ALLOWED, emptyDatesAllowed);
}

private static <T extends RequestParameters> Boolean isEmptyDatesAllowed(BaseRequest request, T parameters) {

Boolean emptyDatesAllowed = request.getRequestValidationRules() == null
? Boolean.FALSE
: request.getRequestValidationRules().getIsEmptyDatesAllowed();

LocalDate startDate = parameters.getStartDate();
LocalDate endDate = parameters.getEndDate();

if (Boolean.FALSE.equals(emptyDatesAllowed) && (startDate == null || endDate == null)) {
throw new IllegalRequestParametersException("eks.error.date.empty");
}

return emptyDatesAllowed;

}

public <T> Map<String, String> validateByValidationConfig(List<String> ids, T c) throws ValidationException, FileNotFoundException, SQLException {

ValidationRules config = configService.getConfig(ConfigType.VALIDATE_BLOCK, ValidationRules.class);

if (config.getValidationRules() == null || config.getValidationRules().isEmpty()) {
return null;
}

Map<String, Map<String, Object>> validationResults = new HashMap<>();

for (ValidationRule rule : config.getValidationRules()) {
validationResults.putAll(validate(ids, rule));
}

Map<String, String> invalidRecords = new HashMap<>();

validationResults.forEach((id, columnValues) -> {
List<String> invalidColumns = columnValues.entrySet().stream()
.filter(entry -> Boolean.FALSE.equals(entry.getValue()))
.map(Map.Entry::getKey)
.toList();

if (!invalidColumns.isEmpty()) {
String message = "Запись " + id + " имеет не отменённые временные меры или подписанные повестки";
invalidRecords.put(id, message);
}
});

if (!invalidRecords.isEmpty()) {
throw new ValidationException("Validation failed for some records", invalidRecords);
}

return invalidRecords;

}
}
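
A small standalone illustration of the placeholder expansion performed in validate(...) above: the ${endpointArguments} token is replaced by one JDBC "?" marker per id before the ids are bound as UUIDs. The class name and query text are made up for the example:

import java.util.List;
import java.util.stream.Collectors;

class EndpointPlaceholderSketch {
    public static void main(String[] args) {
        List<String> ids = List.of("id-1", "id-2", "id-3");
        String query = "select id, is_cancelled from notices where id in ${endpointArguments}";
        String finalQuery = query.replace("${endpointArguments}",
                "(" + ids.stream().map(id -> "?").collect(Collectors.joining(", ")) + ")");
        // Prints: select id, is_cancelled from notices where id in (?, ?, ?)
        System.out.println(finalQuery);
    }
}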
@@ -1,5 +1,13 @@
package org.micord.utils;

import jakarta.xml.bind.JAXBContext;
import jakarta.xml.bind.JAXBException;
import jakarta.xml.bind.Unmarshaller;
import org.micord.enums.ConfigType;
import org.micord.models.CachedConfig;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

@@ -10,14 +18,6 @@ import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import jakarta.xml.bind.JAXBContext;
import jakarta.xml.bind.JAXBException;
import jakarta.xml.bind.Unmarshaller;

import org.micord.models.CachedConfig;
import org.micord.models.Requests;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

/**
 * @author Maksim Tereshin

@@ -31,7 +31,9 @@ public class ConfigLoader {
@Value("${configDirectory}")
private String configDirectory;

public Optional<Requests> loadConfigIfModified(String methodName) {

public <T> Optional<T> loadConfigIfModified(ConfigType configType, Class<T> configClass) {
String methodName = configType.getType();
String fileName = methodName + ".xml";

if (configDirectory == null) {

@@ -47,14 +49,14 @@ public class ConfigLoader {

if (cachedConfig == null || !currentModifiedTime.equals(cachedConfig.getModifiedTime())) {
// Load the updated configuration
JAXBContext jaxbContext = JAXBContext.newInstance(Requests.class);
JAXBContext jaxbContext = JAXBContext.newInstance(configClass);
Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
Requests loadedConfig = (Requests) unmarshaller.unmarshal(configFile);
T loadedConfig = unmarshalConfig(configFile, configClass);
cachedConfigs.put(methodName, new CachedConfig(loadedConfig, currentModifiedTime));
return Optional.of(loadedConfig);
}
else {
return Optional.of(cachedConfigs.get(methodName).getConfig());
return (Optional<T>) Optional.of(cachedConfigs.get(methodName).getConfig());
}

}

@@ -67,4 +69,10 @@ public class ConfigLoader {
return Optional.empty(); // Return empty if unmarshalling fails
}
}

private <T> T unmarshalConfig(File configFile, Class<T> configClass) throws JAXBException {
JAXBContext jaxbContext = JAXBContext.newInstance(configClass);
Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
return configClass.cast(unmarshaller.unmarshal(configFile));
}
}
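
A hypothetical call site for the generic loadConfigIfModified(...) shown above, assuming the ConfigType constant used in the tests below and a Requests config class (this diff imports Requests from two different packages, so the exact import may differ):

import java.util.Optional;

import org.micord.enums.ConfigType;
import org.micord.models.requests.Requests;
import org.micord.utils.ConfigLoader;

class ConfigLoaderUsageSketch {
    void reload(ConfigLoader configLoader) {
        // Re-parses <configDirectory>/<type>.xml only when its modification time changed;
        // otherwise the cached instance is returned.
        Optional<Requests> config = configLoader.loadConfigIfModified(
                ConfigType.REMOVE_MILITARY_DRAFT_NOTICES, Requests.class);
        config.ifPresent(requests -> {
            // hand the freshly parsed configuration to the request pipeline (placeholder)
        });
    }
}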
@@ -1 +1,8 @@
configDirectory: C:\work\ervu-eks\config\cde-xml
spring:
  mvc:
    servlet:
      load-on-startup: 1
logging:
  level:
    root: info

@@ -0,0 +1,2 @@
com.atomikos.icatch.max_timeout = 1800000
com.atomikos.icatch.default_jta_timeout=1800000
@@ -0,0 +1,283 @@
package org.micord.controller;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.io.TempDir;
import org.micord.enums.ConfigType;
import org.micord.models.requests.RequestParameters;
import org.micord.service.ApiService;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.LocalDate;
import java.util.Arrays;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;

@ExtendWith(MockitoExtension.class)
class ApiControllerTest {

@Mock
private ApiService apiService;

@InjectMocks
private ApiController apiController;

@TempDir
Path tempDir;

private RequestParameters testRequest;

@BeforeEach
void setUp() {
testRequest = new RequestParameters();
testRequest.setType("test-download-type");
testRequest.setIds(Arrays.asList("id1", "id2", "id3"));
testRequest.setStartDate(LocalDate.of(2024, 1, 1));
testRequest.setEndDate(LocalDate.of(2024, 12, 31));
}

@Test
void testDownloadCSV_SingleCsvFile() throws Exception {
// Given: Service returns a single CSV file
File csvFile = createTempCsvFile("test-download.csv");
when(apiService.download(eq(ConfigType.DOWNLOAD_CSV), any(RequestParameters.class)))
.thenReturn(csvFile);

// When: Calling downloadCSV endpoint
ResponseEntity<Resource> response = apiController.downloadCSV(testRequest);

// Then: Response should have CSV content type and headers
assertThat(response.getStatusCodeValue()).isEqualTo(200);
assertThat(response.getHeaders().getContentType())
.isEqualTo(MediaType.parseMediaType("text/csv; charset=UTF-8"));
assertThat(response.getHeaders().getFirst(HttpHeaders.CONTENT_DISPOSITION))
.startsWith("attachment; filename=")
.endsWith(".csv");
assertThat(response.getHeaders().getFirst(HttpHeaders.CONTENT_ENCODING))
.isEqualTo("UTF-8");
assertThat(response.getHeaders().getContentLength()).isEqualTo(csvFile.length());

verify(apiService).download(eq(ConfigType.DOWNLOAD_CSV), any(RequestParameters.class));
}

@Test
void testDownloadCSV_ZipFile() throws Exception {
// Given: Service returns a ZIP file (for large datasets)
File zipFile = createTempZipFile("test-download.zip");
when(apiService.download(eq(ConfigType.DOWNLOAD_CSV), any(RequestParameters.class)))
.thenReturn(zipFile);

// When: Calling downloadCSV endpoint
ResponseEntity<Resource> response = apiController.downloadCSV(testRequest);

// Then: Response should have ZIP content type and headers
assertThat(response.getStatusCodeValue()).isEqualTo(200);
assertThat(response.getHeaders().getContentType())
.isEqualTo(MediaType.APPLICATION_OCTET_STREAM);
assertThat(response.getHeaders().getFirst(HttpHeaders.CONTENT_DISPOSITION))
.startsWith("attachment; filename=")
.endsWith(".zip");
// Content-Encoding should NOT be set for ZIP files
assertThat(response.getHeaders().getFirst(HttpHeaders.CONTENT_ENCODING))
.isNull();
assertThat(response.getHeaders().getContentLength()).isEqualTo(zipFile.length());

verify(apiService).download(eq(ConfigType.DOWNLOAD_CSV), any(RequestParameters.class));
}

@Test
void testDownloadCSV_HandlesIOException() throws Exception {
// Given: Service throws IOException
when(apiService.download(eq(ConfigType.DOWNLOAD_CSV), any(RequestParameters.class)))
.thenThrow(new IOException("Test IO error"));

// When/Then: Should propagate the exception
assertThrows(IOException.class, () -> {
apiController.downloadCSV(testRequest);
});
}

@Test
void testDownloadCSV_EmptyRequest() throws Exception {
// Given: Empty request parameters
RequestParameters emptyRequest = new RequestParameters();
emptyRequest.setType("empty-type");

File csvFile = createTempCsvFile("empty.csv");
when(apiService.download(eq(ConfigType.DOWNLOAD_CSV), any(RequestParameters.class)))
.thenReturn(csvFile);

// When: Calling with empty request
ResponseEntity<Resource> response = apiController.downloadCSV(emptyRequest);

// Then: Should still process successfully
assertThat(response.getStatusCodeValue()).isEqualTo(200);
assertThat(response.getBody()).isNotNull();
}

@Test
void testListDownloadTypes() throws Exception {
// Given: Service returns list of download types
List<String> downloadTypes = Arrays.asList(
"TYPE_1", "TYPE_2", "TYPE_3", "TYPE_4"
);
when(apiService.getDownloadTypes(ConfigType.DOWNLOAD_CSV))
.thenReturn(downloadTypes);

// When: Calling listDownloadTypes endpoint
ResponseEntity<List<String>> response = apiController.listDownloadTypes();

// Then: Should return the list
assertThat(response.getStatusCodeValue()).isEqualTo(200);
assertThat(response.getBody()).isEqualTo(downloadTypes);
assertThat(response.getBody()).hasSize(4);

verify(apiService).getDownloadTypes(ConfigType.DOWNLOAD_CSV);
}

@Test
void testRemoveMilitaryDraftNotices() throws Exception {
// Given: Request with IDs
RequestParameters request = new RequestParameters();
request.setIds(Arrays.asList("notice1", "notice2"));

doNothing().when(apiService).process(eq(ConfigType.REMOVE_MILITARY_DRAFT_NOTICES), any(RequestParameters.class));

// When: Calling removeMilitaryDraftNotices
ResponseEntity<?> response = apiController.removeMilitaryDraftNotices(request);

// Then: Should return success message
assertThat(response.getStatusCodeValue()).isEqualTo(200);
assertThat(response.getBody()).isEqualTo("Операция \"Удаление повесток\" завершена успешно.");

verify(apiService).process(eq(ConfigType.REMOVE_MILITARY_DRAFT_NOTICES), any(RequestParameters.class));
}

@Test
void testDeleteFiles() throws Exception {
// Given: List of file IDs
List<String> fileIds = Arrays.asList("file1", "file2", "file3");

doNothing().when(apiService).process(eq(ConfigType.DELETE_FILES), eq(fileIds));

// When: Calling deleteFiles
ResponseEntity<?> response = apiController.deleteFiles(fileIds);

// Then: Should return success message
assertThat(response.getStatusCodeValue()).isEqualTo(200);
assertThat(response.getBody()).isEqualTo("Операция \"Удаление файлов\" завершена успешно.");

verify(apiService).process(ConfigType.DELETE_FILES, fileIds);
}

@Test
void testBlock() throws Exception {
// Given: List of IDs to block
List<String> ids = Arrays.asList("user1", "user2");

doNothing().when(apiService).process(eq(ConfigType.BLOCK), eq(ids));

// When: Calling block
ResponseEntity<?> response = apiController.block(ids);

// Then: Should return success message
assertThat(response.getStatusCodeValue()).isEqualTo(200);
assertThat(response.getBody()).isEqualTo("Операция \"Блокировка\" завершена успешно.");

verify(apiService).process(ConfigType.BLOCK, ids);
}

@Test
void testUnblock() throws Exception {
// Given: List of IDs to unblock
List<String> ids = Arrays.asList("user1", "user2");

doNothing().when(apiService).process(eq(ConfigType.UNBLOCK), eq(ids));

// When: Calling unblock
ResponseEntity<?> response = apiController.unblock(ids);

// Then: Should return success message
assertThat(response.getStatusCodeValue()).isEqualTo(200);
|
||||
assertThat(response.getBody()).isEqualTo("Операция \"Разблокировка\" завершена успешно.");
|
||||
|
||||
verify(apiService).process(ConfigType.UNBLOCK, ids);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testContentTypeDetection() throws Exception {
|
||||
// Test various file extensions
|
||||
testFileContentType("data.csv", MediaType.parseMediaType("text/csv; charset=UTF-8"));
|
||||
testFileContentType("archive.zip", MediaType.APPLICATION_OCTET_STREAM);
|
||||
testFileContentType("report.CSV", MediaType.parseMediaType("text/csv; charset=UTF-8"));
|
||||
testFileContentType("bundle.ZIP", MediaType.APPLICATION_OCTET_STREAM);
|
||||
}
|
||||
|
||||
private void testFileContentType(String filename, MediaType expectedType) throws Exception {
|
||||
// Create file with specific extension
|
||||
File file = tempDir.resolve(filename).toFile();
|
||||
file.createNewFile();
|
||||
|
||||
when(apiService.download(eq(ConfigType.DOWNLOAD_CSV), any(RequestParameters.class)))
|
||||
.thenReturn(file);
|
||||
|
||||
ResponseEntity<Resource> response = apiController.downloadCSV(testRequest);
|
||||
|
||||
assertThat(response.getHeaders().getContentType()).isEqualTo(expectedType);
|
||||
}
|
||||
|
||||
// Helper methods
|
||||
|
||||
private File createTempCsvFile(String filename) throws IOException {
|
||||
File csvFile = Files.createTempFile(tempDir, filename.replace(".csv", ""), ".csv").toFile();
|
||||
|
||||
try (FileOutputStream fos = new FileOutputStream(csvFile)) {
|
||||
String csvContent = "header1,header2,header3\n" +
|
||||
"value1,value2,value3\n" +
|
||||
"value4,value5,value6\n";
|
||||
fos.write(csvContent.getBytes(StandardCharsets.UTF_8));
|
||||
}
|
||||
|
||||
return csvFile;
|
||||
}
|
||||
|
||||
private File createTempZipFile(String filename) throws IOException {
|
||||
File zipFile = Files.createTempFile(tempDir, filename.replace(".zip", ""), ".zip").toFile();
|
||||
|
||||
try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(zipFile))) {
|
||||
// Add first CSV
|
||||
ZipEntry entry1 = new ZipEntry("part1.csv");
|
||||
zos.putNextEntry(entry1);
|
||||
zos.write("header1,header2\ndata1,data2\n".getBytes(StandardCharsets.UTF_8));
|
||||
zos.closeEntry();
|
||||
|
||||
// Add second CSV
|
||||
ZipEntry entry2 = new ZipEntry("part2.csv");
|
||||
zos.putNextEntry(entry2);
|
||||
zos.write("header1,header2\ndata3,data4\n".getBytes(StandardCharsets.UTF_8));
|
||||
zos.closeEntry();
|
||||
}
|
||||
|
||||
return zipFile;
|
||||
}
|
||||
}
|
||||
|
|
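The content-type assertions in the controller tests above boil down to an extension check on the file returned by the service. A minimal sketch of such a header helper is shown below; the class and method names are assumptions made for illustration, only the header values mirror what the tests assert.

```java
import java.io.File;
import java.util.Locale;

import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;

// Sketch only: response headers chosen by file extension, matching the assertions above.
// The class and method names are assumptions, not the actual controller API.
class DownloadHeadersSketch {

    static HttpHeaders forFile(File file) {
        HttpHeaders headers = new HttpHeaders();
        String name = file.getName().toLowerCase(Locale.ROOT);
        if (name.endsWith(".csv")) {
            // Plain CSV: text/csv with an explicit UTF-8 charset and a Content-Encoding header.
            headers.setContentType(MediaType.parseMediaType("text/csv; charset=UTF-8"));
            headers.set(HttpHeaders.CONTENT_ENCODING, "UTF-8");
        } else {
            // ZIP archives (large exports) are served as a generic binary stream, no Content-Encoding.
            headers.setContentType(MediaType.APPLICATION_OCTET_STREAM);
        }
        headers.set(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=" + file.getName());
        headers.setContentLength(file.length());
        return headers;
    }
}
```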
@ -0,0 +1,262 @@
|
|||
package org.micord.service;
|
||||
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.*;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipFile;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class DownloadServiceTest {
|
||||
|
||||
@InjectMocks
|
||||
private DownloadService downloadService;
|
||||
|
||||
@TempDir
|
||||
Path tempDir;
|
||||
|
||||
private static final int MAX_ROWS_PER_CSV = 600000;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
downloadService = new DownloadService();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSingleCsvFileWhenRowsUnderLimit() throws Exception {
|
||||
// Given: Dataset with less than 600k rows
|
||||
List<Map<String, Object>> testData = generateTestData(100000);
|
||||
|
||||
// When: Writing results to CSV
|
||||
File result = invokeWriteResultsToCsv(testData);
|
||||
|
||||
// Then: Should create single CSV file, not ZIP
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.getName()).endsWith(".csv");
|
||||
assertThat(result.getName()).doesNotEndWith(".zip");
|
||||
|
||||
// Verify CSV content
|
||||
List<String> lines = Files.readAllLines(result.toPath());
|
||||
// Header + 100k data rows
|
||||
assertThat(lines).hasSize(100001);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMultipleCsvFilesWhenRowsOverLimit() throws Exception {
|
||||
// Given: Dataset with more than 600k rows (1.5 million)
|
||||
List<Map<String, Object>> testData = generateTestData(1500000);
|
||||
|
||||
// When: Writing results to CSV
|
||||
File result = invokeWriteResultsToCsv(testData);
|
||||
|
||||
// Then: Should create ZIP file
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.getName()).endsWith(".zip");
|
||||
|
||||
// Verify ZIP contains 3 CSV files (600k + 600k + 300k)
|
||||
try (ZipFile zipFile = new ZipFile(result)) {
|
||||
assertThat(Collections.list(zipFile.entries())).hasSize(3);
|
||||
|
||||
zipFile.entries().asIterator().forEachRemaining(entry -> {
|
||||
assertThat(entry.getName()).endsWith(".csv");
|
||||
assertThat(entry.getName()).contains("part");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExactlyMaxRowsPerCsvFile() throws Exception {
|
||||
// Given: Exactly 600k rows
|
||||
List<Map<String, Object>> testData = generateTestData(MAX_ROWS_PER_CSV);
|
||||
|
||||
// When: Writing results to CSV
|
||||
File result = invokeWriteResultsToCsv(testData);
|
||||
|
||||
// Then: Should create single CSV file
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.getName()).endsWith(".csv");
|
||||
|
||||
List<String> lines = Files.readAllLines(result.toPath());
|
||||
assertThat(lines).hasSize(MAX_ROWS_PER_CSV + 1); // +1 for header
|
||||
}
|
||||
|
||||
@Test
|
||||
void testChunkingPreservesHeaders() throws Exception {
|
||||
// Given: Dataset that requires chunking
|
||||
List<Map<String, Object>> testData = generateTestData(1200000);
|
||||
|
||||
// When: Writing results to CSV
|
||||
File result = invokeWriteResultsToCsv(testData);
|
||||
|
||||
// Then: Each CSV in ZIP should have headers
|
||||
try (ZipFile zipFile = new ZipFile(result)) {
|
||||
zipFile.entries().asIterator().forEachRemaining(entry -> {
|
||||
try (BufferedReader reader = new BufferedReader(
|
||||
new InputStreamReader(zipFile.getInputStream(entry)))) {
|
||||
String firstLine = reader.readLine();
|
||||
// Verify header exists
|
||||
assertThat(firstLine).contains("column1,column2,column3");
|
||||
} catch (IOException e) {
|
||||
fail("Failed to read ZIP entry: " + e.getMessage());
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testEmptyResultsHandling() throws Exception {
|
||||
// Given: Empty dataset
|
||||
List<Map<String, Object>> testData = new ArrayList<>();
|
||||
|
||||
// When: Writing results to CSV
|
||||
File result = invokeWriteResultsToCsv(testData);
|
||||
|
||||
// Then: Should create empty CSV file
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.getName()).endsWith(".csv");
|
||||
|
||||
List<String> lines = Files.readAllLines(result.toPath());
|
||||
assertThat(lines).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testLargeDatasetChunking() throws Exception {
|
||||
// Given: 2 million rows (should create 4 files)
|
||||
List<Map<String, Object>> testData = generateTestData(2000000);
|
||||
|
||||
// When: Writing results to CSV
|
||||
File result = invokeWriteResultsToCsv(testData);
|
||||
|
||||
// Then: Should create ZIP with 4 CSV files
|
||||
assertThat(result.getName()).endsWith(".zip");
|
||||
|
||||
try (ZipFile zipFile = new ZipFile(result)) {
|
||||
List<? extends ZipEntry> entries = Collections.list(zipFile.entries());
|
||||
assertThat(entries).hasSize(4);
|
||||
|
||||
// Verify file names are sequential
|
||||
assertThat(entries.get(0).getName()).contains("part1");
|
||||
assertThat(entries.get(1).getName()).contains("part2");
|
||||
assertThat(entries.get(2).getName()).contains("part3");
|
||||
assertThat(entries.get(3).getName()).contains("part4");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testCsvFieldFormatting() throws Exception {
|
||||
// Given: Data with special characters that need escaping
|
||||
List<Map<String, Object>> testData = new ArrayList<>();
|
||||
// Use LinkedHashMap to preserve field order
|
||||
Map<String, Object> row = new LinkedHashMap<>();
|
||||
row.put("normal", "value");
|
||||
row.put("withQuote", "value\"with\"quotes");
|
||||
row.put("withComma", "value,with,commas");
|
||||
row.put("withNewline", "value\nwith\nnewlines");
|
||||
row.put("nullValue", null);
|
||||
testData.add(row);
|
||||
|
||||
// When: Writing results to CSV
|
||||
File result = invokeWriteResultsToCsv(testData);
|
||||
|
||||
// Then: Fields should be properly escaped
|
||||
String content = Files.readString(result.toPath());
|
||||
// Verify proper escaping
|
||||
assertThat(content).contains("\"value\"\"with\"\"quotes\"");
|
||||
assertThat(content).contains("\"value,with,commas\"");
|
||||
assertThat(content).contains("\"\""); // null value
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSqlResultsToSingleFile() throws Exception {
|
||||
// Given: SQL results under the limit
|
||||
List<String[]> sqlResults = generateSqlTestData(50000);
|
||||
|
||||
// When: Writing SQL results to CSV
|
||||
File result = invokeWriteSingleSqlCsvFile(sqlResults, "test-", ".csv");
|
||||
|
||||
// Then: Should create single CSV file
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.getName()).startsWith("test-");
|
||||
assertThat(result.getName()).endsWith(".csv");
|
||||
|
||||
List<String> lines = Files.readAllLines(result.toPath());
|
||||
assertThat(lines).hasSize(50000);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testZipFileContentsIntegrity() throws Exception {
|
||||
// Given: Large dataset
|
||||
List<Map<String, Object>> testData = generateTestData(1200000);
|
||||
|
||||
// When: Creating ZIP file
|
||||
File result = invokeWriteResultsToCsv(testData);
|
||||
|
||||
// Then: Verify total row count across all files in ZIP
|
||||
int totalRows = 0;
|
||||
try (ZipFile zipFile = new ZipFile(result)) {
|
||||
for (ZipEntry entry : Collections.list(zipFile.entries())) {
|
||||
try (BufferedReader reader = new BufferedReader(
|
||||
new InputStreamReader(zipFile.getInputStream(entry)))) {
|
||||
// Count lines, excluding header
|
||||
long lines = reader.lines().count() - 1;
|
||||
totalRows += lines;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assertThat(totalRows).isEqualTo(1200000);
|
||||
}
|
||||
|
||||
// Helper methods
|
||||
|
||||
private List<Map<String, Object>> generateTestData(int rows) {
|
||||
List<Map<String, Object>> data = new ArrayList<>();
|
||||
for (int i = 0; i < rows; i++) {
|
||||
// Use LinkedHashMap to preserve order
|
||||
Map<String, Object> row = new LinkedHashMap<>();
|
||||
row.put("column1", "value" + i);
|
||||
row.put("column2", i);
|
||||
row.put("column3", "test" + i);
|
||||
data.add(row);
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
private List<String[]> generateSqlTestData(int rows) {
|
||||
List<String[]> data = new ArrayList<>();
|
||||
// Add header
|
||||
data.add(new String[]{"column1", "column2", "column3"});
|
||||
// Add data rows
|
||||
for (int i = 0; i < rows - 1; i++) {
|
||||
data.add(new String[]{"value" + i, String.valueOf(i), "test" + i});
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
// Reflection-based invocation of private methods for testing
|
||||
private File invokeWriteResultsToCsv(List<Map<String, Object>> results) throws Exception {
|
||||
java.lang.reflect.Method method = DownloadService.class.getDeclaredMethod(
|
||||
"writeResultsToCsv", List.class, int.class);
|
||||
method.setAccessible(true);
|
||||
return (File) method.invoke(downloadService, results, 600000);
|
||||
}
|
||||
|
||||
private File invokeWriteSingleSqlCsvFile(List<String[]> results, String prefix, String suffix) throws Exception {
|
||||
java.lang.reflect.Method method = DownloadService.class.getDeclaredMethod(
|
||||
"writeSingleSqlCsvFile", List.class, String.class, String.class);
|
||||
method.setAccessible(true);
|
||||
return (File) method.invoke(downloadService, results, prefix, suffix);
|
||||
}
|
||||
}
|
||||
|
|
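The chunking behaviour exercised by these tests (a single CSV up to 600,000 rows, otherwise a ZIP of part1.csv, part2.csv, ... with the header row repeated in every part and fields quoted with doubled inner quotes) can be sketched as follows. This is an illustrative outline under those assumptions, not the actual DownloadService implementation; every name in it is invented.

```java
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

// Illustrative sketch of the chunking rule the tests above describe; names are assumptions.
class CsvChunkWriterSketch {

    static File write(List<Map<String, Object>> rows, int maxRowsPerCsv) throws IOException {
        if (rows.size() <= maxRowsPerCsv) {
            // Small result set: one plain CSV file.
            File csv = Files.createTempFile("download-", ".csv").toFile();
            try (Writer w = new OutputStreamWriter(new FileOutputStream(csv), StandardCharsets.UTF_8)) {
                writeChunk(w, rows);
            }
            return csv;
        }
        // Large result set: a ZIP with sequentially numbered CSV parts.
        File zip = Files.createTempFile("download-", ".zip").toFile();
        try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(zip))) {
            int part = 1;
            for (int from = 0; from < rows.size(); from += maxRowsPerCsv, part++) {
                int to = Math.min(from + maxRowsPerCsv, rows.size());
                zos.putNextEntry(new ZipEntry("part" + part + ".csv"));
                Writer w = new OutputStreamWriter(zos, StandardCharsets.UTF_8);
                writeChunk(w, rows.subList(from, to)); // each part repeats the header row
                w.flush();
                zos.closeEntry();
            }
        }
        return zip;
    }

    private static void writeChunk(Writer w, List<Map<String, Object>> rows) throws IOException {
        if (rows.isEmpty()) {
            return; // empty input produces an empty CSV, as the empty-results test expects
        }
        // Header row taken from the (ordered) keys of the first row.
        w.write(String.join(",", rows.get(0).keySet()) + "\n");
        for (Map<String, Object> row : rows) {
            StringBuilder line = new StringBuilder();
            for (Object v : row.values()) {
                if (line.length() > 0) {
                    line.append(',');
                }
                // Quote every field and double embedded quotes, as testCsvFieldFormatting expects.
                line.append('"').append(v == null ? "" : v.toString().replace("\"", "\"\"")).append('"');
            }
            w.write(line + "\n");
        }
    }
}
```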
@ -0,0 +1,200 @@
|
|||
package org.micord.utils;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipFile;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
/**
|
||||
* Utility class for file operations in tests.
|
||||
*/
|
||||
public class FileTestUtils {
|
||||
|
||||
/**
|
||||
* Count the number of lines in a CSV file
|
||||
*/
|
||||
public static long countCsvLines(File csvFile) throws IOException {
|
||||
try (BufferedReader reader = new BufferedReader(
|
||||
new InputStreamReader(new FileInputStream(csvFile), StandardCharsets.UTF_8))) {
|
||||
return reader.lines().count();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read all lines from a CSV file
|
||||
*/
|
||||
public static List<String> readCsvLines(File csvFile) throws IOException {
|
||||
return Files.readAllLines(csvFile.toPath(), StandardCharsets.UTF_8);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse CSV line (simple implementation for testing)
|
||||
*/
|
||||
public static String[] parseCsvLine(String line) {
|
||||
List<String> result = new ArrayList<>();
|
||||
StringBuilder current = new StringBuilder();
|
||||
boolean inQuotes = false;
|
||||
|
||||
for (int i = 0; i < line.length(); i++) {
|
||||
char c = line.charAt(i);
|
||||
|
||||
if (c == '"') {
|
||||
if (i + 1 < line.length() && line.charAt(i + 1) == '"') {
|
||||
current.append('"');
|
||||
i++; // Skip next quote
|
||||
} else {
|
||||
inQuotes = !inQuotes;
|
||||
}
|
||||
} else if (c == ',' && !inQuotes) {
|
||||
result.add(current.toString());
|
||||
current = new StringBuilder();
|
||||
} else {
|
||||
current.append(c);
|
||||
}
|
||||
}
|
||||
|
||||
result.add(current.toString());
|
||||
return result.toArray(new String[0]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify CSV file structure
|
||||
*/
|
||||
public static void verifyCsvStructure(File csvFile, int expectedColumns) throws IOException {
|
||||
List<String> lines = readCsvLines(csvFile);
|
||||
assertThat(lines).isNotEmpty();
|
||||
|
||||
// Check header
|
||||
String[] header = parseCsvLine(lines.get(0));
|
||||
assertThat(header).hasSize(expectedColumns);
|
||||
|
||||
// Check data rows have same number of columns
|
||||
for (int i = 1; i < Math.min(10, lines.size()); i++) {
|
||||
String[] row = parseCsvLine(lines.get(i));
|
||||
assertThat(row).hasSize(expectedColumns);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Count total rows across all CSV files in a ZIP
|
||||
*/
|
||||
public static long countTotalRowsInZip(File zipFile) throws IOException {
|
||||
long totalRows = 0;
|
||||
|
||||
try (ZipFile zip = new ZipFile(zipFile)) {
|
||||
for (ZipEntry entry : java.util.Collections.list(zip.entries())) {
|
||||
if (entry.getName().endsWith(".csv")) {
|
||||
try (BufferedReader reader = new BufferedReader(
|
||||
new InputStreamReader(zip.getInputStream(entry), StandardCharsets.UTF_8))) {
|
||||
// Count lines excluding header
|
||||
long lines = reader.lines().count() - 1;
|
||||
totalRows += lines;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return totalRows;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract all CSV files from a ZIP to a temporary directory
|
||||
*/
|
||||
public static List<File> extractCsvFromZip(File zipFile, Path tempDir) throws IOException {
|
||||
List<File> extractedFiles = new ArrayList<>();
|
||||
|
||||
try (ZipFile zip = new ZipFile(zipFile)) {
|
||||
for (ZipEntry entry : java.util.Collections.list(zip.entries())) {
|
||||
if (entry.getName().endsWith(".csv")) {
|
||||
Path outputPath = tempDir.resolve(entry.getName());
|
||||
|
||||
try (InputStream is = zip.getInputStream(entry);
|
||||
OutputStream os = Files.newOutputStream(outputPath)) {
|
||||
byte[] buffer = new byte[8192];
|
||||
int len;
|
||||
while ((len = is.read(buffer)) > 0) {
|
||||
os.write(buffer, 0, len);
|
||||
}
|
||||
}
|
||||
|
||||
extractedFiles.add(outputPath.toFile());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return extractedFiles;
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify that each CSV file in ZIP has headers
|
||||
*/
|
||||
public static void verifyZipCsvHeaders(File zipFile, String[] expectedHeaders) throws IOException {
|
||||
try (ZipFile zip = new ZipFile(zipFile)) {
|
||||
for (ZipEntry entry : java.util.Collections.list(zip.entries())) {
|
||||
if (entry.getName().endsWith(".csv")) {
|
||||
try (BufferedReader reader = new BufferedReader(
|
||||
new InputStreamReader(zip.getInputStream(entry), StandardCharsets.UTF_8))) {
|
||||
String firstLine = reader.readLine();
|
||||
assertThat(firstLine).isNotNull();
|
||||
|
||||
String[] headers = parseCsvLine(firstLine);
|
||||
assertThat(headers).containsExactly(expectedHeaders);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a temporary CSV file with test data
|
||||
*/
|
||||
public static File createTempCsvFile(List<String[]> data, Path tempDir) throws IOException {
|
||||
File csvFile = Files.createTempFile(tempDir, "test-", ".csv").toFile();
|
||||
|
||||
try (PrintWriter writer = new PrintWriter(
|
||||
new OutputStreamWriter(new FileOutputStream(csvFile), StandardCharsets.UTF_8))) {
|
||||
for (String[] row : data) {
|
||||
writer.println(formatCsvRow(row));
|
||||
}
|
||||
}
|
||||
|
||||
return csvFile;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a row for CSV output
|
||||
*/
|
||||
private static String formatCsvRow(String[] row) {
|
||||
StringBuilder result = new StringBuilder();
|
||||
for (int i = 0; i < row.length; i++) {
|
||||
if (i > 0) {
|
||||
result.append(",");
|
||||
}
|
||||
result.append("\"");
|
||||
result.append(row[i] != null ? row[i].replace("\"", "\"\"") : "");
|
||||
result.append("\"");
|
||||
}
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get file size in MB
|
||||
*/
|
||||
public static double getFileSizeInMB(File file) {
|
||||
return file.length() / (1024.0 * 1024.0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify file is within size limits
|
||||
*/
|
||||
public static void verifyFileSize(File file, double maxSizeMB) {
|
||||
double sizeMB = getFileSizeInMB(file);
|
||||
assertThat(sizeMB).isLessThanOrEqualTo(maxSizeMB);
|
||||
}
|
||||
}
|
||||
|
|
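A hypothetical usage of the FileTestUtils helpers above inside a test; the archive location and the expected counts are placeholders, not values from this repository.

```java
import java.io.File;
import java.nio.file.Path;

import org.junit.jupiter.api.Test;
import org.micord.utils.FileTestUtils;

import static org.assertj.core.api.Assertions.assertThat;

// Hypothetical usage of the helpers; archive location and expected counts are placeholders.
class FileTestUtilsUsageSketch {

    @Test
    void verifiesAGeneratedArchive() throws Exception {
        File zip = new File("build/tmp/download.zip");

        // Total data rows across all CSV parts, headers excluded.
        assertThat(FileTestUtils.countTotalRowsInZip(zip)).isEqualTo(1_200_000);

        // Every part must repeat the same header row.
        FileTestUtils.verifyZipCsvHeaders(zip, new String[]{"column1", "column2", "column3"});

        // Extract the parts and check the column count of the first one.
        File firstPart = FileTestUtils.extractCsvFromZip(zip, Path.of("build/tmp")).get(0);
        FileTestUtils.verifyCsvStructure(firstPart, 3);
    }
}
```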
@ -0,0 +1,196 @@
|
|||
package org.micord.utils;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ThreadLocalRandom;
|
||||
|
||||
/**
|
||||
* Utility class for generating test data for download service tests.
|
||||
*/
|
||||
public class TestDataGenerator {
|
||||
|
||||
private static final String[] FIRST_NAMES = {
|
||||
"John", "Jane", "Michael", "Sarah", "David", "Emma", "Robert", "Lisa", "William", "Mary"
|
||||
};
|
||||
|
||||
private static final String[] LAST_NAMES = {
|
||||
"Smith", "Johnson", "Williams", "Brown", "Jones", "Garcia", "Miller", "Davis", "Rodriguez", "Martinez"
|
||||
};
|
||||
|
||||
private static final String[] CITIES = {
|
||||
"New York", "Los Angeles", "Chicago", "Houston", "Phoenix", "Philadelphia", "San Antonio", "San Diego"
|
||||
};
|
||||
|
||||
/**
|
||||
* Generate a list of maps representing CSV data rows
|
||||
*/
|
||||
public static List<Map<String, Object>> generateMapData(int rows) {
|
||||
List<Map<String, Object>> data = new ArrayList<>();
|
||||
Random random = new Random();
|
||||
|
||||
for (int i = 0; i < rows; i++) {
|
||||
Map<String, Object> row = new HashMap<>();
|
||||
row.put("id", UUID.randomUUID().toString());
|
||||
row.put("firstName", FIRST_NAMES[random.nextInt(FIRST_NAMES.length)]);
|
||||
row.put("lastName", LAST_NAMES[random.nextInt(LAST_NAMES.length)]);
|
||||
row.put("age", 18 + random.nextInt(60));
|
||||
row.put("city", CITIES[random.nextInt(CITIES.length)]);
|
||||
row.put("email", "user" + i + "@example.com");
|
||||
row.put("phone", generatePhoneNumber());
|
||||
row.put("registrationDate", generateRandomDate());
|
||||
row.put("isActive", random.nextBoolean());
|
||||
row.put("balance", String.format("%.2f", random.nextDouble() * 10000));
|
||||
|
||||
data.add(row);
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate SQL-style array data (including headers)
|
||||
*/
|
||||
public static List<String[]> generateSqlData(int rows, boolean includeHeaders) {
|
||||
List<String[]> data = new ArrayList<>();
|
||||
Random random = new Random();
|
||||
|
||||
if (includeHeaders) {
|
||||
data.add(new String[]{
|
||||
"id", "firstName", "lastName", "age", "city",
|
||||
"email", "phone", "registrationDate", "isActive", "balance"
|
||||
});
|
||||
}
|
||||
|
||||
for (int i = 0; i < rows; i++) {
|
||||
String[] row = new String[]{
|
||||
UUID.randomUUID().toString(),
|
||||
FIRST_NAMES[random.nextInt(FIRST_NAMES.length)],
|
||||
LAST_NAMES[random.nextInt(LAST_NAMES.length)],
|
||||
String.valueOf(18 + random.nextInt(60)),
|
||||
CITIES[random.nextInt(CITIES.length)],
|
||||
"user" + i + "@example.com",
|
||||
generatePhoneNumber(),
|
||||
generateRandomDate().toString(),
|
||||
String.valueOf(random.nextBoolean()),
|
||||
String.format("%.2f", random.nextDouble() * 10000)
|
||||
};
|
||||
data.add(row);
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate data with special characters that need CSV escaping
|
||||
*/
|
||||
public static List<Map<String, Object>> generateProblematicData() {
|
||||
List<Map<String, Object>> data = new ArrayList<>();
|
||||
|
||||
// Row with quotes
|
||||
Map<String, Object> row1 = new HashMap<>();
|
||||
row1.put("id", "1");
|
||||
row1.put("description", "This has \"quotes\" in it");
|
||||
row1.put("name", "John \"The Boss\" Smith");
|
||||
data.add(row1);
|
||||
|
||||
// Row with commas
|
||||
Map<String, Object> row2 = new HashMap<>();
|
||||
row2.put("id", "2");
|
||||
row2.put("description", "Values, with, many, commas");
|
||||
row2.put("name", "Smith, John");
|
||||
data.add(row2);
|
||||
|
||||
// Row with newlines
|
||||
Map<String, Object> row3 = new HashMap<>();
|
||||
row3.put("id", "3");
|
||||
row3.put("description", "First line\nSecond line\nThird line");
|
||||
row3.put("name", "Multi\nLine\nName");
|
||||
data.add(row3);
|
||||
|
||||
// Row with null values
|
||||
Map<String, Object> row4 = new HashMap<>();
|
||||
row4.put("id", "4");
|
||||
row4.put("description", null);
|
||||
row4.put("name", null);
|
||||
data.add(row4);
|
||||
|
||||
// Row with special characters
|
||||
Map<String, Object> row5 = new HashMap<>();
|
||||
row5.put("id", "5");
|
||||
row5.put("description", "Special chars: @#$%^&*()_+-=[]{}|;':\",./<>?");
|
||||
row5.put("name", "Ñoño José");
|
||||
data.add(row5);
|
||||
|
||||
// Row with tabs
|
||||
Map<String, Object> row6 = new HashMap<>();
|
||||
row6.put("id", "6");
|
||||
row6.put("description", "Tab\tseparated\tvalues");
|
||||
row6.put("name", "Tab\tUser");
|
||||
data.add(row6);
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate edge case datasets for boundary testing
|
||||
*/
|
||||
public static class EdgeCaseDatasets {
|
||||
public static final int JUST_UNDER_LIMIT = 599999;
|
||||
public static final int EXACTLY_AT_LIMIT = 600000;
|
||||
public static final int JUST_OVER_LIMIT = 600001;
|
||||
public static final int ONE_AND_HALF_CHUNKS = 900000;
|
||||
public static final int EXACTLY_TWO_CHUNKS = 1200000;
|
||||
public static final int MULTIPLE_CHUNKS = 2400000;
|
||||
|
||||
public static List<Map<String, Object>> getDataset(int size) {
|
||||
return generateMapData(size);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate large dataset in chunks to avoid memory issues
|
||||
*/
|
||||
public static Iterator<Map<String, Object>> generateLargeDataIterator(int totalRows) {
|
||||
return new Iterator<Map<String, Object>>() {
|
||||
private int currentRow = 0;
|
||||
private final Random random = new Random();
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return currentRow < totalRows;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> next() {
|
||||
if (!hasNext()) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
|
||||
Map<String, Object> row = new HashMap<>();
|
||||
row.put("id", UUID.randomUUID().toString());
|
||||
row.put("rowNumber", currentRow);
|
||||
row.put("timestamp", System.currentTimeMillis());
|
||||
row.put("data", "Row " + currentRow + " of " + totalRows);
|
||||
row.put("random", random.nextDouble());
|
||||
|
||||
currentRow++;
|
||||
return row;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static String generatePhoneNumber() {
|
||||
Random random = new Random();
|
||||
return String.format("(%03d) %03d-%04d",
|
||||
random.nextInt(900) + 100,
|
||||
random.nextInt(900) + 100,
|
||||
random.nextInt(9000) + 1000);
|
||||
}
|
||||
|
||||
private static LocalDate generateRandomDate() {
|
||||
long minDay = LocalDate.of(2020, 1, 1).toEpochDay();
|
||||
long maxDay = LocalDate.of(2024, 12, 31).toEpochDay();
|
||||
long randomDay = ThreadLocalRandom.current().nextLong(minDay, maxDay);
|
||||
return LocalDate.ofEpochDay(randomDay);
|
||||
}
|
||||
}
|
||||
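A hypothetical driver for the streaming iterator above, showing how a very large dataset can be consumed row by row without materialising it in memory; the row count is arbitrary.

```java
import java.util.Iterator;
import java.util.Map;

import org.micord.utils.TestDataGenerator;

// Hypothetical driver for the streaming iterator; the row count is arbitrary.
class LargeDataIteratorSketch {

    public static void main(String[] args) {
        Iterator<Map<String, Object>> rows = TestDataGenerator.generateLargeDataIterator(2_000_000);
        long count = 0;
        while (rows.hasNext()) {
            Map<String, Object> row = rows.next();
            // A real test would hand each row to the CSV writer here instead of just counting it.
            if (!row.isEmpty()) {
                count++;
            }
        }
        System.out.println("generated rows: " + count);
    }
}
```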
43
config.md
|
|
@ -1,7 +1,5 @@
|
|||
# Application server configuration parameters

File /standalone/configuration/standalone.xml

## General

- `webbpm.security.login.case_sensitive`. Default value = true. Controls whether the user login is treated as case-sensitive.
|
||||
|
|
@ -12,6 +10,15 @@
|
|||
- development - in this mode the project DB connection configured in Studio is used.
- production - in this mode the project DB connection configured via a JNDI name is used.

## BPMN module

```bpmn.enable``` - true by default; controls the creation of beans that use the bpmn module (with the flag set to false the module dependencies can be removed, see **docs/ru/developer-guide/Отключение_bpmn_модуля.md**)

Example:
```xml
<property name="bpmn.enable" value="false"/>
```

## jBPM

- `com.arjuna.ats.arjuna.allowMultipleLastResources`
|
||||
|
|
@ -94,7 +101,7 @@
|
|||
Successful authorization of the principal can be verified with the command `kinit -t -i http.keytab %principal%`.
If the authorization succeeds, the `klist` command shows the principal that was used for authorization as the default principal.
After that, fix the `app.service-principal` parameter in standalone.xml to the principal that authorized successfully. The principal has the format: `HTTP/%hostname%@%REALM%`
After that, fix the `app.service-principal` parameter to the principal that authorized successfully. The principal has the format: `HTTP/%hostname%@%REALM%`

Example configuration:
|
||||
|
||||
|
|
@ -191,7 +198,7 @@
|
|||
... 69 more
```

it is necessary to check that the principal specified in standalone.xml is correct.
it is necessary to check that the specified principal is correct.

### Combining several authentication methods
|
||||
|
||||
|
|
@ -328,13 +335,13 @@
|
|||
<jndi-resource name="java:comp/env/webbpm/testResource" type="bpmn.handler.common.SmtpConfiguration">{"host":"host","port":1234,"login":"user","password":"password","from":"email_from","senderName":"sender_name","isSecured":true}</jndi-resource>
```
The mail server must be a real, registered mail account. In the password field, do not put the mailbox login password; instead create an application password in the mail account settings and use that.
2. To enable registration, add the property to *standalone.xml*
2. To enable registration, add the property
```
<property name="registration.enabled" value="true"/>
registration.enabled=true
```
3. Also specify in *standalone.xml* the resource for sending registration confirmation e-mails (from step 1)
3. Specify the resource for sending registration confirmation e-mails (from step 1)
```
<property name="mail.jndi.resource.name" value="java:comp/env/webbpm/testResource"/>
mail.jndi.resource.name=java:comp/env/webbpm/testResource
```
4. If needed, edit the registration confirmation e-mail template
(resources/src/main/resources/mail/confirmation.html)
|
||||
|
|
@ -483,8 +490,6 @@ public class Main {
|
|||
|
||||
## Logging configuration

All settings are made in the `standalone.xml` file unless stated otherwise.

### General settings

The Web-bpm platform uses the root logging category `ru.cg.webbpm`; it is recommended to set it to the `info` level. todo check prod config
|
||||
|
|
@ -686,7 +691,6 @@ JBPM использует 3 корневых категории логирова
|
|||
- `dev_mode` - setting that enables dev_mode for viewing logs (true/false). If absent, the value set at build time is kept
- `guard.confirm_exit` - whether to show a confirmation dialog when unsaved data is detected in a form. Default value - false.
- `password.pattern` - Regular expression for password validation.
- `password_pattern_error` - Validation error message.
- `show.client.errors` - controls whether JavaScript errors are shown to the user (should only be used in test environments); disabled by default
- `available_task.single_fetch` - Controls the number of available_task requests when a process completes. true - a single request, false/not set - 10 requests (the old implementation).
|
||||
|
||||
|
|
@ -701,19 +705,30 @@ JBPM использует 3 корневых категории логирова
|
|||
|
||||
### Esmart

- `electronic_sign.esmart_extension_url` - URL for creating an extended signature. See [http://demo.esmart.ru](http://demo.esmart.ru) for details
- `electronic_sign.tsp_address` - address of the time-stamp service (TSP) server
- `electronic_sign.esmart_plugin.tsp_address` - URL for creating an extended signature. See [http://demo.esmart.ru](http://demo.esmart.ru) for details
- `electronic_sign.cades_plugin.tsp_address` - address of the time-stamp service (TSP) server

Example:

```text
"electronic_sign.esmart_extension_url": "http://dsig.ibsdemo.ru/ibs_dsig/ibs_dSig.asmx"
"electronic_sign.esmart_plugin.tsp_address": "http://dsig.rubydemo.ru/ibs_dsig/ibs_dSig.asmx"
```

## Authentication method

- `auth_method` - authentication method. Takes a single value from the list: form, kerberos, cert_over_db, cert_over_ldap

## Project URL

- `backend.url` - sets the project URL. If the parameter is absent, the default is used:
```
window.location.protocol + '//' + window.location.hostname + ':' + window.location.port + '/backend'
```
Example:
```
http://10.10.31.80:8080/
```

## Cached filter value cleanup timer

- `filter_cleanup_interval_hours` - lifetime of a cached filter value, in hours. Default - 720 hours,
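Example (assumed to use the same `<property/>` form as the other parameters in this file; adjust to the actual configuration format if it differs):
```
<property name="filter_cleanup_interval_hours" value="720"/>
```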
|
||||
|
|
|
|||
1
config/.env
Normal file
|
|
@ -0,0 +1 @@
|
|||
IMAGE=eks-app:latest
|
||||
|
|
@ -1,43 +0,0 @@
|
|||
FROM quay.io/wildfly/wildfly:26.1.3.Final-jdk17
|
||||
USER root
|
||||
ARG POSTGRES_DRIVER_VERSION=42.7.3
|
||||
RUN yum-config-manager --disable base --disable extras --disable updates \
|
||||
&& yum-config-manager --enable C7.8.2003-base --enable C7.8.2003-extras --enable C7.8.2003-updates \
|
||||
&& yum -y --disableplugin=fastestmirror install sudo \
|
||||
&& chown -R jboss: /opt/jboss/
|
||||
|
||||
USER jboss
|
||||
WORKDIR $JBOSS_HOME
|
||||
ENV JAVA_ARGS=-Xmx3g
|
||||
RUN echo 'JAVA_OPTS="$JAVA_OPTS -agentlib:jdwp=transport=dt_socket,address=*:8787,server=y,suspend=n"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS -XX:MaxMetaspaceSize=1g"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS -XX:+UseStringDeduplication -XX:+ParallelRefProcEnabled -XX:+ExplicitGCInvokesConcurrent"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS -XX:+UnlockDiagnosticVMOptions -XX:G1SummarizeRSetStatsPeriod=1"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS -Dfile.encoding=UTF-8"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS -XX:-OmitStackTraceInFastThrow"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS -Dorg.jboss.logmanager.nocolor=true"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS $JAVA_ARGS"' >> bin/standalone.conf
|
||||
|
||||
### Locale support ru_RU ###
|
||||
USER root
|
||||
RUN localedef -i ru_RU -f UTF-8 ru_RU.UTF-8
|
||||
RUN echo "LANG=\"ru_RU.UTF-8\"" > /etc/locale.conf
|
||||
USER jboss
|
||||
ENV LANG ru_RU.UTF-8
|
||||
ENV LANGUAGE ru_RU.UTF-8
|
||||
ENV LC_ALL ru_RU.UTF-8
|
||||
### Locale Support END ###
|
||||
|
||||
COPY --chown=jboss entrypoint.sh .
|
||||
COPY --chown=jboss patches patches
|
||||
RUN (cd patches && wget https://repo.micord.ru/repository/libs-releases-local/org/jboss/ironjacamar/ironjacamar-core-impl/1.5.3.Final/ironjacamar-core-impl-1.5.3.Final.jar)
|
||||
RUN (cd patches/system && wget https://repo1.maven.org/maven2/org/postgresql/postgresql/$POSTGRES_DRIVER_VERSION/postgresql-$POSTGRES_DRIVER_VERSION.jar -O postgresql-driver.jar)
|
||||
|
||||
RUN chmod -R +x patches && \
|
||||
chmod +x entrypoint.sh && \
|
||||
./entrypoint.sh && \
|
||||
rm -rf patches
|
||||
|
||||
ENV SERVER_START=true
|
||||
COPY --chown=jboss *.ear $JBOSS_HOME/standalone/deployments/
|
||||
COPY --chown=jboss *.war $JBOSS_HOME/standalone/deployments/
|
||||
|
|
@ -1,58 +0,0 @@
|
|||
FROM maven:3-openjdk-17-slim AS builder
|
||||
RUN apt update \
|
||||
&& apt upgrade -y \
|
||||
&& curl -fsSL https://deb.nodesource.com/setup_14.x | bash - \
|
||||
&& apt install -y git nodejs \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /app
|
||||
COPY ../ .
|
||||
RUN mvn clean \
|
||||
&& mvn package -T4C \
|
||||
&& cd config-data-executor \
|
||||
&& mvn clean \
|
||||
&& mvn package -T4C
|
||||
|
||||
FROM quay.io/wildfly/wildfly:26.1.3.Final-jdk17
|
||||
USER root
|
||||
ARG POSTGRES_DRIVER_VERSION=42.7.3
|
||||
RUN yum-config-manager --disable base --disable extras --disable updates \
|
||||
&& yum-config-manager --enable C7.8.2003-base --enable C7.8.2003-extras --enable C7.8.2003-updates \
|
||||
&& yum -y --disableplugin=fastestmirror install sudo \
|
||||
&& chown -R jboss: /opt/jboss/
|
||||
|
||||
USER jboss
|
||||
WORKDIR $JBOSS_HOME
|
||||
ENV JAVA_ARGS=-Xmx3g
|
||||
RUN echo 'JAVA_OPTS="$JAVA_OPTS -agentlib:jdwp=transport=dt_socket,address=*:8787,server=y,suspend=n"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS -XX:MaxMetaspaceSize=1g"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS -XX:+UseStringDeduplication -XX:+ParallelRefProcEnabled -XX:+ExplicitGCInvokesConcurrent"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS -XX:+UnlockDiagnosticVMOptions -XX:G1SummarizeRSetStatsPeriod=1"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS -Dfile.encoding=UTF-8"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS -XX:-OmitStackTraceInFastThrow"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS -Dorg.jboss.logmanager.nocolor=true"' >> bin/standalone.conf && \
|
||||
echo 'JAVA_OPTS="$JAVA_OPTS $JAVA_ARGS"' >> bin/standalone.conf
|
||||
|
||||
### Locale support ru_RU ###
|
||||
USER root
|
||||
RUN localedef -i ru_RU -f UTF-8 ru_RU.UTF-8
|
||||
RUN echo "LANG=\"ru_RU.UTF-8\"" > /etc/locale.conf
|
||||
USER jboss
|
||||
ENV LANG ru_RU.UTF-8
|
||||
ENV LANGUAGE ru_RU.UTF-8
|
||||
ENV LC_ALL ru_RU.UTF-8
|
||||
### Locale Support END ###
|
||||
|
||||
COPY --chown=jboss config/entrypoint.sh .
|
||||
COPY --chown=jboss config/patches patches
|
||||
RUN (cd patches && wget https://repo.micord.ru/repository/libs-releases-local/org/jboss/ironjacamar/ironjacamar-core-impl/1.5.3.Final/ironjacamar-core-impl-1.5.3.Final.jar)
|
||||
RUN (cd patches/system && wget https://repo1.maven.org/maven2/org/postgresql/postgresql/$POSTGRES_DRIVER_VERSION/postgresql-$POSTGRES_DRIVER_VERSION.jar -O postgresql-driver.jar)
|
||||
|
||||
RUN chmod -R +x patches && \
|
||||
chmod +x entrypoint.sh && \
|
||||
./entrypoint.sh && \
|
||||
rm -rf patches
|
||||
|
||||
ENV SERVER_START=true
|
||||
COPY --from=builder --chown=jboss /app/distribution/target/*.ear $JBOSS_HOME/standalone/deployments/
|
||||
COPY --from=builder --chown=jboss /app/config-data-executor/target/*.war $JBOSS_HOME/standalone/deployments/
|
||||
18
config/Dockerfile.tc
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
ARG RUNTIME_IMAGE=reg.gitlab.micord.ru/common/base/alt/alt-p11-jre17:latest
|
||||
|
||||
FROM $RUNTIME_IMAGE
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get -y install nginx
|
||||
|
||||
ENV BACKEND_URL=http://localhost:8080
|
||||
ENV CONFIG_DATA_EXECUTOR_URL=http://localhost:8080/api
|
||||
|
||||
COPY nginx.conf /etc/nginx/nginx.conf
|
||||
COPY dist /usr/share/nginx/html
|
||||
COPY backend.jar /home/app/backend.jar
|
||||
COPY config-data-executor.jar /home/app/cde.jar
|
||||
|
||||
EXPOSE 80
|
||||
|
||||
ENTRYPOINT ["java", "-jar", "/home/app/backend.jar"]
|
||||
1
config/NODEJS_version
Normal file
|
|
@ -0,0 +1 @@
|
|||
v20
|
||||
88
config/application.yaml
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
config:
|
||||
data:
|
||||
executor:
|
||||
socket:
|
||||
timeout: 10
|
||||
connection:
|
||||
timeout: 10
|
||||
url: http://localhost:8080/api
|
||||
webbpm:
|
||||
mode: development
|
||||
security:
|
||||
token:
|
||||
secret:
|
||||
key: de2c9de3-0de5-47c1-a8de-0fad6f93873b-com.example-ervu_eks_5-17_10_2025_13:18
|
||||
server:
|
||||
servlet:
|
||||
context-path: /ervu-eks
|
||||
bpmn:
|
||||
enable: false
|
||||
gar:
|
||||
enable: false
|
||||
fias:
|
||||
enable: false
|
||||
spring:
|
||||
mvc:
|
||||
servlet:
|
||||
loadOnStartup: 1
|
||||
jackson:
|
||||
serialization:
|
||||
write-dates-as-timestamps: false
|
||||
write-enums-using-to-string: true
|
||||
deserialization:
|
||||
read-enums-using-to-string: true
|
||||
task:
|
||||
scheduling:
|
||||
pool:
|
||||
size: 12
|
||||
datasource:
|
||||
person:
|
||||
url: jdbc:postgresql://127.0.0.1:5432/person_registry
|
||||
username: ${SPRING_DATASOURCE_PERSON_USERNAME}
|
||||
password: ${SPRING_DATASOURCE_PERSON_PASSWORD}
|
||||
driverClassName: org.postgresql.Driver
|
||||
dbBeanPackage: com.example.ervu_eks_5.db_beans.person
|
||||
sqlDialect: POSTGRES
|
||||
default: false
|
||||
manually: true
|
||||
schemas:
|
||||
- public
|
||||
security:
|
||||
url: jdbc:postgresql://127.0.0.1:5432/ervu_eks
|
||||
username: ${SPRING_DATASOURCE_SECURITY_USERNAME}
|
||||
password: ${SPRING_DATASOURCE_SECURITY_PASSWORD}
|
||||
driverClassName: org.postgresql.Driver
|
||||
dbBeanPackage: com.example.ervu_eks_5.db_beans.security
|
||||
sqlDialect: POSTGRES
|
||||
maxSize: 7
|
||||
metricsEnabled: true
|
||||
default: true
|
||||
manually: true
|
||||
schemas:
|
||||
- security
|
||||
monitoring:
|
||||
white:
|
||||
ip:
|
||||
list: 127.0.0.1, 0:0:0:0:0:0:0:1
|
||||
management:
|
||||
endpoints:
|
||||
web:
|
||||
exposure:
|
||||
include: info,health,metrics,prometheus
|
||||
metrics:
|
||||
export:
|
||||
prometheus:
|
||||
enabled: true
|
||||
logging:
|
||||
level:
|
||||
root: INFO
|
||||
org:
|
||||
jooq:
|
||||
tools: DEBUG
|
||||
ru:
|
||||
cg:
|
||||
webbpm:
|
||||
modules:
|
||||
database:
|
||||
impl:
|
||||
analytics: DEBUG
|
||||
|
|
@ -1,6 +1,51 @@
|
|||
<Requests>
|
||||
<DownloadRequest>
|
||||
<DownloadRequestType>Type_A</DownloadRequestType>
|
||||
<AQLDownloadRequest>
|
||||
<RequestValidationRules
|
||||
isEmptyIdsAllowed="false"
|
||||
isEmptyDatesAllowed="false"
|
||||
isIdsFormatted="true"
|
||||
/>
|
||||
<DownloadRequestType>Arango</DownloadRequestType>
|
||||
<DownloadRequestEntitySelectorQuery>
|
||||
<![CDATA[
|
||||
WITH applications, subject, history, edges
|
||||
|
||||
FOR app IN applications
|
||||
FILTER app.statement.recruitsData.mainInfo[0].id IN @ids
|
||||
LET parentEdges = (
|
||||
FOR vertex, edge, path
|
||||
IN 1..1
|
||||
OUTBOUND app._id edges
|
||||
OPTIONS { uniqueVertices: "path" }
|
||||
FILTER edge.field IN ["applicant", "history", "interdepreq"]
|
||||
RETURN { edgesId: edge._key, parent: DOCUMENT(vertex._id) }
|
||||
)
|
||||
RETURN {
|
||||
applicationId: app._key,
|
||||
edgesId: (FOR e IN parentEdges RETURN e.edgesId),
|
||||
subjectId: (FOR e IN parentEdges FILTER e.parent.schema == "Subject" RETURN e.parent._key),
|
||||
historyId: (FOR e IN parentEdges FILTER e.parent.schema == "History" RETURN e.parent._key),
|
||||
interdepreqId: (FOR e IN parentEdges FILTER e.parent.schema == "Interdepreq" RETURN e.parent._key)
|
||||
}
|
||||
]]>
|
||||
</DownloadRequestEntitySelectorQuery>
|
||||
<AqlRequestCollections>
|
||||
<AqlRequestCollection type="read" dateAttribute="statement.recruitsData.mainInfo[0].systemCreateDate">applications</AqlRequestCollection>
|
||||
<AqlRequestCollection type="read" dateAttribute="date">history</AqlRequestCollection>
|
||||
</AqlRequestCollections>
|
||||
<AqlConnectionParams>
|
||||
<Host>localhost</Host>
|
||||
<Port>8529</Port>
|
||||
<Username>root</Username>
|
||||
<Password>test</Password>
|
||||
<Database>_system</Database>
|
||||
</AqlConnectionParams>
|
||||
</AQLDownloadRequest>
|
||||
<SQLDownloadRequest>
|
||||
<DownloadRequestType>По конкретным id</DownloadRequestType>
|
||||
<RequestValidationRules
|
||||
isEmptyDatesAllowed="true"
|
||||
/>
|
||||
<RequestURL>
|
||||
Select system_id_ern from public.recruits where id in ${endpointArguments};
|
||||
</RequestURL>
|
||||
|
|
@ -15,11 +60,17 @@
|
|||
<JdbcDatabase>person_registry</JdbcDatabase>
|
||||
<JdbcXaDataSourceBorrowConnectionTimeout>4000</JdbcXaDataSourceBorrowConnectionTimeout>
|
||||
</SqlConnectionParams>
|
||||
</DownloadRequest>
|
||||
<DownloadRequest>
|
||||
<DownloadRequestType>Type_B</DownloadRequestType>
|
||||
</SQLDownloadRequest>
|
||||
<SQLDownloadRequest>
|
||||
<RequestValidationRules
|
||||
isEmptyIdsAllowed="true"
|
||||
isEmptyDatesAllowed="true"
|
||||
isIdsFormatted="true"
|
||||
/>
|
||||
<DownloadRequestType>С выделенным подзапросом (100)</DownloadRequestType>
|
||||
<RequestArgumentLimit>100</RequestArgumentLimit>
|
||||
<RequestURL>
|
||||
Select system_id_ern from public.recruits where id in ${endpointArguments};
|
||||
Select system_id_ern from public.recruits where id in ${additionalArguments};
|
||||
</RequestURL>
|
||||
<SqlConnectionParams>
|
||||
<JdbcHost>10.10.31.118</JdbcHost>
|
||||
|
|
@ -32,11 +83,34 @@
|
|||
<JdbcDatabase>person_registry</JdbcDatabase>
|
||||
<JdbcXaDataSourceBorrowConnectionTimeout>4000</JdbcXaDataSourceBorrowConnectionTimeout>
|
||||
</SqlConnectionParams>
|
||||
</DownloadRequest>
|
||||
<DownloadRequest>
|
||||
<DownloadRequestType>Type_C</DownloadRequestType>
|
||||
<RequestArgument>
|
||||
<RequestArgumentURL>
|
||||
select id from public.recruits;
|
||||
</RequestArgumentURL>
|
||||
<RequestArgumentName>additionalArguments</RequestArgumentName>
|
||||
<RequestArgumentConnectionParams>
|
||||
<JdbcHost>10.10.31.118</JdbcHost>
|
||||
<JdbcPort>5432</JdbcPort>
|
||||
<JdbcUsername>ervu</JdbcUsername>
|
||||
<JdbcPassword>ervu</JdbcPassword>
|
||||
<JdbcDriverClassName>org.postgresql.Driver</JdbcDriverClassName>
|
||||
<JdbcXaDataSourceClassName>org.postgresql.xa.PGXADataSource</JdbcXaDataSourceClassName>
|
||||
<JdbcXaDataSourcePoolSize>50</JdbcXaDataSourcePoolSize>
|
||||
<JdbcDatabase>person_registry</JdbcDatabase>
|
||||
<JdbcXaDataSourceBorrowConnectionTimeout>4000</JdbcXaDataSourceBorrowConnectionTimeout>
|
||||
</RequestArgumentConnectionParams>
|
||||
</RequestArgument>
|
||||
</SQLDownloadRequest>
|
||||
<SQLDownloadRequest>
|
||||
<RequestValidationRules
|
||||
isEmptyIdsAllowed="true"
|
||||
isEmptyDatesAllowed="true"
|
||||
isIdsFormatted="true"
|
||||
/>
|
||||
<DownloadRequestType>Одним запросом (50)</DownloadRequestType>
|
||||
<RequestArgumentLimit>50</RequestArgumentLimit>
|
||||
<RequestURL>
|
||||
Select system_id_ern from public.recruits where id in ${endpointArguments};
|
||||
Select system_id_ern from public.recruits;
|
||||
</RequestURL>
|
||||
<SqlConnectionParams>
|
||||
<JdbcHost>10.10.31.118</JdbcHost>
|
||||
|
|
@ -49,6 +123,6 @@
|
|||
<JdbcDatabase>person_registry</JdbcDatabase>
|
||||
<JdbcXaDataSourceBorrowConnectionTimeout>4000</JdbcXaDataSourceBorrowConnectionTimeout>
|
||||
</SqlConnectionParams>
|
||||
</DownloadRequest>
|
||||
</SQLDownloadRequest>
|
||||
|
||||
</Requests>
|
||||
|
|
@ -3,39 +3,26 @@
|
|||
<RequestURL>
|
||||
<![CDATA[
|
||||
DO
|
||||
$$
|
||||
BEGIN
|
||||
DELETE FROM summoned_list where recruit_id in ${endpointArguments};
|
||||
DELETE FROM recruit_active_list where recruit_id in ${endpointArguments};
|
||||
DELETE FROM documents where recruit_id in ${endpointArguments};
|
||||
DELETE FROM subpoena where recruit_id in ${endpointArguments};
|
||||
END
|
||||
$$;
|
||||
]]>
|
||||
</RequestURL>
|
||||
<SqlConnectionParams>
|
||||
<JdbcHost>10.10.31.118</JdbcHost>
|
||||
<JdbcPort>5432</JdbcPort>
|
||||
<JdbcUsername>ervu</JdbcUsername>
|
||||
<JdbcPassword>ervu</JdbcPassword>
|
||||
<JdbcDriverClassName>org.postgresql.Driver</JdbcDriverClassName>
|
||||
<JdbcXaDataSourceClassName>org.postgresql.xa.PGXADataSource</JdbcXaDataSourceClassName>
|
||||
<JdbcXaDataSourcePoolSize>50</JdbcXaDataSourcePoolSize>
|
||||
<JdbcDatabase>summon-list-registry</JdbcDatabase>
|
||||
</SqlConnectionParams>
|
||||
</SqlRequest>
|
||||
$$
|
||||
DECLARE
|
||||
t varchar(100);
|
||||
begin
|
||||
select pg_sleep(60) into t;
|
||||
--delete from recruits where 1=2;
|
||||
|
||||
update recruits
|
||||
set system_pgs_status=null, extra_info = jsonb_set(coalesce(extra_info,'{}'::jsonb), '{blocked}',
|
||||
('{"cur":'|| coalesce('"'||current_recruitment_id::text||'"','null')||
|
||||
',"trg":'|| coalesce('"'||target_recruitment_id::text||'"','null')||
|
||||
',"dio":'|| coalesce('"'||department_id_old::text||'"','null')||
|
||||
',"st":'|| coalesce('"'||system_pgs_status::text||'"','null')||'}')::jsonb
|
||||
)
|
||||
where id IN ${endpointArguments};
|
||||
|
||||
end
|
||||
$$;
|
||||
|
||||
|
||||
<SqlRequest>
|
||||
<RequestURL>
|
||||
<![CDATA[
|
||||
update recruits
|
||||
set current_recruitment_id = jsonb_extract_path_text(extra_info,'blocked','cur')::uuid
|
||||
,target_recruitment_id = jsonb_extract_path_text(extra_info,'blocked','trg')::uuid
|
||||
,department_id_old = jsonb_extract_path_text(extra_info,'blocked','dio')::uuid
|
||||
,system_pgs_status = '1.2'
|
||||
,conscription = null
|
||||
,extra_info = extra_info - ('blocked')
|
||||
where id in ${endpointArguments};
|
||||
]]>
|
||||
</RequestURL>
|
||||
<SqlConnectionParams>
|
||||
|
|
@ -49,4 +36,5 @@
|
|||
<JdbcDatabase>person_registry</JdbcDatabase>
|
||||
</SqlConnectionParams>
|
||||
</SqlRequest>
|
||||
|
||||
</Requests>
|
||||
244
config/cde-xml/removeMilitaryDraftNotices.xml
Normal file
|
|
@ -0,0 +1,244 @@
|
|||
<Requests>
|
||||
<!-- 002_2-ervu_subpoena_registry_delete_all_with_recruit -->
|
||||
<SqlRequest>
|
||||
<RequestURL>
|
||||
<![CDATA[
|
||||
DO
|
||||
$$
|
||||
BEGIN
|
||||
DELETE FROM subpoena_history where subpoena_id in (select id FROM subpoena where recruit_id in ${endpointArguments});
|
||||
DELETE FROM subpoena_appearance where subpoena_id in (select id FROM subpoena where recruit_id in ${endpointArguments});
|
||||
DELETE FROM subpoena_send_info where subpoena_id in (select id FROM subpoena where recruit_id in ${endpointArguments});
|
||||
|
||||
DELETE FROM notification_item where restriction_document_item_id in (
|
||||
select id from restriction_document_item where restriction_document_create_id in (
|
||||
select id FROM restriction_document where subpoena_id in (select id FROM subpoena where recruit_id in ${endpointArguments})));
|
||||
DELETE FROM notification_item where restriction_document_item_id in (
|
||||
select id from restriction_document_item where restriction_document_cancel_id in (
|
||||
select id FROM restriction_document where subpoena_id in (select id FROM subpoena where recruit_id in ${endpointArguments})));
|
||||
|
||||
DELETE FROM restriction_document_item_history WHERE recruit_id in ${endpointArguments};
|
||||
DELETE FROM restriction_document_item where restriction_document_create_id in (
|
||||
select id FROM restriction_document where subpoena_id in (select id FROM subpoena where recruit_id in ${endpointArguments}));
|
||||
DELETE FROM restriction_document_item where restriction_document_cancel_id in (
|
||||
select id FROM restriction_document where subpoena_id in (select id FROM subpoena where recruit_id in ${endpointArguments}));
|
||||
DELETE FROM restriction_document where subpoena_id in (select id FROM subpoena where recruit_id in ${endpointArguments});
|
||||
DELETE FROM subpoena where recruit_id in ${endpointArguments};
|
||||
DELETE FROM recruits WHERE id in ${endpointArguments};
|
||||
END
|
||||
$$;
|
||||
]]>
|
||||
</RequestURL>
|
||||
<SqlConnectionParams>
|
||||
      <JdbcHost>10.10.31.118</JdbcHost>
      <JdbcPort>5432</JdbcPort>
      <JdbcUsername>ervu</JdbcUsername>
      <JdbcPassword>ervu</JdbcPassword>
      <JdbcDriverClassName>org.postgresql.Driver</JdbcDriverClassName>
      <JdbcXaDataSourceClassName>org.postgresql.xa.PGXADataSource</JdbcXaDataSourceClassName>
      <JdbcXaDataSourcePoolSize>50</JdbcXaDataSourcePoolSize>
      <JdbcDatabase>subpoena</JdbcDatabase>
    </SqlConnectionParams>
  </SqlRequest>

  <!-- 005-ervu_decision_document-delete-recruit -->
  <SqlRequest>
    <RequestURL>
      <![CDATA[
        DO
        $$
        BEGIN
          DELETE FROM decision where recruit_id in ${endpointArguments};
          DELETE FROM foreign_decision where recruit_id in ${endpointArguments};
          DELETE FROM infringement where recruit_id in ${endpointArguments};
          DELETE FROM system_document where attachment_id in (SELECT id FROM attachment where recruit_id in ${endpointArguments});
          DELETE FROM attachment where recruit_id in ${endpointArguments};
          DELETE FROM recruit where id in ${endpointArguments};
        END
        $$;
      ]]>
    </RequestURL>
    <SqlConnectionParams>
      <JdbcHost>10.10.31.118</JdbcHost>
      <JdbcPort>5432</JdbcPort>
      <JdbcUsername>ervu</JdbcUsername>
      <JdbcPassword>ervu</JdbcPassword>
      <JdbcDriverClassName>org.postgresql.Driver</JdbcDriverClassName>
      <JdbcXaDataSourceClassName>org.postgresql.xa.PGXADataSource</JdbcXaDataSourceClassName>
      <JdbcXaDataSourcePoolSize>50</JdbcXaDataSourcePoolSize>
      <JdbcDatabase>decision-document-service</JdbcDatabase>
    </SqlConnectionParams>
  </SqlRequest>

  <!-- 004-ervu_journal-delete-recruit -->
  <SqlRequest>
    <RequestURL>
      <![CDATA[
        DO
        $$
        DECLARE
          recruitIds uuid[] := ${endpointArguments};
          importIds uuid[] := ARRAY(SELECT import_id
                                    FROM import_results
                                    WHERE recruit_id = ANY (recruitIds));
        BEGIN
          WITH ImportDeletes AS (DELETE FROM import_journal_start WHERE import_id = ANY (importIds) RETURNING journal_id)
          DELETE
          FROM journal
          WHERE id IN (SELECT journal_id FROM ImportDeletes);

          DELETE FROM import_journal_finish WHERE import_id = ANY (importIds);
          DELETE FROM import_events_journal WHERE import_id = ANY (importIds);
          DELETE FROM import_validation_errors WHERE import_id = ANY (importIds);
          DELETE FROM import_results WHERE import_id = ANY (importIds);
          DELETE FROM uploaded_files WHERE extra_info ->> 'importId' = ANY (importIds::text[]);
          DELETE FROM object_history WHERE object_id = ANY (recruitIds);
          DELETE FROM object_versions WHERE object_id = ANY (recruitIds);
        END
        $$;
      ]]>
    </RequestURL>
    <SqlConnectionParams>
      <JdbcHost>10.10.31.118</JdbcHost>
      <JdbcPort>5432</JdbcPort>
      <JdbcUsername>ervu</JdbcUsername>
      <JdbcPassword>ervu</JdbcPassword>
      <JdbcDriverClassName>org.postgresql.Driver</JdbcDriverClassName>
      <JdbcXaDataSourceClassName>org.postgresql.xa.PGXADataSource</JdbcXaDataSourceClassName>
      <JdbcXaDataSourcePoolSize>50</JdbcXaDataSourcePoolSize>
      <JdbcDatabase>journal_dev</JdbcDatabase>
    </SqlConnectionParams>
  </SqlRequest>

  <!-- 008-ervu_geps_notifications-delete-recruit -->
  <SqlRequest>
    <RequestURL>
      <![CDATA[
        DO
        $$
        DECLARE
          var1 uuid[] := '{
            ${endpointArguments}
          }';
        BEGIN
          DELETE FROM notifications where recruit_id = any (var1);
        END
        $$;
      ]]>
    </RequestURL>
    <SqlConnectionParams>
      <JdbcHost>10.10.31.118</JdbcHost>
      <JdbcPort>5432</JdbcPort>
      <JdbcUsername>ervu</JdbcUsername>
      <JdbcPassword>ervu</JdbcPassword>
      <JdbcDriverClassName>org.postgresql.Driver</JdbcDriverClassName>
      <JdbcXaDataSourceClassName>org.postgresql.xa.PGXADataSource</JdbcXaDataSourceClassName>
      <JdbcXaDataSourcePoolSize>50</JdbcXaDataSourcePoolSize>
      <JdbcDatabase>geps_notifications</JdbcDatabase>
    </SqlConnectionParams>
  </SqlRequest>

  <!-- 007_2-ervu_incidents-delete-all_of_recruit -->
  <SqlRequest>
    <RequestURL>
      <![CDATA[
        DO
        $$
        DECLARE
          var1 uuid[] := '{
            ${endpointArguments}
          }';
        BEGIN
          DELETE
          FROM incident_history
          where incident_id in (select id from incident where recruit_id = any (var1));
          DELETE FROM incident where recruit_id = any (var1);
          DELETE FROM recruits where id = any (var1);
        END
        $$;
      ]]>
    </RequestURL>
    <SqlConnectionParams>
      <JdbcHost>10.10.31.118</JdbcHost>
      <JdbcPort>5432</JdbcPort>
      <JdbcUsername>ervu</JdbcUsername>
      <JdbcPassword>ervu</JdbcPassword>
      <JdbcDriverClassName>org.postgresql.Driver</JdbcDriverClassName>
      <JdbcXaDataSourceClassName>org.postgresql.xa.PGXADataSource</JdbcXaDataSourceClassName>
      <JdbcXaDataSourcePoolSize>50</JdbcXaDataSourcePoolSize>
      <JdbcDatabase>incidents</JdbcDatabase>
    </SqlConnectionParams>
  </SqlRequest>

  <!-- 001-ervu_person_registry-delete-recruit -->
  <SqlRequest>
    <RequestURL>
      <![CDATA[
        DO
        $$
        DECLARE
          var1 uuid[] := '{
            ${endpointArguments}
          }';
        BEGIN
          DELETE FROM recruits_info where recruit_id = any (var1);
          -- DELETE FROM recruit_archive where recruit_id = any (var1);
          DELETE FROM recruit_xml_data where recruit_id = any (var1);
          DELETE FROM recruits_history where recruit_id = any (var1);
          DELETE FROM application where recruit_id = any (var1);
          DELETE FROM department_history where recruit_id = any (var1);
          DELETE FROM documents where recruit_id = any (var1);
          DELETE FROM decision where recruit_id = any (var1);
          DELETE FROM personal_documents where recruit_id = any (var1);
          DELETE FROM recruit_private_file where recruit_id = any (var1);
          DELETE FROM system_documents where recruit_id = any (var1);
          DELETE FROM system_document_dto where recruit_id = any (var1);
          DELETE FROM subpoena_dto where recruit_id = any (var1);
          DELETE FROM attachments where recruit_id = any (var1);
          DELETE FROM summoned_list where recruit_id = any (var1);
          DELETE FROM recruits where id = any (var1);
        END
        $$;
      ]]>
    </RequestURL>
    <SqlConnectionParams>
      <JdbcHost>10.10.31.118</JdbcHost>
      <JdbcPort>5432</JdbcPort>
      <JdbcUsername>ervu</JdbcUsername>
      <JdbcPassword>ervu</JdbcPassword>
      <JdbcDriverClassName>org.postgresql.Driver</JdbcDriverClassName>
      <JdbcXaDataSourceClassName>org.postgresql.xa.PGXADataSource</JdbcXaDataSourceClassName>
      <JdbcXaDataSourcePoolSize>50</JdbcXaDataSourcePoolSize>
      <JdbcDatabase>person_registry</JdbcDatabase>
    </SqlConnectionParams>
  </SqlRequest>

  <!-- 009_1-ervu_appeal_document-delete-appeal-with-recruit -->
  <SqlRequest>
    <RequestURL>
      <![CDATA[
        DO
        $$
        DECLARE
          var1 uuid[] := '{
            ${endpointArguments}
          }';
        BEGIN
          DELETE FROM appeal_document where recruit_id = any (var1);
          DELETE FROM recruit where id = any (var1);
        END
        $$;
      ]]>
    </RequestURL>
    <SqlConnectionParams>
      <JdbcHost>10.10.31.118</JdbcHost>
      <JdbcPort>5432</JdbcPort>
      <JdbcUsername>ervu</JdbcUsername>
      <JdbcPassword>ervu</JdbcPassword>
      <JdbcDriverClassName>org.postgresql.Driver</JdbcDriverClassName>
      <JdbcXaDataSourceClassName>org.postgresql.xa.PGXADataSource</JdbcXaDataSourceClassName>
      <JdbcXaDataSourcePoolSize>50</JdbcXaDataSourcePoolSize>
      <JdbcDatabase>appeal-document-service</JdbcDatabase>
    </SqlConnectionParams>
  </SqlRequest>
</Requests>
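Each request above wraps its deletes in a single DO $$ ... $$ block, so for a given recruit id list a service database is either cleaned completely or left unchanged when one statement fails. How the executor renders ${endpointArguments} is not shown in this diff; the sketch below only assumes it becomes a uuid list or array literal, and the id is invented for illustration. Replaying the geps_notifications block by hand against the same connection parameters would look roughly like this:

# hypothetical id; the real list is substituted for ${endpointArguments} by the config-data-executor
PGPASSWORD=ervu psql -h 10.10.31.118 -p 5432 -U ervu -d geps_notifications <<'SQL'
DO
$$
DECLARE
    var1 uuid[] := '{00000000-0000-0000-0000-000000000001}';
BEGIN
    DELETE FROM notifications where recruit_id = any (var1);
END
$$;
SQL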
@ -1,5 +1,9 @@
<Requests>
  <SqlRequest>
    <RequestValidationRules
        isEmptyIdsAllowed="false"
        isIdsFormatted="true"
    />
    <RequestURL>
      <![CDATA[
        DO
19
config/cde-xml/validateBlock.xml
Normal file
@ -0,0 +1,19 @@
<ValidationRules>
  <ValidationRule idColumn="id" validationColumns="is_valid">
    <RequestURL>
      <![CDATA[
        SELECT id, true as is_valid from recruits, pg_sleep(1) where id in ${endpointArguments}
      ]]>
    </RequestURL>
    <SqlConnectionParams>
      <JdbcHost>10.10.31.118</JdbcHost>
      <JdbcPort>5432</JdbcPort>
      <JdbcUsername>ervu</JdbcUsername>
      <JdbcPassword>ervu</JdbcPassword>
      <JdbcDriverClassName>org.postgresql.Driver</JdbcDriverClassName>
      <JdbcXaDataSourceClassName>org.postgresql.xa.PGXADataSource</JdbcXaDataSourceClassName>
      <JdbcXaDataSourcePoolSize>50</JdbcXaDataSourcePoolSize>
      <JdbcDatabase>person_registry</JdbcDatabase>
    </SqlConnectionParams>
  </ValidationRule>
</ValidationRules>
@ -1,20 +0,0 @@
# App datasource
DB_APP_NAME=ervu_eks
DB_APP_HOST=db
DB_APP_PORT=5432
DB_APP_USERNAME=ervu_eks
DB_APP_PASSWORD=ervu_eks

# Security datasource
DB_SEC_NAME=ervu_eks
DB_SEC_HOST=db
DB_SEC_PORT=5432
DB_SEC_USERNAME=ervu_eks_sec
DB_SEC_PASSWORD=ervu_eks_sec

# Person datasource
DB_PERSON_NAME=person_registry
DB_PERSON_HOST=10.10.31.118
DB_PERSON_PORT=5432
DB_PERSON_USERNAME=ervu
DB_PERSON_PASSWORD=ervu
@ -1,31 +0,0 @@
version: "3"
services:
  db:
    image: postgres:15-bullseye
    volumes:
      - ./initdb.d:/docker-entrypoint-initdb.d
      - db-data:/var/lib/postgresql/data
    command:
      - "--max_prepared_transactions=100"
    ports:
      - "127.0.0.1:5432:5432"
    environment:
      - POSTGRES_PASSWORD=supersecretpassword
      - TZ="Europe/Moscow"

  webbpm-app:
    image: ervu-eks:latest
    depends_on:
      - db
    ports:
      - "127.0.0.1:9990:9990"
      - "127.0.0.1:8080:8080"
    volumes:
      - ./cde-xml:/cde-xml
    environment:
      - TZ="Europe/Moscow"
    env_file:
      - db.env

volumes:
  db-data:
34
config/docker-compose.tc.yaml
Normal file
@ -0,0 +1,34 @@
#version: "3"
services:
  eks-backend:
    build:
      dockerfile: Dockerfile.tc
    entrypoint: ["java", "-jar", "/home/app/backend.jar"]
    environment:
      - TZ="Europe/Moscow"
      - CONFIG_DATA_EXECUTOR_URL=http://eks-cde:8080/api
    env_file:
      - tc.env

  eks-frontend:
    build:
      dockerfile: Dockerfile.tc
    depends_on:
      - eks-backend
    ports:
      - "80"
    entrypoint: ["nginx", "-g", "daemon off;"]
    environment:
      - TZ="Europe/Moscow"

  eks-cde:
    build:
      dockerfile: Dockerfile.tc
    entrypoint: ["java", "-jar", "/home/app/cde.jar"]
    volumes:
      - ./cde-xml:/cde-xml
    environment:
      - CONFIGDIRECTORY=/cde-xml

volumes:
  db-data:
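A minimal way to run this variant locally, assuming Dockerfile.tc and tc.env exist alongside it (neither file is part of this diff):

cd config
docker compose -f docker-compose.tc.yaml build
docker compose -f docker-compose.tc.yaml up -d
docker compose -f docker-compose.tc.yaml logs -f eks-cde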
@ -1,14 +1,45 @@
version: "3"
services:
  webbpm-app:
    build:
      context: .
      dockerfile: Dockerfile
  db:
    image: postgres:15-bullseye
    volumes:
      - ./initdb.d:/docker-entrypoint-initdb.d
      - db-data:/var/lib/postgresql/data
    command:
      - "--max_prepared_transactions=100"
    ports:
      - "127.0.0.1:5432:5432"
    environment:
      - POSTGRES_PASSWORD=supersecretpassword
      - TZ="Europe/Moscow"

  eks-backend:
    image: ${IMAGE:-eks-app:latest}
    depends_on:
      - db
    entrypoint: ["java", "-jar", "/home/app/backend.jar"]
    environment:
      - TZ="Europe/Moscow"
      - CONFIG_DATA_EXECUTOR_URL=http://eks-cde:8080/api
    env_file:
      - eks.env

  eks-frontend:
    image: ${IMAGE:-eks-app:latest}
    depends_on:
      - eks-backend
    ports:
      - "127.0.0.1:8080:80"
    entrypoint: ["nginx", "-g", "daemon off;"]
    environment:
      - TZ="Europe/Moscow"

  eks-cde:
    image: ${IMAGE:-eks-app:latest}
    entrypoint: ["java", "-jar", "/home/app/cde.jar"]
    volumes:
      - ./cde-xml:/cde-xml
    ports:
      - 8080
      - 8787
      - 9990
    env_file:
      - micord.env
    environment:
      - CONFIGDIRECTORY=/cde-xml

volumes:
  db-data:
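With this compose file the three application services reuse a single image taken from the IMAGE variable, falling back to eks-app:latest. A typical local bring-up is roughly the following sketch, assuming an image has already been built and tagged accordingly:

cd config
IMAGE=eks-app:latest docker compose up -d
docker compose ps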
41
config/eks.env
Normal file
@ -0,0 +1,41 @@
CONFIG_DATA_EXECUTOR_SOCKET_TIMEOUT=10
CONFIG_DATA_EXECUTOR_CONNECTION_TIMEOUT=10
CONFIG_DATA_EXECUTOR_URL=http://localhost:8080/api
WEBBPM_MODE=development
WEBBPM_SECURITY_TOKEN_SECRET_KEY=de2c9de3-0de5-47c1-a8de-0fad6f93873b-com.example-ervu_eks_5-17_10_2025_13:18
SERVER_SERVLET_CONTEXT_PATH=/ervu-eks
BPMN_ENABLE=false
GAR_ENABLE=false
FIAS_ENABLE=false
SPRING_MVC_SERVLET_LOADONSTARTUP=1
SPRING_JACKSON_SERIALIZATION_WRITEDATESASTIMESTAMPS=false
SPRING_JACKSON_SERIALIZATION_WRITEENUMSUSINGTOSTRING=true
SPRING_JACKSON_DESERIALIZATION_READENUMSUSINGTOSTRING=true
SPRING_TASK_SCHEDULING_POOL_SIZE=12

SPRING_DATASOURCE_PERSON_URL=jdbc:postgresql://10.10.31.118:5432/person_registry
SPRING_DATASOURCE_PERSON_USERNAME=ervu
SPRING_DATASOURCE_PERSON_PASSWORD=ervu
SPRING_DATASOURCE_PERSON_DRIVERCLASSNAME=org.postgresql.Driver
SPRING_DATASOURCE_PERSON_DBBEANPACKAGE=com.example.ervu_eks_5.db_beans.person
SPRING_DATASOURCE_PERSON_SQLDIALECT=POSTGRES
SPRING_DATASOURCE_PERSON_DEFAULT=false
SPRING_DATASOURCE_PERSON_MANUALLY=true

SPRING_DATASOURCE_SECURITY_URL=jdbc:postgresql://db:5432/ervu_eks
SPRING_DATASOURCE_SECURITY_USERNAME=ervu_eks_sec
SPRING_DATASOURCE_SECURITY_PASSWORD=ervu_eks_sec
SPRING_DATASOURCE_SECURITY_DRIVERCLASSNAME=org.postgresql.Driver
SPRING_DATASOURCE_SECURITY_DBBEANPACKAGE=com.example.ervu_eks_5.db_beans.security
SPRING_DATASOURCE_SECURITY_SQLDIALECT=POSTGRES
SPRING_DATASOURCE_SECURITY_MAXSIZE=7
SPRING_DATASOURCE_SECURITY_METRICSENABLED=true
SPRING_DATASOURCE_SECURITY_DEFAULT=true
SPRING_DATASOURCE_SECURITY_MANUALLY=true

MONITORING_WHITE_IP_LIST=127.0.0.1, 0:0:0:0:0:0:0:1
MANAGEMENT_ENDPOINTS_WEB_EXPOSURE_INCLUDE=info,health,metrics,prometheus
MANAGEMENT_METRICS_EXPORT_PROMETHEUS_ENABLED=true
LOGGING_LEVEL_ROOT=INFO
LOGGING_LEVEL_ORG_JOOQ_TOOLS=DEBUG
LOGGING_LEVEL_RU_CG_WEBBPM_MODULES_DATABASE_IMPL_ANALYTICS=DEBUG
@ -1,48 +0,0 @@
#! /bin/bash
set -e

function wait_for_server() {
  until `$JBOSS_HOME/bin/jboss-cli.sh -c ":read-attribute(name=server-state)" 2> /dev/null | grep -q running`; do
    echo "Retry ..."
  done
}

echo "dump environment variables to env.properties file"
printenv > env.properties

echo "starting JBoss"
nohup $JBOSS_HOME/bin/standalone.sh --admin-only 1>&2 2>/dev/null &

# running system patches
wait_for_server
$JBOSS_HOME/bin/jboss-cli.sh --connect --file="./patches/system/init.cli" --properties=env.properties
$JBOSS_HOME/bin/jboss-cli.sh --connect --file="./patches/system/add-postgresql-driver.cli" --properties=env.properties
bash "./patches/system/add-demo-user.sh"

# running project patches
find ./patches/ -type f -name '*.cli' -not -path './patches/system/*' -print0 |
  while IFS= read -r -d '' f; do
    wait_for_server
    echo "running $f"
    $JBOSS_HOME/bin/jboss-cli.sh --connect --file="$f" --properties=env.properties
  done;

find ./patches/ -type f -name '*.sh' -not -path './patches/system/*' -print0 |
  while IFS= read -r -d '' f; do
    wait_for_server
    echo "running $f"
    bash "$f"
  done

echo "stopping JBoss"
wait_for_server
$JBOSS_HOME/bin/jboss-cli.sh --connect --command=:shutdown

if ! [[ -z $SERVER_START ]]; then
  echo "starting JBoss in standalone"
  sleep 10 # without this occurs error "address already in use"
  /opt/jboss/wildfly/bin/standalone.sh -c standalone.xml -b 0.0.0.0 -bmanagement 0.0.0.0
else
  echo "cleaning up JBoss logs"
  rm -rf $JBOSS_HOME/standalone/log
fi
@ -1,22 +0,0 @@
TZ=Europe/Moscow

# App datasource
DB_APP_USERNAME=ervu_eks
DB_APP_PASSWORD=ervu_eks
DB_APP_HOST=10.10.31.118
DB_APP_PORT=5432
DB_APP_NAME=ervu_eks

# Security datasource
DB_SEC_USERNAME=ervu_eks_sec
DB_SEC_PASSWORD=ervu_eks_sec
DB_SEC_HOST=10.10.31.118
DB_SEC_PORT=5432
DB_SEC_NAME=ervu_eks

# Person datasource
DB_PERSON_USERNAME=ervu
DB_PERSON_PASSWORD=ervu
DB_PERSON_HOST=10.10.31.118
DB_PERSON_PORT=5432
DB_PERSON_NAME=person_registry
88
config/nginx.conf
Normal file
@ -0,0 +1,88 @@
include /etc/nginx/modules-enabled.d/*.conf;

worker_processes 10;

error_log /var/log/nginx/error.log;

events {
    worker_connections 1024;
}

include /etc/nginx/conf-enabled.d/*.conf;

http {
    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    sendfile on;

    gzip on;
    gzip_types text/plain text/css text/xml application/x-javascript application/atom+xml;

    log_format nginx_main
        '$remote_addr - $remote_user [$time_local] $request '
        '"$status" $body_bytes_sent "$http_referer" '
        '"$http_user_agent" "$http_x_forwarded_for" '
        '"$request_filename" "$gzip_ratio" $upstream_response_time server: $host : $document_root $fastcgi_script_name ';

    server {
        listen 80 default;

        access_log /var/log/nginx/access.log nginx_main;
        error_log /var/log/nginx/error.log error;

        root /usr/share/nginx/html;
        charset utf-8;
        client_max_body_size 32m;

        gzip on;
        gzip_disable "msie6";
        gzip_vary on;
        gzip_proxied any;
        gzip_comp_level 6;
        gzip_buffers 16 8k;
        gzip_http_version 1.1;
        gzip_min_length 256;
        gzip_types
            application/atom+xml
            application/geo+json
            application/javascript
            application/x-javascript
            application/json
            application/ld+json
            application/manifest+json
            application/rdf+xml
            application/rss+xml
            application/xhtml+xml
            application/xml
            font/eot
            font/otf
            font/ttf
            image/svg+xml
            text/css
            text/javascript
            text/plain
            text/xml;

        location ~* \.(?:jpg|jpeg|gif|png|ico|cur|gz|svg|svgz|mp4|mp3|ogg|ogv|webm|htc|woff2|woff|ttf)$ {
            expires 1M;
            access_log off;
            add_header Cache-Control "max-age=2629746, public";
        }

        location /ervu-eks/ {
            proxy_pass http://eks-backend:8080/ervu-eks/;
            proxy_set_header Accept application/json;
            add_header Content-Type application/json;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        }

        location = /health {
            access_log off;
            add_header 'Content-Type' 'application/json';
            return 200 '{"status":"UP"}';
        }
    }
}
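The eks-frontend service starts nginx directly against this configuration, so it can be sanity-checked from the host once the stack from docker-compose.yaml is up. A sketch, assuming the image ships this file as its active nginx.conf and the 127.0.0.1:8080:80 port mapping shown above:

# syntax check inside the running frontend container
docker compose exec eks-frontend nginx -t
# static health endpoint answered by nginx itself
curl -s http://127.0.0.1:8080/health
# application requests proxied to eks-backend under /ervu-eks/
curl -s -o /dev/null -w '%{http_code}\n' http://127.0.0.1:8080/ervu-eks/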
@ -1,3 +0,0 @@
/subsystem=logging/logger=org.jooq.tools:add()
/subsystem=logging/logger=org.jooq.tools:write-attribute(name=level, value=DEBUG)
/subsystem=logging/logger=org.jooq.tools:add-handler(name=CONSOLE)
@ -1,68 +0,0 @@
xa-data-source add \
  --name=AppDS \
  --enabled=true \
  --driver-name=postgresql \
  --jndi-name=java:/webbpm/AppDS \
  --user-name=${env.DB_APP_USERNAME:app_user} \
  --password=${env.DB_APP_PASSWORD:apppassword} \
  --use-ccm=true \
  --valid-connection-checker-class-name=org.jboss.jca.adapters.jdbc.extensions.postgres.PostgreSQLValidConnectionChecker \
  --validate-on-match=false \
  --background-validation=true \
  --background-validation-millis=5000 \
  --exception-sorter-class-name=org.jboss.jca.adapters.jdbc.extensions.postgres.PostgreSQLExceptionSorter \
  --statistics-enabled=true \
  --max-pool-size=50 \
  --query-timeout=300 \
  --xa-datasource-properties=ServerName=${env.DB_APP_HOST:db},PortNumber=${env.DB_APP_PORT:5432},DatabaseName=${env.DB_APP_NAME:app}

xa-data-source add \
  --name=SECURITYDS \
  --enabled=true \
  --driver-name=postgresql \
  --jndi-name=java:/webbpm/security-ds \
  --user-name=${env.DB_SEC_USERNAME:security_user} \
  --password=${env.DB_SEC_PASSWORD:secpassword} \
  --max-pool-size=70 \
  --valid-connection-checker-class-name=org.jboss.jca.adapters.jdbc.extensions.postgres.PostgreSQLValidConnectionChecker \
  --validate-on-match=false \
  --background-validation=true \
  --background-validation-millis=5000 \
  --exception-sorter-class-name=org.jboss.jca.adapters.jdbc.extensions.postgres.PostgreSQLExceptionSorter \
  --statistics-enabled=true \
  --query-timeout=300 \
  --xa-datasource-properties=ServerName=${env.DB_SEC_HOST:db},PortNumber=${env.DB_SEC_PORT:5432},DatabaseName=${env.DB_SEC_NAME:app}

data-source add \
  --name=PERSONDS \
  --enabled=true \
  --driver-name=postgresql \
  --connection-url=jdbc:postgresql://${env.DB_PERSON_HOST:db}:${env.DB_PERSON_PORT:5432}/${env.DB_PERSON_NAME:person_registry} \
  --jndi-name=java:/webbpm/personRegistryDS \
  --user-name=${env.DB_PERSON_USERNAME:ervu} \
  --password=${env.DB_PERSON_PASSWORD:ervu} \
  --valid-connection-checker-class-name=org.jboss.jca.adapters.jdbc.extensions.postgres.PostgreSQLValidConnectionChecker \
  --validate-on-match=false \
  --background-validation=true \
  --background-validation-millis=5000 \
  --exception-sorter-class-name=org.jboss.jca.adapters.jdbc.extensions.postgres.PostgreSQLExceptionSorter \
  --statistics-enabled=true \
  --query-timeout=300 \
  --max-pool-size=10

/system-property=ldap.mapping.login.param:add(value=${env.WEBBPM_LDAP_LOGIN_ATTR:uid})
/system-property=ldap.mapping.org.code.param:add(value=${env.WEBBPM_LDAP_ORGANIZATION_ATTR:ou})
/system-property=jboss.as.management.blocking.timeout:add(value=900)
/subsystem=undertow/server=default-server/http-listener=default/:write-attribute(name=record-request-start-time,value=true)
/subsystem=undertow/server=default-server/host=default-host/setting=access-log:add(pattern=%h %t "%r" %s %b %D)
/system-property=webbpm.cache.hazelcast.hosts:add(value="127.0.0.1")
/system-property=webbpm.cache.hazelcast.outbound_port_definitions:add(value="5801-5820")
/system-property=webbpm.security.session.active.count:add(value="20")
/system-property=security.password.regex:add(value="^((?=(.*\\d){1,})(?=.*[a-zа-яё])(?=.*[A-ZА-ЯЁ]).{8,})$")
/system-property=gar.enable:add(value=false)
/system-property=fias.enable:add(value=false)
/system-property=bpmn.enable:add(value=false)
/system-property=config.data.executor.url:add(value="http://localhost:8080/config-data-executor/api")
/system-property=config.data.executor.socket.timeout:add(value="10")
/system-property=config.data.executor.connection.timeout:add(value="10")
/system-property=configDirectory:add(value="/cde-xml")
@ -1 +0,0 @@
$JBOSS_HOME/bin/add-user.sh demo@example.com demo
@ -1,5 +0,0 @@
/subsystem=datasources/jdbc-driver=postgresql:add( \
  driver-name="postgresql", \
  driver-module-name="org.postgresql", \
  driver-xa-datasource-class-name="org.postgresql.xa.PGXADataSource" \
)
@ -1,14 +0,0 @@
/system-property=webbpm.mode:add(value=production)
/system-property=authentication.method:add(value=form)
/subsystem=undertow/configuration=filter/gzip=gzipFilter:add()
/subsystem=undertow/server=default-server/host=default-host/\
filter-ref=gzipFilter:add(predicate="exists('%{o,Content-Type}') and regex(pattern='(?:application/javascript|text/css|text/html|text/xml|application/json)(;.*)?', value=%{o,Content-Type}, full-match=true)")
/subsystem=undertow/configuration=filter/response-header=vary-header:add(header-name="Vary", header-value="Accept-Encoding")
/subsystem=undertow/server=default-server/host=default-host/filter-ref=vary-header:add()
/subsystem=undertow/server=default-server/http-listener=default/:write-attribute(name=max-post-size,value=${env.MAX_POST_SIZE:104857600})
data-source remove --name=ExampleDS
/subsystem=ee/service=default-bindings:remove
/system-property=jboss.bind.address.management:add(value=0.0.0.0)
/system-property=jboss.bind.address:add(value=0.0.0.0)
module add --name=org.postgresql --resources=./patches/system/postgresql-driver.jar --dependencies=javax.api,javax.transaction.api
shutdown --restart
19
config/settings.xml
Normal file
@ -0,0 +1,19 @@
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
                              https://maven.apache.org/xsd/settings-1.0.0.xsd">
  <mirrors>
    <mirror>
      <id>rtsk-ext</id>
      <name>rtsk ext</name>
      <url>https://nexus.ervu.rt-sk.ru/repository/micord-maven/</url>
      <mirrorOf>micord</mirrorOf>
    </mirror>
    <mirror>
      <id>central</id>
      <name>Central mirror</name>
      <url>https://nexus.ervu.rt-sk.ru/repository/maven-public/</url>
      <mirrorOf>*</mirrorOf>
    </mirror>
  </mirrors>
</settings>
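This settings.xml only declares the Nexus mirrors and is not picked up by Maven automatically; builds that need the internal repositories have to reference it explicitly or install it as the user default. A sketch, run from the repository root:

mvn -s config/settings.xml clean package
# or make it the default for the current user
cp config/settings.xml ~/.m2/settings.xml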
@ -1,590 +0,0 @@
|
|||
<?xml version="1.0" ?>
|
||||
|
||||
<server xmlns="urn:jboss:domain:19.0">
|
||||
<extensions>
|
||||
<extension module="org.jboss.as.clustering.infinispan"/>
|
||||
<extension module="org.jboss.as.connector"/>
|
||||
<extension module="org.jboss.as.deployment-scanner"/>
|
||||
<extension module="org.jboss.as.ee"/>
|
||||
<extension module="org.jboss.as.ejb3"/>
|
||||
<extension module="org.jboss.as.jaxrs"/>
|
||||
<extension module="org.jboss.as.jdr"/>
|
||||
<extension module="org.jboss.as.jmx"/>
|
||||
<extension module="org.jboss.as.jpa"/>
|
||||
<extension module="org.jboss.as.jsf"/>
|
||||
<extension module="org.jboss.as.logging"/>
|
||||
<extension module="org.jboss.as.mail"/>
|
||||
<extension module="org.jboss.as.naming"/>
|
||||
<extension module="org.jboss.as.pojo"/>
|
||||
<extension module="org.jboss.as.remoting"/>
|
||||
<extension module="org.jboss.as.sar"/>
|
||||
<extension module="org.jboss.as.transactions"/>
|
||||
<extension module="org.jboss.as.webservices"/>
|
||||
<extension module="org.jboss.as.weld"/>
|
||||
<extension module="org.wildfly.extension.batch.jberet"/>
|
||||
<extension module="org.wildfly.extension.bean-validation"/>
|
||||
<extension module="org.wildfly.extension.clustering.web"/>
|
||||
<extension module="org.wildfly.extension.core-management"/>
|
||||
<extension module="org.wildfly.extension.discovery"/>
|
||||
<extension module="org.wildfly.extension.ee-security"/>
|
||||
<extension module="org.wildfly.extension.elytron"/>
|
||||
<extension module="org.wildfly.extension.elytron-oidc-client"/>
|
||||
<extension module="org.wildfly.extension.health"/>
|
||||
<extension module="org.wildfly.extension.io"/>
|
||||
<extension module="org.wildfly.extension.metrics"/>
|
||||
<extension module="org.wildfly.extension.microprofile.config-smallrye"/>
|
||||
<extension module="org.wildfly.extension.microprofile.jwt-smallrye"/>
|
||||
<extension module="org.wildfly.extension.microprofile.opentracing-smallrye"/>
|
||||
<extension module="org.wildfly.extension.request-controller"/>
|
||||
<extension module="org.wildfly.extension.security.manager"/>
|
||||
<extension module="org.wildfly.extension.undertow"/>
|
||||
</extensions>
|
||||
<system-properties>
|
||||
<property name="webbpm.mode" value="production"/>
|
||||
<property name="authentication.method" value="form"/>
|
||||
<property name="jboss.bind.address.management" value="0.0.0.0"/>
|
||||
<property name="jboss.bind.address" value="0.0.0.0"/>
|
||||
<property name="ldap.mapping.login.param" value="${env.WEBBPM_LDAP_LOGIN_ATTR:uid}"/>
|
||||
<property name="ldap.mapping.org.code.param" value="${env.WEBBPM_LDAP_ORGANIZATION_ATTR:ou}"/>
|
||||
<property name="jboss.as.management.blocking.timeout" value="900"/>
|
||||
<property name="webbpm.cache.hazelcast.hosts" value="127.0.0.1"/>
|
||||
<property name="webbpm.cache.hazelcast.outbound_port_definitions" value="5801-5820"/>
|
||||
<property name="webbpm.security.session.active.count" value="20"/>
|
||||
<property name="security.password.regex" value="^((?=(.*\d){1,})(?=.*[a-zа-яё])(?=.*[A-ZА-ЯЁ]).{8,})$"/>
|
||||
<property name="gar.enable" value="false"/>
|
||||
<property name="fias.enable" value="false"/>
|
||||
<property name="bpmn.enable" value="false"/>
|
||||
<property name="com.arjuna.ats.arjuna.allowMultipleLastResources" value="true"/>
|
||||
<property name="config.data.executor.url" value="http://localhost:8080/config-data-executor/api"/>
|
||||
<property name="config.data.executor.socket.timeout" value="10"/>
|
||||
<property name="config.data.executor.connection.timeout" value="10"/>
|
||||
<property name="configDirectory" value="C:\work\ervu-eks\config-data-executor\config-examples"/>
|
||||
</system-properties>
|
||||
<management>
|
||||
<audit-log>
|
||||
<formatters>
|
||||
<json-formatter name="json-formatter"/>
|
||||
</formatters>
|
||||
<handlers>
|
||||
<file-handler name="file" formatter="json-formatter" path="audit-log.log" relative-to="jboss.server.data.dir"/>
|
||||
</handlers>
|
||||
<logger log-boot="true" log-read-only="false" enabled="false">
|
||||
<handlers>
|
||||
<handler name="file"/>
|
||||
</handlers>
|
||||
</logger>
|
||||
</audit-log>
|
||||
<management-interfaces>
|
||||
<http-interface http-authentication-factory="management-http-authentication">
|
||||
<http-upgrade enabled="true" sasl-authentication-factory="management-sasl-authentication"/>
|
||||
<socket-binding http="management-http"/>
|
||||
</http-interface>
|
||||
</management-interfaces>
|
||||
<access-control provider="simple">
|
||||
<role-mapping>
|
||||
<role name="SuperUser">
|
||||
<include>
|
||||
<user name="$local"/>
|
||||
</include>
|
||||
</role>
|
||||
</role-mapping>
|
||||
</access-control>
|
||||
</management>
|
||||
<profile>
|
||||
<subsystem xmlns="urn:jboss:domain:logging:8.0">
|
||||
<console-handler name="CONSOLE">
|
||||
<level name="INFO"/>
|
||||
<formatter>
|
||||
<named-formatter name="COLOR-PATTERN"/>
|
||||
</formatter>
|
||||
</console-handler>
|
||||
<periodic-rotating-file-handler name="FILE" autoflush="true">
|
||||
<formatter>
|
||||
<named-formatter name="PATTERN"/>
|
||||
</formatter>
|
||||
<file relative-to="jboss.server.log.dir" path="server.log"/>
|
||||
<suffix value=".yyyy-MM-dd"/>
|
||||
<append value="true"/>
|
||||
</periodic-rotating-file-handler>
|
||||
<logger category="com.arjuna">
|
||||
<level name="WARN"/>
|
||||
</logger>
|
||||
<logger category="io.jaegertracing.Configuration">
|
||||
<level name="WARN"/>
|
||||
</logger>
|
||||
<logger category="org.jboss.as.config">
|
||||
<level name="DEBUG"/>
|
||||
</logger>
|
||||
<logger category="sun.rmi">
|
||||
<level name="WARN"/>
|
||||
</logger>
|
||||
<logger category="org.jooq.tools">
|
||||
<level name="DEBUG"/>
|
||||
<handlers>
|
||||
<handler name="CONSOLE"/>
|
||||
</handlers>
|
||||
</logger>
|
||||
<root-logger>
|
||||
<level name="INFO"/>
|
||||
<handlers>
|
||||
<handler name="CONSOLE"/>
|
||||
<handler name="FILE"/>
|
||||
</handlers>
|
||||
</root-logger>
|
||||
<formatter name="PATTERN">
|
||||
<pattern-formatter pattern="%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p [%c] (%t) %s%e%n"/>
|
||||
</formatter>
|
||||
<formatter name="COLOR-PATTERN">
|
||||
<pattern-formatter pattern="%K{level}%d{HH:mm:ss,SSS} %-5p [%c] (%t) %s%e%n"/>
|
||||
</formatter>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:batch-jberet:3.0">
|
||||
<default-job-repository name="in-memory"/>
|
||||
<default-thread-pool name="batch"/>
|
||||
<job-repository name="in-memory">
|
||||
<in-memory/>
|
||||
</job-repository>
|
||||
<thread-pool name="batch">
|
||||
<max-threads count="10"/>
|
||||
<keepalive-time time="30" unit="seconds"/>
|
||||
</thread-pool>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:bean-validation:1.0"/>
|
||||
<subsystem xmlns="urn:jboss:domain:core-management:1.0"/>
|
||||
<subsystem xmlns="urn:jboss:domain:datasources:7.0">
|
||||
<datasources>
|
||||
<xa-datasource jndi-name="java:/webbpm/AppDS" pool-name="AppDS" enabled="true" use-java-context="true">
|
||||
<xa-datasource-property name="ServerName">10.10.31.118</xa-datasource-property>
|
||||
<xa-datasource-property name="PortNumber">5432</xa-datasource-property>
|
||||
<xa-datasource-property name="DatabaseName">ervu_eks</xa-datasource-property>
|
||||
<driver>postgresql</driver>
|
||||
<security>
|
||||
<user-name>ervu_eks</user-name>
|
||||
<password>ervu_eks</password>
|
||||
</security>
|
||||
<validation>
|
||||
<valid-connection-checker class-name="org.jboss.jca.adapters.jdbc.extensions.postgres.PostgreSQLValidConnectionChecker" />
|
||||
<validate-on-match>false</validate-on-match>
|
||||
<background-validation>true</background-validation>
|
||||
<background-validation-millis>5000</background-validation-millis>
|
||||
<exception-sorter class-name="org.jboss.jca.adapters.jdbc.extensions.postgres.PostgreSQLExceptionSorter" />
|
||||
</validation>
|
||||
</xa-datasource>
|
||||
<xa-datasource jndi-name="java:/webbpm/security-ds" pool-name="SECURITYDS" enabled="true" use-java-context="true">
|
||||
<xa-datasource-property name="ServerName">10.10.31.118</xa-datasource-property>
|
||||
<xa-datasource-property name="PortNumber">5432</xa-datasource-property>
|
||||
<xa-datasource-property name="DatabaseName">ervu_eks</xa-datasource-property>
|
||||
<driver>postgresql</driver>
|
||||
<security>
|
||||
<user-name>ervu_eks_sec</user-name>
|
||||
<password>ervu_eks_sec</password>
|
||||
</security>
|
||||
<validation>
|
||||
<valid-connection-checker class-name="org.jboss.jca.adapters.jdbc.extensions.postgres.PostgreSQLValidConnectionChecker" />
|
||||
<validate-on-match>false</validate-on-match>
|
||||
<background-validation>true</background-validation>
|
||||
<background-validation-millis>5000</background-validation-millis>
|
||||
<exception-sorter class-name="org.jboss.jca.adapters.jdbc.extensions.postgres.PostgreSQLExceptionSorter" />
|
||||
</validation>
|
||||
</xa-datasource>
|
||||
<!-- -->
|
||||
<datasource jndi-name="java:/webbpm/personRegistryDS" pool-name="personRegistryDS" enabled="true" use-java-context="true">
|
||||
<connection-url>jdbc:postgresql://10.10.31.118:5432/person_registry</connection-url>
|
||||
<driver>postgresql</driver>
|
||||
<security>
|
||||
<user-name>ervu</user-name>
|
||||
<password>ervu</password>
|
||||
</security>
|
||||
<validation>
|
||||
<valid-connection-checker class-name="org.jboss.jca.adapters.jdbc.extensions.postgres.PostgreSQLValidConnectionChecker"/>
|
||||
<validate-on-match>false</validate-on-match>
|
||||
<background-validation>true</background-validation>
|
||||
<background-validation-millis>5000</background-validation-millis>
|
||||
<exception-sorter class-name="org.jboss.jca.adapters.jdbc.extensions.postgres.PostgreSQLExceptionSorter"/>
|
||||
</validation>
|
||||
<timeout>
|
||||
<query-timeout>300</query-timeout>
|
||||
</timeout>
|
||||
</datasource>
|
||||
<drivers>
|
||||
<driver name="h2" module="com.h2database.h2">
|
||||
<xa-datasource-class>org.h2.jdbcx.JdbcDataSource</xa-datasource-class>
|
||||
</driver>
|
||||
<driver name="postgresql" module="org.postgresql">
|
||||
<xa-datasource-class>org.postgresql.xa.PGXADataSource</xa-datasource-class>
|
||||
</driver>
|
||||
</drivers>
|
||||
</datasources>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:deployment-scanner:2.0">
|
||||
<deployment-scanner path="deployments" relative-to="jboss.server.base.dir" scan-interval="5000" runtime-failure-causes-rollback="${jboss.deployment.scanner.rollback.on.failure:false}"/>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:discovery:1.0"/>
|
||||
<subsystem xmlns="urn:jboss:domain:distributable-web:2.0" default-session-management="default" default-single-sign-on-management="default">
|
||||
<infinispan-session-management name="default" cache-container="web" granularity="SESSION">
|
||||
<local-affinity/>
|
||||
</infinispan-session-management>
|
||||
<infinispan-single-sign-on-management name="default" cache-container="web" cache="sso"/>
|
||||
<local-routing/>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:ee:6.0">
|
||||
<spec-descriptor-property-replacement>false</spec-descriptor-property-replacement>
|
||||
<concurrent>
|
||||
<context-services>
|
||||
<context-service name="default" jndi-name="java:jboss/ee/concurrency/context/default" use-transaction-setup-provider="true"/>
|
||||
</context-services>
|
||||
<managed-thread-factories>
|
||||
<managed-thread-factory name="default" jndi-name="java:jboss/ee/concurrency/factory/default" context-service="default"/>
|
||||
</managed-thread-factories>
|
||||
<managed-executor-services>
|
||||
<managed-executor-service name="default" jndi-name="java:jboss/ee/concurrency/executor/default" context-service="default" hung-task-termination-period="0" hung-task-threshold="60000" keepalive-time="5000"/>
|
||||
</managed-executor-services>
|
||||
<managed-scheduled-executor-services>
|
||||
<managed-scheduled-executor-service name="default" jndi-name="java:jboss/ee/concurrency/scheduler/default" context-service="default" hung-task-termination-period="0" hung-task-threshold="60000" keepalive-time="3000"/>
|
||||
</managed-scheduled-executor-services>
|
||||
</concurrent>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:ee-security:1.0"/>
|
||||
<subsystem xmlns="urn:jboss:domain:ejb3:9.0">
|
||||
<session-bean>
|
||||
<stateless>
|
||||
<bean-instance-pool-ref pool-name="slsb-strict-max-pool"/>
|
||||
</stateless>
|
||||
<stateful default-access-timeout="5000" cache-ref="simple" passivation-disabled-cache-ref="simple"/>
|
||||
<singleton default-access-timeout="5000"/>
|
||||
</session-bean>
|
||||
<pools>
|
||||
<bean-instance-pools>
|
||||
<strict-max-pool name="mdb-strict-max-pool" derive-size="from-cpu-count" instance-acquisition-timeout="5" instance-acquisition-timeout-unit="MINUTES"/>
|
||||
<strict-max-pool name="slsb-strict-max-pool" derive-size="from-worker-pools" instance-acquisition-timeout="5" instance-acquisition-timeout-unit="MINUTES"/>
|
||||
</bean-instance-pools>
|
||||
</pools>
|
||||
<caches>
|
||||
<cache name="simple"/>
|
||||
<cache name="distributable" passivation-store-ref="infinispan" aliases="passivating clustered"/>
|
||||
</caches>
|
||||
<passivation-stores>
|
||||
<passivation-store name="infinispan" cache-container="ejb" max-size="10000"/>
|
||||
</passivation-stores>
|
||||
<async thread-pool-name="default"/>
|
||||
<timer-service thread-pool-name="default" default-data-store="default-file-store">
|
||||
<data-stores>
|
||||
<file-data-store name="default-file-store" path="timer-service-data" relative-to="jboss.server.data.dir"/>
|
||||
</data-stores>
|
||||
</timer-service>
|
||||
<remote cluster="ejb" connectors="http-remoting-connector" thread-pool-name="default">
|
||||
<channel-creation-options>
|
||||
<option name="MAX_OUTBOUND_MESSAGES" value="1234" type="remoting"/>
|
||||
</channel-creation-options>
|
||||
</remote>
|
||||
<thread-pools>
|
||||
<thread-pool name="default">
|
||||
<max-threads count="10"/>
|
||||
<keepalive-time time="60" unit="seconds"/>
|
||||
</thread-pool>
|
||||
</thread-pools>
|
||||
<default-security-domain value="other"/>
|
||||
<application-security-domains>
|
||||
<application-security-domain name="other" security-domain="ApplicationDomain"/>
|
||||
</application-security-domains>
|
||||
<default-missing-method-permissions-deny-access value="true"/>
|
||||
<statistics enabled="${wildfly.ejb3.statistics-enabled:${wildfly.statistics-enabled:false}}"/>
|
||||
<log-system-exceptions value="true"/>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:wildfly:elytron:15.1" final-providers="combined-providers" disallowed-providers="OracleUcrypto">
|
||||
<providers>
|
||||
<aggregate-providers name="combined-providers">
|
||||
<providers name="elytron"/>
|
||||
<providers name="openssl"/>
|
||||
</aggregate-providers>
|
||||
<provider-loader name="elytron" module="org.wildfly.security.elytron"/>
|
||||
<provider-loader name="openssl" module="org.wildfly.openssl"/>
|
||||
</providers>
|
||||
<audit-logging>
|
||||
<file-audit-log name="local-audit" path="audit.log" relative-to="jboss.server.log.dir" format="JSON"/>
|
||||
</audit-logging>
|
||||
<security-domains>
|
||||
<security-domain name="ManagementDomain" default-realm="ManagementRealm" permission-mapper="default-permission-mapper">
|
||||
<realm name="ManagementRealm" role-decoder="groups-to-roles"/>
|
||||
<realm name="local" role-mapper="super-user-mapper"/>
|
||||
</security-domain>
|
||||
<security-domain name="ApplicationDomain" default-realm="ApplicationRealm" permission-mapper="default-permission-mapper">
|
||||
<realm name="ApplicationRealm" role-decoder="groups-to-roles"/>
|
||||
<realm name="local"/>
|
||||
</security-domain>
|
||||
</security-domains>
|
||||
<security-realms>
|
||||
<identity-realm name="local" identity="$local"/>
|
||||
<properties-realm name="ApplicationRealm">
|
||||
<users-properties path="application-users.properties" relative-to="jboss.server.config.dir" digest-realm-name="ApplicationRealm"/>
|
||||
<groups-properties path="application-roles.properties" relative-to="jboss.server.config.dir"/>
|
||||
</properties-realm>
|
||||
<properties-realm name="ManagementRealm">
|
||||
<users-properties path="mgmt-users.properties" relative-to="jboss.server.config.dir" digest-realm-name="ManagementRealm"/>
|
||||
<groups-properties path="mgmt-groups.properties" relative-to="jboss.server.config.dir"/>
|
||||
</properties-realm>
|
||||
</security-realms>
|
||||
<mappers>
|
||||
<simple-permission-mapper name="default-permission-mapper" mapping-mode="first">
|
||||
<permission-mapping>
|
||||
<principal name="anonymous"/>
|
||||
<permission-set name="default-permissions"/>
|
||||
</permission-mapping>
|
||||
<permission-mapping match-all="true">
|
||||
<permission-set name="login-permission"/>
|
||||
<permission-set name="default-permissions"/>
|
||||
</permission-mapping>
|
||||
</simple-permission-mapper>
|
||||
<constant-realm-mapper name="local" realm-name="local"/>
|
||||
<simple-role-decoder name="groups-to-roles" attribute="groups"/>
|
||||
<constant-role-mapper name="super-user-mapper">
|
||||
<role name="SuperUser"/>
|
||||
</constant-role-mapper>
|
||||
</mappers>
|
||||
<permission-sets>
|
||||
<permission-set name="login-permission">
|
||||
<permission class-name="org.wildfly.security.auth.permission.LoginPermission"/>
|
||||
</permission-set>
|
||||
<permission-set name="default-permissions">
|
||||
<permission class-name="org.wildfly.extension.batch.jberet.deployment.BatchPermission" module="org.wildfly.extension.batch.jberet" target-name="*"/>
|
||||
<permission class-name="org.wildfly.transaction.client.RemoteTransactionPermission" module="org.wildfly.transaction.client"/>
|
||||
<permission class-name="org.jboss.ejb.client.RemoteEJBPermission" module="org.jboss.ejb-client"/>
|
||||
</permission-set>
|
||||
</permission-sets>
|
||||
<http>
|
||||
<http-authentication-factory name="management-http-authentication" security-domain="ManagementDomain" http-server-mechanism-factory="global">
|
||||
<mechanism-configuration>
|
||||
<mechanism mechanism-name="DIGEST">
|
||||
<mechanism-realm realm-name="ManagementRealm"/>
|
||||
</mechanism>
|
||||
</mechanism-configuration>
|
||||
</http-authentication-factory>
|
||||
<http-authentication-factory name="application-http-authentication" security-domain="ApplicationDomain" http-server-mechanism-factory="global">
|
||||
<mechanism-configuration>
|
||||
<mechanism mechanism-name="BASIC">
|
||||
<mechanism-realm realm-name="ApplicationRealm"/>
|
||||
</mechanism>
|
||||
</mechanism-configuration>
|
||||
</http-authentication-factory>
|
||||
<provider-http-server-mechanism-factory name="global"/>
|
||||
</http>
|
||||
<sasl>
|
||||
<sasl-authentication-factory name="management-sasl-authentication" sasl-server-factory="configured" security-domain="ManagementDomain">
|
||||
<mechanism-configuration>
|
||||
<mechanism mechanism-name="JBOSS-LOCAL-USER" realm-mapper="local"/>
|
||||
<mechanism mechanism-name="DIGEST-MD5">
|
||||
<mechanism-realm realm-name="ManagementRealm"/>
|
||||
</mechanism>
|
||||
</mechanism-configuration>
|
||||
</sasl-authentication-factory>
|
||||
<sasl-authentication-factory name="application-sasl-authentication" sasl-server-factory="configured" security-domain="ApplicationDomain">
|
||||
<mechanism-configuration>
|
||||
<mechanism mechanism-name="JBOSS-LOCAL-USER" realm-mapper="local"/>
|
||||
<mechanism mechanism-name="DIGEST-MD5">
|
||||
<mechanism-realm realm-name="ApplicationRealm"/>
|
||||
</mechanism>
|
||||
</mechanism-configuration>
|
||||
</sasl-authentication-factory>
|
||||
<configurable-sasl-server-factory name="configured" sasl-server-factory="elytron">
|
||||
<properties>
|
||||
<property name="wildfly.sasl.local-user.default-user" value="$local"/>
|
||||
<property name="wildfly.sasl.local-user.challenge-path" value="${jboss.server.temp.dir}/auth"/>
|
||||
</properties>
|
||||
</configurable-sasl-server-factory>
|
||||
<mechanism-provider-filtering-sasl-server-factory name="elytron" sasl-server-factory="global">
|
||||
<filters>
|
||||
<filter provider-name="WildFlyElytron"/>
|
||||
</filters>
|
||||
</mechanism-provider-filtering-sasl-server-factory>
|
||||
<provider-sasl-server-factory name="global"/>
|
||||
</sasl>
|
||||
<tls>
|
||||
<key-stores>
|
||||
<key-store name="applicationKS">
|
||||
<credential-reference clear-text="password"/>
|
||||
<implementation type="JKS"/>
|
||||
<file path="application.keystore" relative-to="jboss.server.config.dir"/>
|
||||
</key-store>
|
||||
</key-stores>
|
||||
<key-managers>
|
||||
<key-manager name="applicationKM" key-store="applicationKS" generate-self-signed-certificate-host="localhost">
|
||||
<credential-reference clear-text="password"/>
|
||||
</key-manager>
|
||||
</key-managers>
|
||||
<server-ssl-contexts>
|
||||
<server-ssl-context name="applicationSSC" key-manager="applicationKM"/>
|
||||
</server-ssl-contexts>
|
||||
</tls>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:wildfly:elytron-oidc-client:1.0"/>
|
||||
<subsystem xmlns="urn:wildfly:health:1.0" security-enabled="false"/>
|
||||
<subsystem xmlns="urn:jboss:domain:infinispan:13.0">
|
||||
<cache-container name="ejb" default-cache="passivation" marshaller="PROTOSTREAM" aliases="sfsb" modules="org.wildfly.clustering.ejb.infinispan">
|
||||
<local-cache name="passivation">
|
||||
<expiration interval="0"/>
|
||||
<file-store passivation="true" purge="false"/>
|
||||
</local-cache>
|
||||
</cache-container>
|
||||
<cache-container name="web" default-cache="passivation" marshaller="PROTOSTREAM" modules="org.wildfly.clustering.web.infinispan">
|
||||
<local-cache name="passivation">
|
||||
<expiration interval="0"/>
|
||||
<file-store passivation="true" purge="false"/>
|
||||
</local-cache>
|
||||
<local-cache name="sso">
|
||||
<expiration interval="0"/>
|
||||
</local-cache>
|
||||
</cache-container>
|
||||
<cache-container name="server" default-cache="default" marshaller="PROTOSTREAM" modules="org.wildfly.clustering.server">
|
||||
<local-cache name="default">
|
||||
<expiration interval="0"/>
|
||||
</local-cache>
|
||||
</cache-container>
|
||||
<cache-container name="hibernate" marshaller="JBOSS" modules="org.infinispan.hibernate-cache">
|
||||
<local-cache name="entity">
|
||||
<heap-memory size="10000"/>
|
||||
<expiration max-idle="100000"/>
|
||||
</local-cache>
|
||||
<local-cache name="local-query">
|
||||
<heap-memory size="10000"/>
|
||||
<expiration max-idle="100000"/>
|
||||
</local-cache>
|
||||
<local-cache name="timestamps">
|
||||
<expiration interval="0"/>
|
||||
</local-cache>
|
||||
<local-cache name="pending-puts">
|
||||
<expiration max-idle="60000"/>
|
||||
</local-cache>
|
||||
</cache-container>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:io:3.0">
|
||||
<worker name="default"/>
|
||||
<buffer-pool name="default"/>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:jaxrs:2.0"/>
|
||||
<subsystem xmlns="urn:jboss:domain:jca:5.0">
|
||||
<archive-validation enabled="true" fail-on-error="true" fail-on-warn="false"/>
|
||||
<bean-validation enabled="true"/>
|
||||
<default-workmanager>
|
||||
<short-running-threads>
|
||||
<core-threads count="50"/>
|
||||
<queue-length count="50"/>
|
||||
<max-threads count="50"/>
|
||||
<keepalive-time time="10" unit="seconds"/>
|
||||
</short-running-threads>
|
||||
<long-running-threads>
|
||||
<core-threads count="50"/>
|
||||
<queue-length count="50"/>
|
||||
<max-threads count="50"/>
|
||||
<keepalive-time time="10" unit="seconds"/>
|
||||
</long-running-threads>
|
||||
</default-workmanager>
|
||||
<cached-connection-manager/>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:jdr:1.0"/>
|
||||
<subsystem xmlns="urn:jboss:domain:jmx:1.3">
|
||||
<expose-resolved-model/>
|
||||
<expose-expression-model/>
|
||||
<remoting-connector/>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:jpa:1.1">
|
||||
<jpa default-extended-persistence-inheritance="DEEP"/>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:jsf:1.1"/>
|
||||
<subsystem xmlns="urn:jboss:domain:mail:4.0">
|
||||
<mail-session name="default" jndi-name="java:jboss/mail/Default">
|
||||
<smtp-server outbound-socket-binding-ref="mail-smtp"/>
|
||||
</mail-session>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:wildfly:metrics:1.0" security-enabled="false" exposed-subsystems="*" prefix="${wildfly.metrics.prefix:wildfly}"/>
|
||||
<subsystem xmlns="urn:wildfly:microprofile-config-smallrye:2.0"/>
|
||||
<subsystem xmlns="urn:wildfly:microprofile-jwt-smallrye:1.0"/>
|
||||
<subsystem xmlns="urn:wildfly:microprofile-opentracing-smallrye:3.0" default-tracer="jaeger">
|
||||
<jaeger-tracer name="jaeger">
|
||||
<sampler-configuration sampler-type="const" sampler-param="1.0"/>
|
||||
</jaeger-tracer>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:naming:2.0">
|
||||
<remote-naming/>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:pojo:1.0"/>
|
||||
<subsystem xmlns="urn:jboss:domain:remoting:4.0">
|
||||
<http-connector name="http-remoting-connector" connector-ref="default" sasl-authentication-factory="application-sasl-authentication"/>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:request-controller:1.0"/>
|
||||
<subsystem xmlns="urn:jboss:domain:resource-adapters:6.1"/>
|
||||
<subsystem xmlns="urn:jboss:domain:sar:1.0"/>
|
||||
<subsystem xmlns="urn:jboss:domain:security-manager:1.0">
|
||||
<deployment-permissions>
|
||||
<maximum-set>
|
||||
<permission class="java.security.AllPermission"/>
|
||||
</maximum-set>
|
||||
</deployment-permissions>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:transactions:6.0">
|
||||
<core-environment node-identifier="${jboss.tx.node.id:1}">
|
||||
<process-id>
|
||||
<uuid/>
|
||||
</process-id>
|
||||
</core-environment>
|
||||
<recovery-environment socket-binding="txn-recovery-environment" status-socket-binding="txn-status-manager"/>
|
||||
<coordinator-environment statistics-enabled="${wildfly.transactions.statistics-enabled:${wildfly.statistics-enabled:false}}"/>
|
||||
<object-store path="tx-object-store" relative-to="jboss.server.data.dir"/>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:undertow:12.0" default-server="default-server" default-virtual-host="default-host" default-servlet-container="default" default-security-domain="other" statistics-enabled="${wildfly.undertow.statistics-enabled:${wildfly.statistics-enabled:false}}">
|
||||
<buffer-cache name="default"/>
|
||||
<server name="default-server">
|
||||
<http-listener name="default" socket-binding="http" max-post-size="${env.MAX_POST_SIZE:104857600}" record-request-start-time="true" redirect-socket="https" enable-http2="true"/>
|
||||
<https-listener name="https" socket-binding="https" ssl-context="applicationSSC" enable-http2="true"/>
|
||||
<host name="default-host" alias="localhost">
|
||||
<location name="/" handler="welcome-content"/>
|
||||
<filter-ref name="cache-control" predicate="path-suffix['.bpmn'] or path-suffix['.bpmn2']"/>
|
||||
<http-invoker http-authentication-factory="application-http-authentication"/>
|
||||
</host>
|
||||
</server>
|
||||
<servlet-container name="default">
|
||||
<jsp-config/>
|
||||
<websockets/>
|
||||
</servlet-container>
|
||||
<handlers>
|
||||
<file name="welcome-content" path="${jboss.home.dir}/welcome-content"/>
|
||||
</handlers>
|
||||
<filters>
|
||||
<response-header name="cache-control" header-name="Cache-Control" header-value="no-store"/>
|
||||
</filters>
|
||||
<application-security-domains>
|
||||
<application-security-domain name="other" security-domain="ApplicationDomain"/>
|
||||
</application-security-domains>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:webservices:2.0" statistics-enabled="${wildfly.webservices.statistics-enabled:${wildfly.statistics-enabled:false}}">
|
||||
<wsdl-host>${jboss.bind.address:127.0.0.1}</wsdl-host>
|
||||
<endpoint-config name="Standard-Endpoint-Config"/>
|
||||
<endpoint-config name="Recording-Endpoint-Config">
|
||||
<pre-handler-chain name="recording-handlers" protocol-bindings="##SOAP11_HTTP ##SOAP11_HTTP_MTOM ##SOAP12_HTTP ##SOAP12_HTTP_MTOM">
|
||||
<handler name="RecordingHandler" class="org.jboss.ws.common.invocation.RecordingServerHandler"/>
|
||||
</pre-handler-chain>
|
||||
</endpoint-config>
|
||||
<client-config name="Standard-Client-Config"/>
|
||||
</subsystem>
|
||||
<subsystem xmlns="urn:jboss:domain:weld:4.0"/>
|
||||
</profile>
|
||||
<interfaces>
|
||||
<interface name="management">
|
||||
<inet-address value="${jboss.bind.address.management:0.0.0.0}"/>
|
||||
</interface>
|
||||
<interface name="public">
|
||||
<inet-address value="${jboss.bind.address:0.0.0.0}"/>
|
||||
</interface>
|
||||
</interfaces>
|
||||
<socket-binding-group name="standard-sockets" default-interface="public" port-offset="${jboss.socket.binding.port-offset:0}">
|
||||
<socket-binding name="ajp" port="${jboss.ajp.port:8009}"/>
|
||||
<socket-binding name="http" port="${jboss.http.port:8080}"/>
|
||||
<socket-binding name="https" port="${jboss.https.port:8443}"/>
|
||||
<socket-binding name="management-http" interface="management" port="${jboss.management.http.port:9990}"/>
|
||||
<socket-binding name="management-https" interface="management" port="${jboss.management.https.port:9993}"/>
|
||||
<socket-binding name="txn-recovery-environment" port="4712"/>
|
||||
<socket-binding name="txn-status-manager" port="4713"/>
|
||||
<outbound-socket-binding name="mail-smtp">
|
||||
<remote-destination host="${jboss.mail.server.host:localhost}" port="${jboss.mail.server.port:25}"/>
|
||||
</outbound-socket-binding>
|
||||
</socket-binding-group>
|
||||
</server>
|
||||
|
|
@ -1,66 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>ru.micord.ervu</groupId>
|
||||
<artifactId>eks</artifactId>
|
||||
<version>1.0.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<groupId>ru.micord.ervu.eks</groupId>
|
||||
<artifactId>distribution</artifactId>
|
||||
<packaging>ear</packaging>
|
||||
|
||||
<properties>
|
||||
<backendContext>/backend</backendContext>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>ru.micord.ervu.eks</groupId>
|
||||
<artifactId>backend</artifactId>
|
||||
<type>war</type>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ru.micord.ervu.eks</groupId>
|
||||
<artifactId>frontend</artifactId>
|
||||
<type>war</type>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-ear-plugin</artifactId>
|
||||
<version>3.2.0</version>
|
||||
<configuration>
|
||||
<modules>
|
||||
<webModule>
|
||||
<groupId>ru.micord.ervu.eks</groupId>
|
||||
<artifactId>frontend</artifactId>
|
||||
<contextRoot>/</contextRoot>
|
||||
<bundleFileName>frontend.war</bundleFileName>
|
||||
</webModule>
|
||||
<webModule>
|
||||
<groupId>ru.micord.ervu.eks</groupId>
|
||||
<artifactId>backend</artifactId>
|
||||
<contextRoot>${backendContext}</contextRoot>
|
||||
<bundleFileName>backend.war</bundleFileName>
|
||||
</webModule>
|
||||
</modules>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
<finalName>${project.parent.artifactId}</finalName>
|
||||
</build>
|
||||
|
||||
<profiles>
|
||||
<profile>
|
||||
<id>enable-version-in-url</id>
|
||||
<properties>
|
||||
<backendContext>/backend-${project.version}</backendContext>
|
||||
</properties>
|
||||
</profile>
|
||||
</profiles>
|
||||
</project>
|
||||
12
frontend/.gitignore
vendored
Normal file
@ -0,0 +1,12 @@
# frameworks dirs
.angular
.nx

# compiled output
dist
tmp
out-tsc

# generated by webbpm
tsconfig.base.json
src/ts/page.routing.ts
3
frontend/.nxignore
Normal file
@ -0,0 +1,3 @@
!modules/generated/
!tsconfig.base.json
!src/ts/page.routing.ts
6
frontend/.prettierignore
Normal file
@ -0,0 +1,6 @@
# Add files here to ignore them from prettier formatting
/dist
/coverage
/.nx/cache
/.nx/workspace-data
.angular
3
frontend/.prettierrc
Normal file
@ -0,0 +1,3 @@
{
  "singleQuote": true
}
@ -1,71 +0,0 @@
|
|||
{
|
||||
"$schema": "./node_modules/@angular/cli/lib/config/schema.json",
|
||||
"version": 1,
|
||||
"newProjectRoot": "projects",
|
||||
"projects": {
|
||||
"webbpm-frontend": {
|
||||
"root": "",
|
||||
"sourceRoot": "src",
|
||||
"projectType": "application",
|
||||
"architect": {
|
||||
"build": {
|
||||
"builder": "@angular-devkit/build-angular:browser",
|
||||
"options": {
|
||||
"outputPath": "dist",
|
||||
"index": "src/index.html",
|
||||
"main": "src/ts/main.ts",
|
||||
"tsConfig": "src/tsconfig.json",
|
||||
"polyfills": "src/ts/polyfills.ts",
|
||||
"assets": [
|
||||
"src/resources"
|
||||
],
|
||||
"styles": [
|
||||
],
|
||||
"scripts": [
|
||||
"node_modules/jquery/dist/jquery.min.js",
|
||||
"node_modules/moment/min/moment-with-locales.js",
|
||||
"node_modules/moment-timezone/builds/moment-timezone-with-data.min.js",
|
||||
"node_modules/eonasdan-bootstrap-datetimepicker/build/js/bootstrap-datetimepicker.min.js",
|
||||
"node_modules/selectize/dist/js/standalone/selectize.min.js",
|
||||
"node_modules/downloadjs/download.min.js"
|
||||
]
|
||||
},
|
||||
"configurations": {
|
||||
"production": {
|
||||
"optimization": true,
|
||||
"outputHashing": "all",
|
||||
"sourceMap": false,
|
||||
"extractCss": true,
|
||||
"namedChunks": false,
|
||||
"aot": true,
|
||||
"extractLicenses": true,
|
||||
"vendorChunk": false,
|
||||
"buildOptimizer": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"serve": {
|
||||
"builder": "@angular-devkit/build-angular:dev-server",
|
||||
"options": {
|
||||
"browserTarget": "webbpm-frontend:build"
|
||||
},
|
||||
"configurations": {}
|
||||
},
|
||||
"extract-i18n": {
|
||||
"builder": "@angular-devkit/build-angular:extract-i18n",
|
||||
"options": {
|
||||
"browserTarget": "webbpm-frontend:build"
|
||||
}
|
||||
},
|
||||
"lint": {
|
||||
"builder": "@angular-devkit/build-angular:tslint",
|
||||
"options": {
|
||||
"tsConfig": [],
|
||||
"exclude": []
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"defaultProject": "webbpm-frontend"
|
||||
}
|
||||
42
frontend/eslint.config.mjs
Normal file

@ -0,0 +1,42 @@
import nx from '@nx/eslint-plugin';

export default [
  ...nx.configs['flat/base'],
  ...nx.configs['flat/typescript'],
  ...nx.configs['flat/javascript'],
  {
    ignores: ['**/dist'],
  },
  {
    files: ['**/*.ts', '**/*.tsx', '**/*.js', '**/*.jsx'],
    rules: {
      '@nx/enforce-module-boundaries': [
        'error',
        {
          enforceBuildableLibDependency: true,
          allow: ['^.*/eslint(\\.base)?\\.config\\.[cm]?js$'],
          depConstraints: [
            {
              sourceTag: '*',
              onlyDependOnLibsWithTags: ['*'],
            },
          ],
        },
      ],
    },
  },
  {
    files: [
      '**/*.ts',
      '**/*.tsx',
      '**/*.cts',
      '**/*.mts',
      '**/*.js',
      '**/*.jsx',
      '**/*.cjs',
      '**/*.mjs',
    ],
    // Override or add rules here
    rules: {},
  },
];
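
The final override block in eslint.config.mjs deliberately leaves rules empty as an extension point. A minimal sketch of how a project-specific rule could be added there (the rule and its options are illustrative assumptions, not part of this commit):

// Hypothetical stand-alone flat config extending the Nx presets above;
// the added rule is an example only and does not come from this commit.
import nx from '@nx/eslint-plugin';

export default [
  ...nx.configs['flat/typescript'],
  {
    files: ['**/*.ts', '**/*.tsx'],
    rules: {
      // Example: report unused variables as warnings, ignoring _-prefixed args.
      '@typescript-eslint/no-unused-vars': ['warn', { argsIgnorePattern: '^_' }],
    },
  },
];
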
@ -1,23 +1,14 @@
<!DOCTYPE html>
<html>
<head>
  <title>ervu-eks</title>
  <meta http-equiv="content-type" content="text/html; charset=UTF-8">
  <link rel="icon" type="image/png" href="src/resources/img/logo.png"/>
  <link rel="stylesheet" href="src/resources/css/style.css"/>

  <script src="node_modules/core-js/client/shim.min.js"></script>
  <script src="node_modules/zone.js/dist/zone.js"></script>
  <script src="node_modules/reflect-metadata/Reflect.js"></script>
  <script src="node_modules/systemjs/dist/system.src.js"></script>
  <script src="systemjs.config.js"></script>
  <script>
    System.import('webbpm').catch(function (err) {
      console.error(err);
    });
  </script>
</head>
<body webbpm class="webbpm ervu-eks">
<div class="progress"></div>
</body>
</html>

<!DOCTYPE html>
<html>
<head>
  <title>ervu-eks</title>
  <meta http-equiv="content-type" content="text/html; charset=UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1"/>
  <link rel="icon" type="image/png" href="resources/img/logo.png"/>
</head>
<body class="webbpm ervu-eks">
<app-root>
  <div class="progress"></div>
</app-root>
</body>
</html>
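
The reworked index.html drops the SystemJS loader and leaves bootstrapping to the Angular build: the <app-root> element is replaced once the root module starts. A minimal sketch of the kind of main.ts entry point that mounts it (the AppModule name and path are assumptions, not taken from this commit):

// Minimal bootstrap sketch for an <app-root>-based Angular application.
// AppModule and its import path are assumed; the real entry point may differ.
import { platformBrowserDynamic } from '@angular/platform-browser-dynamic';
import { AppModule } from './app/app.module';

platformBrowserDynamic()
  .bootstrapModule(AppModule)
  .catch((err) => console.error(err));
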
@ -1,11 +0,0 @@
<!DOCTYPE html>
<html>
<head>
  <title>ervu-eks</title>
  <meta http-equiv="content-type" content="text/html; charset=UTF-8">
  <link rel="icon" type="image/png" href="src/resources/img/logo.png"/>
</head>
<body webbpm class="webbpm ervu-eks">
<div class="progress"></div>
</body>
</html>

Some files were not shown because too many files have changed in this diff.