Merge remote-tracking branch 'origin/feature/SUPPORT-8474_id_ervu' into feature/SUPPORT-8411_fixed
# Conflicts:
#	backend/src/main/java/ru/micord/ervu/kafka/ReplyingKafkaConfig.java
#	backend/src/main/java/ru/micord/ervu/security/esia/service/EsiaAuthService.java
#	config/patches/default.cli
#	config/standalone/dev/standalone.xml
commit 5bb0f2f92d
25 changed files with 788 additions and 9 deletions
@@ -35,7 +35,8 @@ import org.springframework.web.servlet.config.annotation.EnableWebMvc;
     "component.addresses",
     "gen",
     "ru.cg",
-    "ru.micord"
+    "ru.micord",
+    "ervu"
 }, excludeFilters = {
     @ComponentScan.Filter(type = FilterType.REGEX, pattern = "security.WebSecurityConfig")
 })
@@ -0,0 +1,63 @@
package ervu.client.classified;

import java.io.*;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import java.util.zip.ZipInputStream;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.retry.annotation.Backoff;
import org.springframework.retry.annotation.Retryable;
import org.springframework.stereotype.Component;

/**
 * @author Artyom Hackimullin
 */
@Component
public class ClassifierOrgClient {

  private static final Logger logger = LoggerFactory.getLogger(ClassifierOrgClient.class);

  @Value("${ervu.esnsi.classifier.url.load:#{null}}")
  private String uri;

  @Retryable(value = {TimeoutException.class}, backoff = @Backoff(delay = 2000))
  public String getJsonClassifierOrgResponse() {
    HttpClient client = HttpClient.newHttpClient();
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create(uri))
        .GET()
        .build();
    try {
      HttpResponse<InputStream> response = client.send(request,
          HttpResponse.BodyHandlers.ofInputStream()
      );
      if (response.statusCode() >= 200 && response.statusCode() <= 202) {
        return unzipJsonFile(new ZipInputStream(response.body()));
      }
      logger.debug("Response unsuccessful. Json file has not been unzipped.");
    }
    catch (IOException | InterruptedException e) {
      logger.error(e.getMessage(), e);
    }
    return null;
  }

  private String unzipJsonFile(ZipInputStream zis) throws IOException {
    if (zis.getNextEntry() != null) {
      ByteArrayInputStream isr = new ByteArrayInputStream(zis.readAllBytes());
      try (BufferedReader br = new BufferedReader(new InputStreamReader(isr))) {
        return br.lines().collect(Collectors.joining(System.lineSeparator()));
      }
    }
    logger.error("Zip archive is null");
    return null;
  }
}
@@ -38,7 +38,7 @@ public class FileUploadWebDavClient {
         .PUT(HttpRequest.BodyPublishers.ofByteArray(multipartFile.getBytes())).build();

     HttpResponse<String> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
-    logger.debug("Response starus code: {}", response.statusCode());
+    logger.debug("Response status code: {}", response.statusCode());
     return (response.statusCode() >= 200) && (response.statusCode() <= 202);
   }
   catch (IOException | InterruptedException e) {
@@ -0,0 +1,13 @@
package ervu.dao.classifier;

import ervu.service.classifier.model.ClassifierAttributeModel;

/**
 * @author Artyom Hackimullin
 */
public interface ClassifierAttributeDao {
  void save(ClassifierAttributeModel[] classifierAttributeModels);

  void deleteIfNotExistRecords();
}
@@ -0,0 +1,45 @@
package ervu.dao.classifier;

import java.util.Arrays;
import java.util.UUID;

import ervu.service.classifier.model.ClassifierAttributeModel;
import org.jooq.DSLContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;

import static ervu_lkrp_ul.ervu_lkrp_ul.db_beans.public_.Tables.RECORD_ATTRIBUTES;
import static ervu_lkrp_ul.ervu_lkrp_ul.db_beans.public_.tables.ClassifierAttributes.CLASSIFIER_ATTRIBUTES;

/**
 * @author Artyom Hackimullin
 */
@Repository
public class ClassifierAttributeDaoImpl implements ClassifierAttributeDao {

  @Autowired
  private DSLContext dsl;

  public void save(ClassifierAttributeModel[] classifierAttributeModels) {
    var queries = Arrays.stream(classifierAttributeModels).map(attribute -> {
      var uid = UUID.fromString(attribute.getUid());
      return dsl.insertInto(CLASSIFIER_ATTRIBUTES)
          .set(CLASSIFIER_ATTRIBUTES.CLASSIFIER_ATTRIBUTE_ID, uid)
          .set(CLASSIFIER_ATTRIBUTES.ATTRIBUTE_NAME, attribute.getName())
          .onConflict(CLASSIFIER_ATTRIBUTES.CLASSIFIER_ATTRIBUTE_ID)
          .doUpdate()
          .set(CLASSIFIER_ATTRIBUTES.ATTRIBUTE_NAME, attribute.getName());
    }).toList();
    dsl.batch(queries).execute();
  }

  @Override
  public void deleteIfNotExistRecords() {
    dsl.deleteFrom(CLASSIFIER_ATTRIBUTES).whereNotExists(
            dsl.selectOne()
                .from(RECORD_ATTRIBUTES)
                .where(RECORD_ATTRIBUTES.ATTRIBUTE_ID.eq(CLASSIFIER_ATTRIBUTES.CLASSIFIER_ATTRIBUTE_ID)))
        .execute();
  }

}
@@ -0,0 +1,17 @@
package ervu.dao.classifier;

import ervu.service.classifier.model.RecordModel;

/**
 * @author Artyom Hackimullin
 */
public interface RecordAttributesDao {
  void save(RecordModel[] recordModels, String version);

  String fetchTitleByLeg(String leg);

  void deleteAllByVersion(String version);

  String fetchVersion();
}
@@ -0,0 +1,83 @@
package ervu.dao.classifier;

import java.util.Arrays;
import java.util.UUID;

import ervu.service.classifier.model.RecordModel;
import org.jooq.DSLContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;

import static ervu_lkrp_ul.ervu_lkrp_ul.db_beans.public_.tables.ClassifierAttributes.CLASSIFIER_ATTRIBUTES;
import static ervu_lkrp_ul.ervu_lkrp_ul.db_beans.public_.tables.RecordAttributes.RECORD_ATTRIBUTES;

/**
 * @author Artyom Hackimullin
 */
@Repository
public class RecordAttributesDaoImpl implements RecordAttributesDao {

  @Autowired
  private DSLContext dsl;

  @Override
  public void save(RecordModel[] recordModels, String version) {
    var queries = Arrays.stream(recordModels)
        .flatMap(it -> Arrays.stream(it.getAttributeValues())
            .map(attribute -> {
              var recordUid = UUID.fromString(it.getUid());
              var attributeUid = UUID.fromString(attribute.getAttributeUid());
              var value = attribute.getValue();

              return dsl.insertInto(RECORD_ATTRIBUTES,
                      RECORD_ATTRIBUTES.RECORD_ID,
                      RECORD_ATTRIBUTES.ATTRIBUTE_ID,
                      RECORD_ATTRIBUTES.ATTRIBUTE_VALUE,
                      RECORD_ATTRIBUTES.VERSION
                  )
                  .values(recordUid, attributeUid, value, version)
                  .onConflict(RECORD_ATTRIBUTES.RECORD_ID, RECORD_ATTRIBUTES.ATTRIBUTE_ID)
                  .doUpdate()
                  .set(RECORD_ATTRIBUTES.ATTRIBUTE_VALUE, value)
                  .set(RECORD_ATTRIBUTES.VERSION, version)
                  .where(RECORD_ATTRIBUTES.RECORD_ID.eq(recordUid)
                      .and(RECORD_ATTRIBUTES.ATTRIBUTE_ID.eq(attributeUid)));
            }))
        .toList();

    dsl.batch(queries).execute();
  }

  @Override
  public String fetchVersion() {
    return dsl.select(RECORD_ATTRIBUTES.VERSION)
        .from(RECORD_ATTRIBUTES)
        .limit(1)
        .fetchOptional(RECORD_ATTRIBUTES.VERSION)
        .orElse("0");
  }

  @Override
  public String fetchTitleByLeg(String leg) {
    return dsl.select(RECORD_ATTRIBUTES.ATTRIBUTE_VALUE)
        .from(RECORD_ATTRIBUTES)
        .join(CLASSIFIER_ATTRIBUTES)
        .on(RECORD_ATTRIBUTES.ATTRIBUTE_ID.eq(CLASSIFIER_ATTRIBUTES.CLASSIFIER_ATTRIBUTE_ID))
        .where(CLASSIFIER_ATTRIBUTES.ATTRIBUTE_NAME.eq("title")
            .and(RECORD_ATTRIBUTES.RECORD_ID.eq(
                dsl.select(RECORD_ATTRIBUTES.RECORD_ID)
                    .from(RECORD_ATTRIBUTES)
                    .where(RECORD_ATTRIBUTES.ATTRIBUTE_VALUE.equal(leg))
                    .fetchOneInto(UUID.class)))
        )
        .fetchOneInto(String.class);
  }

  @Override
  public void deleteAllByVersion(String version) {
    dsl.deleteFrom(RECORD_ATTRIBUTES)
        .where(RECORD_ATTRIBUTES.VERSION.eq(version))
        .execute();
  }
}
@@ -0,0 +1,10 @@
package ervu.service.classifier;

/**
 * @author Artyom Hackimullin
 */
public interface RecordAttributesService {

  String findTitleByLeg(String leg);

}
@@ -0,0 +1,22 @@
package ervu.service.classifier;

import ervu.dao.classifier.RecordAttributesDao;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

/**
 * @author Artyom Hackimullin
 */
@Service
public class RecordAttributesServiceImpl implements RecordAttributesService {

  @Autowired
  private RecordAttributesDao recordAttributesDao;

  @Override
  @Transactional(readOnly = true)
  public String findTitleByLeg(String leg) {
    return recordAttributesDao.fetchTitleByLeg(leg);
  }
}
@@ -0,0 +1,94 @@
package ervu.service.classifier.model;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

/**
 * @author Artyom Hackimullin
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class ClassifierAttributeModel {
  private static final long serialVersionUID = 1L;

  private String uid;

  private String type;

  private String name;

  private String techName;

  private Boolean required;

  private Boolean unique;

  private Integer length;

  public String getUid() {
    return uid;
  }

  public void setUid(String uid) {
    this.uid = uid;
  }

  public String getType() {
    return type;
  }

  public void setType(String type) {
    this.type = type;
  }

  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  public String getTechName() {
    return techName;
  }

  public void setTechName(String techName) {
    this.techName = techName;
  }

  public Boolean getRequired() {
    return required;
  }

  public void setRequired(Boolean required) {
    this.required = required;
  }

  public Boolean getUnique() {
    return unique;
  }

  public void setUnique(Boolean unique) {
    this.unique = unique;
  }

  public Integer getLength() {
    return length;
  }

  public void setLength(Integer length) {
    this.length = length;
  }

  @Override
  public String toString() {
    return "ClassifierAttribute{" +
        "uid='" + uid + '\'' +
        ", type='" + type + '\'' +
        ", name='" + name + '\'' +
        ", techName='" + techName + '\'' +
        ", required=" + required +
        ", unique=" + unique +
        ", length=" + length +
        '}';
  }
}
@@ -0,0 +1,41 @@
package ervu.service.classifier.model;

import java.io.Serializable;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

/**
 * @author Artyom Hackimullin
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class ClassifierFormModel implements Serializable {
  private static final long serialVersionUID = 1L;

  private ClassifierOrgModel classifier;

  private ClassifierNodeModel data;

  public ClassifierOrgModel getClassifier() {
    return classifier;
  }

  public void setClassifier(ClassifierOrgModel classifier) {
    this.classifier = classifier;
  }

  public ClassifierNodeModel getData() {
    return data;
  }

  public void setData(ClassifierNodeModel data) {
    this.data = data;
  }

  @Override
  public String toString() {
    return "ClassifierFormModel{" +
        "classifier=" + classifier +
        ", data=" + data +
        '}';
  }
}
@@ -0,0 +1,30 @@
package ervu.service.classifier.model;

import java.io.Serializable;

/**
 * @author Artyom Hackimullin
 */
public class ClassifierNodeModel implements Serializable {
  private static final long serialVersionUID = 1L;

  private String classifierUid;

  private RecordModel[] records;

  public String getClassifierUid() {
    return classifierUid;
  }

  public void setClassifierUid(String classifierUid) {
    this.classifierUid = classifierUid;
  }

  public RecordModel[] getRecords() {
    return records;
  }

  public void setRecords(RecordModel[] records) {
    this.records = records;
  }
}
@@ -0,0 +1,155 @@
package ervu.service.classifier.model;

import java.io.Serializable;
import java.time.LocalDateTime;
import java.util.Arrays;

import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

/**
 * @author Artyom Hackimullin
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class ClassifierOrgModel implements Serializable {
  private static final long serialVersionUID = 1L;

  private String uid;

  private String code;

  private String name;

  private String description;

  private String version;

  private String publicId;

  private String techName;

  private String updatePeriod;

  private ClassifierAttributeModel[] attributes;

  @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSX")
  private LocalDateTime revisionTimestamp;

  private String keyAttributeUid;

  private String type;

  public String getUid() {
    return uid;
  }

  public void setUid(String uid) {
    this.uid = uid;
  }

  public String getCode() {
    return code;
  }

  public void setCode(String code) {
    this.code = code;
  }

  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  public String getDescription() {
    return description;
  }

  public void setDescription(String description) {
    this.description = description;
  }

  public String getVersion() {
    return version;
  }

  public void setVersion(String version) {
    this.version = version;
  }

  public String getPublicId() {
    return publicId;
  }

  public void setPublicId(String publicId) {
    this.publicId = publicId;
  }

  public String getTechName() {
    return techName;
  }

  public void setTechName(String techName) {
    this.techName = techName;
  }

  public String getUpdatePeriod() {
    return updatePeriod;
  }

  public void setUpdatePeriod(String updatePeriod) {
    this.updatePeriod = updatePeriod;
  }

  public ClassifierAttributeModel[] getAttributes() {
    return attributes;
  }

  public void setAttributes(ClassifierAttributeModel[] attributes) {
    this.attributes = attributes;
  }

  public LocalDateTime getRevisionTimestamp() {
    return revisionTimestamp;
  }

  public void setRevisionTimestamp(LocalDateTime revisionTimestamp) {
    this.revisionTimestamp = revisionTimestamp;
  }

  public String getKeyAttributeUid() {
    return keyAttributeUid;
  }

  public void setKeyAttributeUid(String keyAttributeUid) {
    this.keyAttributeUid = keyAttributeUid;
  }

  public String getType() {
    return type;
  }

  public void setType(String type) {
    this.type = type;
  }

  @Override
  public String toString() {
    return "ClassifierOrgModel{" +
        "uid='" + uid + '\'' +
        ", code='" + code + '\'' +
        ", name='" + name + '\'' +
        ", description='" + description + '\'' +
        ", version='" + version + '\'' +
        ", publicId='" + publicId + '\'' +
        ", techName='" + techName + '\'' +
        ", updatePeriod='" + updatePeriod + '\'' +
        ", attributes=" + Arrays.toString(attributes) +
        ", revisionTimestamp=" + revisionTimestamp +
        ", keyAttributeUid='" + keyAttributeUid + '\'' +
        ", type='" + type + '\'' +
        '}';
  }
}
@@ -0,0 +1,30 @@
package ervu.service.classifier.model;

import java.io.Serializable;

/**
 * @author Artyom Hackimullin
 */
public class RecordAttributeModel implements Serializable {
  private static final long serialVersionUID = 1L;

  private String attributeUid;

  private String value;

  public String getAttributeUid() {
    return attributeUid;
  }

  public void setAttributeUid(String attributeUid) {
    this.attributeUid = attributeUid;
  }

  public String getValue() {
    return value;
  }

  public void setValue(String value) {
    this.value = value;
  }
}
@@ -0,0 +1,30 @@
package ervu.service.classifier.model;

import java.io.Serializable;

/**
 * @author Artyom Hackimullin
 */
public class RecordModel implements Serializable {
  private static final long serialVersionUID = 1L;

  private String uid;

  private RecordAttributeModel[] attributeValues;

  public RecordAttributeModel[] getAttributeValues() {
    return attributeValues;
  }

  public void setAttributeValues(RecordAttributeModel[] attributeValues) {
    this.attributeValues = attributeValues;
  }

  public String getUid() {
    return uid;
  }

  public void setUid(String uid) {
    this.uid = uid;
  }
}
@@ -0,0 +1,10 @@
package ervu.service.scheduer;

/**
 * @author Artyom Hackimullin
 */
public interface SchedulerService {

  void loadEveryPeriod();

}
@@ -0,0 +1,57 @@
package ervu.service.scheduer;

import java.util.Objects;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import ervu.client.classified.ClassifierOrgClient;
import ervu.dao.classifier.ClassifierAttributeDao;
import ervu.dao.classifier.RecordAttributesDao;
import ervu.service.classifier.model.ClassifierAttributeModel;
import ervu.service.classifier.model.ClassifierFormModel;
import ervu.service.classifier.model.RecordModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

/**
 * @author Artyom Hackimullin
 */
@Service
public class SchedulerServiceImpl implements SchedulerService {
  @Autowired
  private ClassifierOrgClient classifierOrgClient;
  @Autowired
  private ClassifierAttributeDao classifierAttributeDao;
  @Autowired
  private RecordAttributesDao recordAttributesDao;
  @Autowired
  private ObjectMapper mapper;

  @Scheduled(cron = "${ervu.cron.load.time:0 0 */1 * * *}")
  @Transactional
  public void loadEveryPeriod() {
    try {
      String json = Objects.requireNonNull(classifierOrgClient.getJsonClassifierOrgResponse());
      ClassifierFormModel classifierFormModel = mapper.readValue(json, ClassifierFormModel.class);
      ClassifierAttributeModel[] classifierAttributeModels = classifierFormModel.getClassifier()
          .getAttributes();
      RecordModel[] recordModels = classifierFormModel.getData().getRecords();
      String currentVersion = classifierFormModel.getClassifier().getVersion();
      var newVersion = Integer.parseInt(classifierFormModel.getClassifier().getVersion());
      var versionFromDb = Integer.parseInt(recordAttributesDao.fetchVersion());

      classifierAttributeDao.save(classifierAttributeModels);
      recordAttributesDao.save(recordModels, currentVersion);

      if (versionFromDb != 0 && versionFromDb < newVersion) {
        recordAttributesDao.deleteAllByVersion(String.valueOf(versionFromDb));
        classifierAttributeDao.deleteIfNotExistRecords();
      }
    }
    catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }
}
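The @Scheduled job above and the @Retryable call in ClassifierOrgClient only take effect when scheduling and Spring Retry are enabled somewhere in the application context; this commit does not show where that happens. A minimal sketch of such a configuration, assuming it is not already present elsewhere (the class and package names below are illustrative, not part of the commit):

package ervu.config; // hypothetical package, for illustration only

import org.springframework.context.annotation.Configuration;
import org.springframework.retry.annotation.EnableRetry;
import org.springframework.scheduling.annotation.EnableScheduling;

// Turns on the cron trigger used by SchedulerServiceImpl.loadEveryPeriod()
// and the retry support assumed by ClassifierOrgClient#getJsonClassifierOrgResponse().
@Configuration
@EnableScheduling
@EnableRetry
public class ErvuSchedulingConfig {
}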
@@ -28,13 +28,19 @@ public class ReplyingKafkaConfig {
   @Value("${ervu-kafka.bootstrap-servers}")
   private String bootstrapServers;
-  @Value("${ervu-kafka.reply-topic}")
+
+  @Value("${ervu-kafka.org-reply-topic}")
   private String orgReplyTopic;
+
   @Value("${ervu-journal-kafka.reply-topic}")
   private String journalReplyTopic;
+
   @Value("${ervu-kafka.group-id}")
   private String groupId;
+
+  @Value("${ervu-kafka.reply-connection-timeout:30}")
+  private long connectionTimeout;

   @Bean
   public ProducerFactory<String, String> producerFactory() {
     Map<String, Object> configProps = new HashMap<>();

@@ -108,7 +114,7 @@ public class ReplyingKafkaConfig {
     ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate =
         new ReplyingKafkaTemplate<>(pf, container);
     replyingKafkaTemplate.setCorrelationHeaderName("messageID");
-    replyingKafkaTemplate.setDefaultReplyTimeout(Duration.ofSeconds(1000L));
+    replyingKafkaTemplate.setDefaultReplyTimeout(Duration.ofSeconds(connectionTimeout));
     return replyingKafkaTemplate;
   }
 }
@@ -28,6 +28,8 @@ public class OrgInfo implements Serializable {
   private String orgTypeLeg;

+  private String orgTypeName;
+
   private String kpp;

   private AddressModel[] addresses;

@@ -98,6 +100,14 @@ public class OrgInfo implements Serializable {
     this.orgTypeLeg = orgTypeLeg;
   }

+  public String getOrgTypeName() {
+    return orgTypeName;
+  }
+
+  public void setOrgTypeName(String orgTypeName) {
+    this.orgTypeName = orgTypeName;
+  }
+
   public String getKpp() {
     return kpp;
   }
@@ -17,6 +17,7 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;

 import com.fasterxml.jackson.databind.ObjectMapper;
+import ervu.service.classifier.RecordAttributesService;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
 import ru.micord.ervu.kafka.model.*;

@@ -53,10 +54,13 @@ public class EsiaAuthService {
   @Qualifier("org")
   private ReplyingKafkaService replyingKafkaService;

-  @Value("${ervu-kafka.reply-topic}")
+  @Autowired
+  private RecordAttributesService recordAttributesService;
+
+  @Value("${ervu-kafka.org-reply-topic}")
   private String requestReplyTopic;

-  @Value("${ervu-kafka.request-topic}")
+  @Value("${ervu-kafka.org-request-topic}")
   private String requestTopic;

   public String generateAuthCodeUrl() {

@@ -376,6 +380,7 @@ public class EsiaAuthService {
     EmployeeModel employeeModel = ulDataService.getEmployeeModel(accessToken);
     EmployeeModel chiefModel = ulDataService.getChiefEmployeeModel(accessToken);
     OrgInfo orgInfo = copyToOrgInfo(organizationModel, employeeModel, chiefModel);
+    orgInfo.setOrgTypeName(recordAttributesService.findTitleByLeg(orgInfo.getOrgTypeLeg()));
     String kafkaResponse = replyingKafkaService.sendMessageAndGetReply(requestTopic,
         requestReplyTopic, objectMapper.writeValueAsString(orgInfo)
     );
backend/src/main/resources/config/changelog-master.xml (new file)
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<databaseChangeLog
    xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
        http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.5.xsd">

  <include file="v_1.0/changelog-v_1.0.xml" relativeToChangelogFile="true"/>

</databaseChangeLog>
@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<databaseChangeLog
    xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
        http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.5.xsd">

  <changeSet id="create-table-classifier_attributes" author="a.khakimullin">
    <sql>
      CREATE TABLE classifier_attributes
      (
          classifier_attribute_id uuid primary key,
          attribute_name varchar unique
      );
    </sql>
  </changeSet>

  <changeSet id="create-table-record-attributes" author="a.khakimullin">
    <sql>
      CREATE TABLE record_attributes
      (
          record_attribute_id bigserial primary key,
          record_id uuid not null,
          attribute_id uuid not null references classifier_attributes (classifier_attribute_id),
          attribute_value varchar,
          version varchar not null,
          CONSTRAINT uni_record_uid_attribute_uid UNIQUE (record_id, attribute_id)
      );
    </sql>
  </changeSet>
</databaseChangeLog>
backend/src/main/resources/config/v_1.0/changelog-v_1.0.xml (new file)
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<databaseChangeLog
    xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
        http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.5.xsd">

  <include file="2024-29-08--01-create-table-record-attributes.xml" relativeToChangelogFile="true"/>

</databaseChangeLog>
@@ -52,8 +52,12 @@ xa-data-source add \
 /system-property=esia-uri.logout:add(value="https://esia-portal1.test.gosuslugi.ru/idp/ext/Logout")
 /system-property=client-cert-hash:add(value="04508B4B0B58776A954A0E15F574B4E58799D74C61EE020B3330716C203E3BDD")
 /system-property=ervu-kafka.bootstrap-servers:add(value="localhost:9092")
-/system-property=ervu-kafka.reply-topic:add(value="ervu.organization.response")
+/system-property=ervu-kafka.org-reply-topic:add(value="ervu.organization.response")
 /system-property=ervu-kafka.group-id:add(value="1")
+/system-property=ervu-kafka.org-request-topic:add(value="ervu.organization.request")
+/system-property=ervu.cron.load.enable:add(value="true")
+/system-property=ervu.cron.load.time:add(value="0 0 */1 * * *")
+/system-property=ervu.esnsi.classifier.url.load:add(value="https://esnsi.gosuslugi.ru/rest/ext/v1/classifiers/11465/file?extension=JSON&encoding=UTF_8")
 /system-property=ervu-kafka.request-topic:add(value="ervu.organization.request")
 /system-property=ervu-journal-kafka.request-topic:add(value="ervu.organization.journal.request")
 /system-property=ervu-journal-kafka.reply-topic:add(value="ervu.organization.journal.response")
@@ -77,11 +77,13 @@
         <property name="sign-url" value="https://ervu-sign-dev.k8s.micord.ru/sign"/>
         <property name="sesia-uri.logout" value="https://esia-portal1.test.gosuslugi.ru/idp/ext/Logout"/>
         <property name="ervu-kafka.bootstrap-servers" value="localhost:9092"/>
-        <property name="ervu-kafka.reply-topic" value="ervu.organization.response"/>
+        <property name="ervu-kafka.org-reply-topic" value="ervu.organization.response"/>
         <property name="ervu-kafka.group-id" value="1"/>
-        <property name="ervu-kafka.request-topic" value="ervu.organization.request"/>
+        <property name="ervu-kafka.org-request-topic" value="ervu.organization.request"/>
         <property name="client-cert-hash" value="04508B4B0B58776A954A0E15F574B4E58799D74C61EE020B3330716C203E3BDD"/>
         <property name="bpmn.enable" value="false"/>
+        <property name="ervu.cron.load.time" value="0 0 */1 * * *"/>
+        <property name="ervu.esnsi.classifier.url.load" value="https://esnsi.gosuslugi.ru/rest/ext/v1/classifiers/11465/file?extension=JSON&amp;encoding=UTF_8"/>
         <property name="ervu-journal-kafka.request-topic" value="ervu.organization.journal.request"/>
         <property name="ervu-journal-kafka.reply-topic" value="ervu.organization.journal.response"/>
     </system-properties>