SUPPORT-8863: update

This commit is contained in:
Artyom Hackimullin 2024-11-12 10:31:27 +03:00
parent 5200173fc2
commit 0f1446ffbf
7 changed files with 83 additions and 56 deletions

View file

@ -1,14 +1,17 @@
package ervu.client.okopf; package ervu.client.okopf;
import java.io.*; import java.io.BufferedInputStream;
import java.net.URI; import java.io.BufferedReader;
import java.net.http.HttpClient; import java.io.IOException;
import java.net.http.HttpRequest; import java.io.InputStreamReader;
import java.net.http.HttpResponse; import java.net.URL;
import java.util.concurrent.TimeoutException; import java.nio.charset.StandardCharsets;
import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.zip.ZipInputStream; import java.util.zip.ZipInputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.retry.annotation.Backoff; import org.springframework.retry.annotation.Backoff;
import org.springframework.retry.annotation.Retryable; import org.springframework.retry.annotation.Retryable;
@ -19,37 +22,27 @@ import org.springframework.stereotype.Component;
*/ */
@Component @Component
public class EsnsiOkopfClient { public class EsnsiOkopfClient {
private static final Logger logger = LoggerFactory.getLogger(EsnsiOkopfClient.class);
@Value("${esnsi.okopf.url}") @Value("${esnsi.okopf.url}")
private String uri; private String url;
@Retryable(value = {TimeoutException.class}, backoff = @Retryable(value = IOException.class, maxAttemptsExpression = "${esnsi.okopf.retry.max.attempts.load:3}", backoff =
@Backoff(delay = 2000)) @Backoff(delayExpression = "${esnsi.okop.retry.delay.load:30000}"))
public String getJsonOkopFormData() { public String getJsonOkopFormData() {
HttpClient client = HttpClient.newHttpClient(); try (BufferedInputStream in = new BufferedInputStream(new URL(url).openStream());
HttpRequest request = HttpRequest.newBuilder() ZipInputStream archiveStream = new ZipInputStream(in);
.uri(URI.create(uri)) BufferedReader br = new BufferedReader(
.GET() new InputStreamReader(archiveStream, StandardCharsets.UTF_8))) {
.build(); if (Objects.nonNull(archiveStream.getNextEntry())) {
try { logger.info("Received an non-empty archive in response.");
HttpResponse<InputStream> response = client.send(request, return br.lines().collect(Collectors.joining(System.lineSeparator()));
HttpResponse.BodyHandlers.ofInputStream()
);
if (response.statusCode() >= 200 && response.statusCode() <= 202) {
return unzipFile(new ZipInputStream(response.body()));
} }
throw new RuntimeException("The returned status " + response.statusCode() + " is incorrect. Json file has not be unzip"); logger.info("Received an empty archive in response. Skipping load okpof file process");
} }
catch (IOException | InterruptedException e) { catch (IOException e) {
throw new RuntimeException(e); logger.error("Failed to send HTTP request {} or process the response for okopf file.", url, e);
} }
} return null;
private String unzipFile(ZipInputStream zis) throws IOException {
if (zis.getNextEntry() != null) {
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(zis.readAllBytes())));
return br.lines().collect(Collectors.joining(System.lineSeparator()));
}
throw new RuntimeException("ZipInputStream is empty and has not been unzipped");
} }
} }

View file

@ -10,7 +10,7 @@ import ervu.model.okopf.OkopfModel;
* @author Artyom Hackimullin * @author Artyom Hackimullin
*/ */
public interface OkopfDao { public interface OkopfDao {
void save(List<OkopfModel> recordModels); void saveOrUpdate(List<OkopfModel> recordModels);
String fetchTitleByLeg(String leg); String fetchTitleByLeg(String leg);
} }

View file

@ -1,8 +1,11 @@
package ervu.dao.okopf; package ervu.dao.okopf;
import java.util.List; import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import ervu.model.okopf.OkopfModel; import ervu.model.okopf.OkopfModel;
import ervu_lkrp_ul.ervu_lkrp_ul.db_beans.public_.tables.records.OkopfRecordsRecord;
import org.jooq.DSLContext; import org.jooq.DSLContext;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository; import org.springframework.stereotype.Repository;
@ -20,18 +23,12 @@ public class OkopfDaoImpl implements OkopfDao {
private DSLContext dsl; private DSLContext dsl;
@Override @Override
public void save(List<OkopfModel> recordModels) { public void saveOrUpdate(List<OkopfModel> okopfModels) {
var queries = recordModels.stream().map(record -> deleteNotActualOkopfRecords(okopfModels);
dsl.insertInto(OKOPF_RECORDS, OKOPF_RECORDS.OKOPF_RECORDS_ID, OKOPF_RECORDS.NAME, OKOPF_RECORDS.VERSION) dsl.batchUpdate(okopfModels.stream()
.values(record.getCode(), record.getName(), record.getVersion()) .map(this::mapOkopfModelToRecord)
.onConflict(OKOPF_RECORDS.OKOPF_RECORDS_ID) .toList())
.doUpdate() .execute();
.set(OKOPF_RECORDS.NAME, record.getName())
.set(OKOPF_RECORDS.VERSION, record.getVersion())
.where(OKOPF_RECORDS.OKOPF_RECORDS_ID.eq(record.getCode()))
).toList();
dsl.batch(queries).execute();
} }
@Override @Override
@ -41,4 +38,23 @@ public class OkopfDaoImpl implements OkopfDao {
.where(OKOPF_RECORDS.OKOPF_RECORDS_ID.eq(leg)) .where(OKOPF_RECORDS.OKOPF_RECORDS_ID.eq(leg))
.fetchOne(OKOPF_RECORDS.NAME); .fetchOne(OKOPF_RECORDS.NAME);
} }
private void deleteNotActualOkopfRecords(List<OkopfModel> recordModels) {
Set<String> ids = recordModels
.stream()
.map(OkopfModel::getCode)
.collect(Collectors.toSet());
dsl.deleteFrom(OKOPF_RECORDS)
.where(OKOPF_RECORDS.OKOPF_RECORDS_ID.notIn(ids))
.execute();
}
private OkopfRecordsRecord mapOkopfModelToRecord(OkopfModel model) {
OkopfRecordsRecord record = dsl.newRecord(OKOPF_RECORDS);
record.setValue(OKOPF_RECORDS.OKOPF_RECORDS_ID, model.getCode());
record.setValue(OKOPF_RECORDS.NAME, model.getName());
record.setValue(OKOPF_RECORDS.VERSION, model.getVersion());
return record;
}
} }

View file

@ -5,7 +5,6 @@ import java.util.List;
import java.util.stream.Stream; import java.util.stream.Stream;
import javax.annotation.PostConstruct; import javax.annotation.PostConstruct;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import ervu.client.okopf.EsnsiOkopfClient; import ervu.client.okopf.EsnsiOkopfClient;
import ervu.dao.okopf.OkopfDao; import ervu.dao.okopf.OkopfDao;
@ -24,6 +23,7 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import static org.springframework.scheduling.config.ScheduledTaskRegistrar.CRON_DISABLED; import static org.springframework.scheduling.config.ScheduledTaskRegistrar.CRON_DISABLED;
import static org.springframework.util.StringUtils.hasText;
/** /**
@ -48,11 +48,11 @@ public class EsnsiOkopfSchedulerServiceImpl implements EsnsiOkopfSchedulerServic
@Transactional @Transactional
public void init() { public void init() {
if (!cronLoad.equals(CRON_DISABLED)) { if (!cronLoad.equals(CRON_DISABLED)) {
logger.info("Synchronization with OKOPF enabled"); logger.info("Synchronization with esnsi okopf enabled");
load(); load();
} }
else { else {
logger.info("Synchronization with OKOPF disabled"); logger.info("Synchronization with esnsi okopf disabled");
} }
} }
@ -60,20 +60,25 @@ public class EsnsiOkopfSchedulerServiceImpl implements EsnsiOkopfSchedulerServic
@SchedulerLock(name = "loadOkopf") @SchedulerLock(name = "loadOkopf")
@Transactional @Transactional
public void load() { public void load() {
logger.info("Loading okopf file");
String data = esnsiOkopfClient.getJsonOkopFormData();
try { try {
logger.info("Loading okopf file"); if (hasText(data)) {
String data = esnsiOkopfClient.getJsonOkopFormData(); logger.info("Beginning to save okopf data");
OkopfOrgModel orgModel = mapper.readValue(data, OkopfOrgModel.class); OkopfOrgModel orgModel = mapper.readValue(data, OkopfOrgModel.class);
int currentVersion = mapper.readTree(data).findValue("version").asInt(); int currentVersion = mapper.readTree(data).findValue("version").asInt();
List<OkopfModel> okopfRecords = mapToOkopfRecords(orgModel.getData(), currentVersion); List<OkopfModel> okopfRecords = mapToOkopfRecords(orgModel.getData(), currentVersion);
okopfDao.save(okopfRecords); okopfDao.saveOrUpdate(okopfRecords);
logger.info("Saved okopf data");
}
} }
catch (JsonProcessingException e) { catch (Exception e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
} }
private List<OkopfModel> mapToOkopfRecords(OkopfDataModel dataModel, int version) { private List<OkopfModel> mapToOkopfRecords(OkopfDataModel dataModel, int version) {
logger.info("Parsing from json file to okopf model");
return Arrays.stream(dataModel.getDetails()) return Arrays.stream(dataModel.getDetails())
.flatMap(detail -> { .flatMap(detail -> {
OkopfAttributeValueModel[] attributeValues = detail.getAttributeValues(); OkopfAttributeValueModel[] attributeValues = detail.getAttributeValues();

View file

@ -783,7 +783,12 @@ JBPM использует 3 корневых категории логирова
#### Взаимодействие с ЕСНСИ в части получения справочника ОКОПФ #### Взаимодействие с ЕСНСИ в части получения справочника ОКОПФ
- `ESNSI_OKOPF_URL` - url, который обращается к еснси для получения справочника и скачивает данные справочников организации в виде заархивированного json файла. - `ESNSI_OKOPF_URL` - url, который обращается к еснси для получения справочника и скачивает данные справочников организации в виде заархивированного json файла.
- `ESNSI_OKOPF_CRON_LOAD` - настройка, которая указывает расписание для загрузки справочника окопф и сохранения данных по справкам в БД - `ESNSI_OKOPF_CRON_LOAD` - настройка, которая указывает расписание для загрузки справочника окопф и
сохранения данных по справкам в БД
- `ESNSI_OKOPF_RETRY_DELAY_LOAD` - настройка, которая указывает на повторную попытку загрузить
справочник окопф с задержкой
- `ESNSI_OKOPF_RETRY_MAX_ATTEMPTS_LOAD` - настройка, которая указывает на максимальное кол-во попыток
повторно загрузить справочник окопф
#### Взаимодействие с WebDav #### Взаимодействие с WebDav

View file

@ -35,12 +35,18 @@ ERVU_KAFKA_JOURNAL_REPLY_TOPIC=ervu.organization.journal.response
DB.JOURNAL.EXCLUDED.STATUSES=Направлено в ЕРВУ,Получен ЕРВУ DB.JOURNAL.EXCLUDED.STATUSES=Направлено в ЕРВУ,Получен ЕРВУ
ESNSI_OKOPF_URL=https://esnsi.gosuslugi.ru/rest/ext/v1/classifiers/11465/file?extension=JSON&encoding=UTF_8 ESNSI_OKOPF_URL=https://esnsi.gosuslugi.ru/rest/ext/v1/classifiers/11465/file?extension=JSON&encoding=UTF_8
ESNSI_OKOPF_CRON_LOAD=0 0 */1 * * * ESNSI_OKOPF_CRON_LOAD=0 0 */1 * * *
ESNSI_OKOPF_RETRY_MAX_ATTEMPTS_LOAD=3
ESNSI_OKOPF_RETRY_DELAY_LOAD=1000
ERVU_KAFKA_SECURITY_PROTOCOL=SASL_PLAINTEXT ERVU_KAFKA_SECURITY_PROTOCOL=SASL_PLAINTEXT
ERVU_KAFKA_SASL_MECHANISM=SCRAM-SHA-256 ERVU_KAFKA_SASL_MECHANISM=SCRAM-SHA-256
ERVU_KAFKA_USERNAME=user1 ERVU_KAFKA_USERNAME=user1
ERVU_KAFKA_PASSWORD=Blfi9d2OFG ERVU_KAFKA_PASSWORD=Blfi9d2OFG
ERVU_KAFKA_EXCERPT_REPLY_TOPIC=ervu.lkrp.excerpt.response ERVU_KAFKA_EXCERPT_REPLY_TOPIC=ervu.lkrp.excerpt.response
ERVU_KAFKA_EXCERPT_REQUEST_TOPIC=ervu.lkrp.excerpt.request ERVU_KAFKA_EXCERPT_REQUEST_TOPIC=ervu.lkrp.excerpt.request
ESNSI_OKOPF_URL=https://esnsi.gosuslugi.ru/rest/ext/v1/classifiers/16271/file?extension=JSON&encoding=UTF_8
ESNSI_OKOPF_CRON_LOAD=0 0 */1 * * *
ESNSI_OKOPF_RETRY_MAX_ATTEMPTS_LOAD=3
ESNSI_OKOPF_RETRY_DELAY_LOAD=30000
ERVU_FILE_UPLOAD_MAX_FILE_SIZE=5242880 ERVU_FILE_UPLOAD_MAX_FILE_SIZE=5242880
ERVU_FILE_UPLOAD_MAX_REQUEST_SIZE=6291456 ERVU_FILE_UPLOAD_MAX_REQUEST_SIZE=6291456

View file

@ -83,7 +83,9 @@
<property name="ervu.kafka.username" value="user1"/> <property name="ervu.kafka.username" value="user1"/>
<property name="ervu.kafka.password" value="Blfi9d2OFG"/> <property name="ervu.kafka.password" value="Blfi9d2OFG"/>
<property name="esnsi.okopf.cron.load" value="0 0 */1 * * *"/> <property name="esnsi.okopf.cron.load" value="0 0 */1 * * *"/>
<property name="esnsi.okopf.url" value="https://esnsi.gosuslugi.ru/rest/ext/v1/classifiers/11465/file?extension=JSON&amp;encoding=UTF_8"/> <property name="esnsi.okopf.url" value="https://esnsi.gosuslugi.ru/rest/ext/v1/classifiers/16271/file?extension=JSON&amp;encoding=UTF_8"/>
<property name="esnsi.okop.retry.delay.load" value="30000"/>
<property name="esnsi.okopf.retry.max.attempts.load" value="3"/>
<property name="ervu.kafka.journal.request.topic" value="ervu.organization.journal.request"/> <property name="ervu.kafka.journal.request.topic" value="ervu.organization.journal.request"/>
<property name="ervu.kafka.journal.reply.topic" value="ervu.organization.journal.response"/> <property name="ervu.kafka.journal.reply.topic" value="ervu.organization.journal.response"/>
<property name="db.journal.excluded.statuses" value="Направлено в ЕРВУ,Получен ЕРВУ"/> <property name="db.journal.excluded.statuses" value="Направлено в ЕРВУ,Получен ЕРВУ"/>