SUPPORT-8471: new names

This commit is contained in:
Хакимуллин Артём 2024-09-10 22:00:48 +03:00
parent fe6ce5b8a2
commit e04b8c0c5f
16 changed files with 163 additions and 80 deletions

View file

@ -20,16 +20,16 @@ import org.springframework.stereotype.Component;
* @author Artyom Hackimullin
*/
@Component
public class ClassifierOrgClient {
public class EsnsiOkopfClient {
private static final Logger logger = LoggerFactory.getLogger(ClassifierOrgClient.class);
private static final Logger logger = LoggerFactory.getLogger(EsnsiOkopfClient.class);
@Value("${ervu.esnsi.classifier.url.load:#{null}}")
@Value("${ervu.esnsi.okopf.file.url:#{null}}")
private String uri;
@Retryable(value = {TimeoutException.class}, backoff =
@Backoff(delay = 2000))
public String getJsonClassifierOrgResponse() {
public String getJsonOkopFormData() {
HttpClient client = HttpClient.newHttpClient();
HttpRequest request = HttpRequest.newBuilder()
.uri(URI.create(uri))
@ -40,7 +40,7 @@ public class ClassifierOrgClient {
HttpResponse.BodyHandlers.ofInputStream()
);
if (response.statusCode() >= 200 && response.statusCode() <= 202) {
return unzipJsonFile(new ZipInputStream(response.body()));
return unzipFile(new ZipInputStream(response.body()));
}
logger.debug("Response unsuccessful. Json file has not be unzip.");
}
@ -50,7 +50,7 @@ public class ClassifierOrgClient {
return null;
}
private String unzipJsonFile(ZipInputStream zis) throws IOException {
private String unzipFile(ZipInputStream zis) throws IOException {
if (zis.getNextEntry() != null) {
ByteArrayInputStream isr = new ByteArrayInputStream(zis.readAllBytes());
try (BufferedReader br = new BufferedReader(new InputStreamReader(isr))) {

View file

@ -1,13 +0,0 @@
package ervu.dao.classifier;
import ervu.service.classifier.model.ClassifierAttributeModel;
/**
 * DAO contract for persisting classifier attribute definitions.
 *
 * @author Artyom Hackimullin
 */
public interface ClassifierAttributeDao {
// Persists the given attribute definitions.
void save(ClassifierAttributeModel[] classifierAttributeModels);
// Deletes attribute rows that are no longer referenced by any record attribute.
void deleteIfNotExistRecords();
}

View file

@ -0,0 +1,13 @@
package ervu.dao.classifier;
import ervu.service.classifier.model.OkopfAttributeModel;
/**
 * DAO contract for persisting OKOPF attribute definitions.
 *
 * @author Artyom Hackimullin
 */
public interface OkopfAttributeDao {
// Persists the given OKOPF attribute definitions.
void save(OkopfAttributeModel[] okopfAttributeModels);
// Deletes attribute rows that are no longer referenced by any record attribute.
void deleteIfNotExistRecords();
}

View file

@ -3,7 +3,7 @@ package ervu.dao.classifier;
import java.util.Arrays;
import java.util.UUID;
import ervu.service.classifier.model.ClassifierAttributeModel;
import ervu.service.classifier.model.OkopfAttributeModel;
import org.jooq.DSLContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
@ -15,13 +15,13 @@ import static ervu_lkrp_ul.ervu_lkrp_ul.db_beans.public_.tables.ClassifierAttrib
* @author Artyom Hackimullin
*/
@Repository
public class ClassifierAttributeDaoImpl implements ClassifierAttributeDao {
public class OkopfAttributeDaoImpl implements OkopfAttributeDao {
@Autowired
private DSLContext dsl;
public void save(ClassifierAttributeModel[] classifierAttributeModels) {
var queries = Arrays.stream(classifierAttributeModels).map(attribute -> {
public void save(OkopfAttributeModel[] okopfAttributeModels) {
var queries = Arrays.stream(okopfAttributeModels).map(attribute -> {
var uid = UUID.fromString(attribute.getUid());
return dsl.insertInto(CLASSIFIER_ATTRIBUTES)
.set(CLASSIFIER_ATTRIBUTES.CLASSIFIER_ATTRIBUTE_ID, uid)
@ -35,8 +35,7 @@ public class ClassifierAttributeDaoImpl implements ClassifierAttributeDao {
@Override
public void deleteIfNotExistRecords() {
dsl.deleteFrom(CLASSIFIER_ATTRIBUTES).whereNotExists(
dsl.selectOne()
dsl.deleteFrom(CLASSIFIER_ATTRIBUTES).whereNotExists(dsl.selectOne()
.from(RECORD_ATTRIBUTES)
.where(RECORD_ATTRIBUTES.ATTRIBUTE_ID.eq(CLASSIFIER_ATTRIBUTES.CLASSIFIER_ATTRIBUTE_ID)))
.execute();

View file

@ -1,13 +1,13 @@
package ervu.dao.classifier;
import ervu.service.classifier.model.RecordModel;
import ervu.service.classifier.model.OkopfGroupRecordModel;
/**
* @author Artyom Hackimullin
*/
public interface RecordAttributesDao {
void save(RecordModel[] recordModels, String version);
public interface OkopfRecordDao {
void save(OkopfGroupRecordModel[] recordModels, String version);
String fetchTitleByLeg(String leg);

View file

@ -3,7 +3,7 @@ package ervu.dao.classifier;
import java.util.Arrays;
import java.util.UUID;
import ervu.service.classifier.model.RecordModel;
import ervu.service.classifier.model.OkopfGroupRecordModel;
import org.jooq.DSLContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
@ -16,15 +16,15 @@ import static ervu_lkrp_ul.ervu_lkrp_ul.db_beans.public_.tables.RecordAttributes
* @author Artyom Hackimullin
*/
@Repository
public class RecordAttributesDaoImpl implements RecordAttributesDao {
public class OkopfRecordDaoImpl implements OkopfRecordDao {
@Autowired
private DSLContext dsl;
@Override
public void save(RecordModel[] recordModels, String version) {
public void save(OkopfGroupRecordModel[] recordModels, String version) {
var queries = Arrays.stream(recordModels)
.flatMap(it -> Arrays.stream(it.getAttributeValues())
.flatMap(it -> Arrays.stream(it.getOkopfRecords())
.map(attribute -> {
var recordUid = UUID.fromString(it.getUid());
var attributeUid = UUID.fromString(attribute.getAttributeUid());

View file

@ -3,7 +3,7 @@ package ervu.service.classifier;
/**
* @author Artyom Hackimullin
*/
public interface RecordAttributesService {
public interface OkopfRecordService {
String findTitleByLeg(String leg);

View file

@ -1,6 +1,6 @@
package ervu.service.classifier;
import ervu.dao.classifier.RecordAttributesDao;
import ervu.dao.classifier.OkopfRecordDao;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -9,14 +9,14 @@ import org.springframework.transaction.annotation.Transactional;
* @author Artyom Hackimullin
*/
@Service
public class RecordAttributesServiceImpl implements RecordAttributesService {
public class OkopfRecordServiceImpl implements OkopfRecordService {
@Autowired
private RecordAttributesDao recordAttributesDao;
private OkopfRecordDao okopfRecordDao;
@Override
@Transactional(readOnly = true)
public String findTitleByLeg(String leg) {
return recordAttributesDao.fetchTitleByLeg(leg);
return okopfRecordDao.fetchTitleByLeg(leg);
}
}

View file

@ -1,30 +0,0 @@
package ervu.service.classifier.model;
import java.io.Serializable;
/**
 * Serializable node of a classifier payload: the classifier UID together
 * with the records it contains.
 *
 * @author Artyom Hackimullin
 */
public class ClassifierNodeModel implements Serializable {

    private static final long serialVersionUID = 1L;

    /** UID of the classifier this node describes. */
    private String classifierUid;

    /** Records belonging to the classifier. */
    private RecordModel[] records;

    public RecordModel[] getRecords() {
        return records;
    }

    public void setRecords(RecordModel[] records) {
        this.records = records;
    }

    public String getClassifierUid() {
        return classifierUid;
    }

    public void setClassifierUid(String classifierUid) {
        this.classifierUid = classifierUid;
    }
}

View file

@ -1,12 +1,15 @@
package ervu.service.classifier.model;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonRootName;
/**
* @author Artyom Hackimullin
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class ClassifierAttributeModel {
public class OkopfAttributeModel implements Serializable {
private final static long serialVersionUID = 1L;
private String uid;
@ -81,7 +84,7 @@ public class ClassifierAttributeModel {
@Override
public String toString() {
return "ClassifierAttribute{" +
return "OkopfAttributeModel{" +
"uid='" + uid + '\'' +
", type='" + type + '\'' +
", name='" + name + '\'' +

View file

@ -0,0 +1,44 @@
package ervu.service.classifier.model;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Root model of the unzipped ESNSI OKOPF JSON payload: the "classifier"
 * descriptor plus the "data" node holding the records.
 *
 * @author Artyom Hackimullin
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class OkopfFormDataModel implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Bound to the "classifier" JSON element. */
    @JsonProperty("classifier")
    private OkopfOrgModel okopfOrg;

    /** Bound to the "data" JSON element. */
    @JsonProperty("data")
    private OkopfNodeModel okopfNode;

    public OkopfOrgModel getOkopfOrg() {
        return okopfOrg;
    }

    public void setOkopfOrg(OkopfOrgModel okopfOrg) {
        this.okopfOrg = okopfOrg;
    }

    public OkopfNodeModel getOkopfNode() {
        return okopfNode;
    }

    public void setOkopfNode(OkopfNodeModel okopfNode) {
        this.okopfNode = okopfNode;
    }

    @Override
    public String toString() {
        // Fix: the label previously read "OkopfOrgModel" (copy-paste from another
        // class); toString must report this class's own name.
        return "OkopfFormDataModel{" +
                "classifier=" + okopfOrg +
                ", data=" + okopfNode +
                '}';
    }
}

View file

@ -0,0 +1,33 @@
package ervu.service.classifier.model;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Serializable OKOPF group record: a record UID plus the attribute values
 * attached to it ("attributeValues" in the source JSON).
 *
 * @author Artyom Hackimullin
 */
public class OkopfGroupRecordModel implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Record UID as delivered in the payload. */
    private String uid;

    /** Bound to the "attributeValues" JSON field. */
    @JsonProperty("attributeValues")
    private OkopfRecordModel[] okopfRecords;

    public String getUid() {
        return uid;
    }

    public void setUid(String uid) {
        this.uid = uid;
    }

    public OkopfRecordModel[] getOkopfRecords() {
        return okopfRecords;
    }

    public void setOkopfRecords(OkopfRecordModel[] okopfRecords) {
        this.okopfRecords = okopfRecords;
    }
}

View file

@ -0,0 +1,34 @@
package ervu.service.classifier.model;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Serializable OKOPF data node: the classifier UID ("classifierUid" in JSON)
 * and its group records ("records" in JSON).
 *
 * @author Artyom Hackimullin
 */
public class OkopfNodeModel implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Bound to the "classifierUid" JSON field. */
    @JsonProperty("classifierUid")
    private String okopfUid;

    /** Bound to the "records" JSON field. */
    @JsonProperty("records")
    private OkopfGroupRecordModel[] okopfGroups;

    public OkopfGroupRecordModel[] getOkopfGroups() {
        return okopfGroups;
    }

    public void setOkopfGroups(OkopfGroupRecordModel[] okopfGroups) {
        this.okopfGroups = okopfGroups;
    }

    public String getOkopfUid() {
        return okopfUid;
    }

    public void setOkopfUid(String okopfUid) {
        this.okopfUid = okopfUid;
    }
}

View file

@ -11,7 +11,7 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
* @author Artyom Hackimullin
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class ClassifierOrgModel implements Serializable {
public class OkopfOrgModel implements Serializable {
private static final long serialVersionUID = 1L;
private String uid;
@ -30,7 +30,7 @@ public class ClassifierOrgModel implements Serializable {
private String updatePeriod;
private ClassifierAttributeModel[] attributes;
private OkopfAttributeModel[] attributes;
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSX")
private LocalDateTime revisionTimestamp;
@ -103,11 +103,11 @@ public class ClassifierOrgModel implements Serializable {
this.updatePeriod = updatePeriod;
}
public ClassifierAttributeModel[] getAttributes() {
public OkopfAttributeModel[] getAttributes() {
return attributes;
}
public void setAttributes(ClassifierAttributeModel[] attributes) {
public void setAttributes(OkopfAttributeModel[] attributes) {
this.attributes = attributes;
}

View file

@ -5,7 +5,7 @@ import java.io.Serializable;
/**
* @author Artyom Hackimullin
*/
public class RecordAttributeModel implements Serializable {
public class OkopfRecordModel implements Serializable {
private static final long serialVersionUID = 1L;
private String attributeUid;

View file

@ -7,9 +7,9 @@
<changeSet id="create-table-classifier_attributes" author="a.khakimullin">
<sql>
CREATE TABLE classifier_attributes
CREATE TABLE okopf_attributes
(
classifier_attribute_id uuid primary key,
okopf_attribute_id uuid primary key,
attribute_name varchar unique
);
</sql>
@ -17,12 +17,12 @@
<changeSet id="create-table-record-attributes" author="a.khakimullin">
<sql>
CREATE TABLE record_attributes
CREATE TABLE okopf_records
(
record_attribute_id bigserial primary key,
okopf_record_id bigserial primary key,
record_id uuid not null,
attribute_id uuid not null references classifier_attributes (classifier_attribute_id),
attribute_value varchar,
attribute_id uuid not null references okopf_attributes (okopf_attribute_id),
value varchar,
version varchar not null,
CONSTRAINT uni_record_uid_attribute_uid UNIQUE (record_id, attribute_id)
);