ERVU-514: medicine flow

This commit is contained in:
Fusionshh 2025-09-23 14:45:02 +03:00
parent f5242a17ca
commit 31b1f6f27a
31 changed files with 3701 additions and 2677 deletions

View file

@ -1,27 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<workflow>
<name>job_citizen_information_search</name>
<name>disability_job</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<workflow_version/>
<workflow_status>0</workflow_status>
<created_user>-</created_user>
<created_date>2025/06/05 14:27:15.055</created_date>
<created_date>2025/09/08 16:38:17.982</created_date>
<modified_user>-</modified_user>
<modified_date>2025/06/05 14:27:15.055</modified_date>
<modified_date>2025/09/08 16:38:17.982</modified_date>
<parameters>
<parameter>
<name>M_R_CR_DATE</name>
<default_value>3001-01-01 00:00:00</default_value>
<description/>
</parameter>
<parameter>
<name>M_R_UP_DATE</name>
<default_value/>
<description/>
</parameter>
</parameters>
</parameters>
<actions>
<action>
<name>Start</name>
@ -37,12 +26,12 @@
<schedulerType>0</schedulerType>
<weekDay>1</weekDay>
<parallel>N</parallel>
<xloc>416</xloc>
<yloc>208</yloc>
<xloc>112</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
<action>
<name>check_if_citizen_information_search_job_exists.hpl</name>
<name>check_if_job_execution_exists.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -52,7 +41,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_information_search/checkpoints/check_if_citizen_information_search_job_exists.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/support/check_if_job_execution_exists.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -63,12 +52,12 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>608</xloc>
<yloc>208</yloc>
<xloc>480</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
<action>
<name>citizen_information_search_job_exists_check</name>
<name>employer_job_execution_exists_check</name>
<description/>
<type>SIMPLE_EVAL</type>
<attributes/>
@ -80,12 +69,12 @@
<valuetype>variable</valuetype>
<variablename>JOB_EXECUTED_FLAG</variablename>
<parallel>N</parallel>
<xloc>880</xloc>
<yloc>208</yloc>
<xloc>784</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
<action>
<name>recruitments_five_flow.hpl</name>
<name>recruitment_five_flow.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -95,7 +84,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_information_search/recruitments_five_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/recruitment_five_flow.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -106,12 +95,12 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>Y</parallel>
<xloc>1120</xloc>
<yloc>208</yloc>
<xloc>1104</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
<action>
<name>recruitment_five_flow_on_error.hpl</name>
<name>recruitment_five_flow_repeat.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -121,7 +110,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_information_search/recruitment_five_flow_on_error.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/recruitment_five_flow_repeat.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -132,12 +121,12 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>1120</xloc>
<yloc>512</yloc>
<xloc>1040</xloc>
<yloc>464</yloc>
<attributes_hac/>
</action>
<action>
<name>check_if_need_to_repeat_job.hpl</name>
<name>check_if_need_to_repeat.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -147,7 +136,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_information_search/check_if_need_to_repeat_job.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/support/check_if_need_to_repeat.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -158,8 +147,8 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>880</xloc>
<yloc>368</yloc>
<xloc>784</xloc>
<yloc>304</yloc>
<attributes_hac/>
</action>
<action>
@ -175,8 +164,8 @@
<valuetype>variable</valuetype>
<variablename>NEED_TO_REPEAT_JOB</variablename>
<parallel>N</parallel>
<xloc>880</xloc>
<yloc>512</yloc>
<xloc>784</xloc>
<yloc>464</yloc>
<attributes_hac/>
</action>
<action>
@ -190,7 +179,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_information_search/recruitment_five_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/recruitment_five_flow_delta.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -201,50 +190,63 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>880</xloc>
<xloc>784</xloc>
<yloc>704</yloc>
<attributes_hac/>
</action>
<action>
<name>init_job_name</name>
<description/>
<type>SET_VARIABLES</type>
<attributes/>
<fields>
<field>
<variable_name>JOB_NAME</variable_name>
<variable_type>CURRENT_WORKFLOW</variable_type>
<variable_value>disability_job</variable_value>
</field>
</fields>
<file_variable_type>CURRENT_WORKFLOW</file_variable_type>
<filename/>
<replacevars>N</replacevars>
<parallel>N</parallel>
<xloc>272</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
</actions>
<hops>
<hop>
<from>Start</from>
<to>check_if_citizen_information_search_job_exists.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>check_if_citizen_information_search_job_exists.hpl</from>
<to>citizen_information_search_job_exists_check</to>
<from>check_if_job_execution_exists.hpl</from>
<to>employer_job_execution_exists_check</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>citizen_information_search_job_exists_check</from>
<to>recruitments_five_flow.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>citizen_information_search_job_exists_check</from>
<to>check_if_need_to_repeat_job.hpl</to>
<from>employer_job_execution_exists_check</from>
<to>check_if_need_to_repeat.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>check_if_need_to_repeat_job.hpl</from>
<from>check_if_need_to_repeat.hpl</from>
<to>Simple evaluation</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>employer_job_execution_exists_check</from>
<to>recruitment_five_flow.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Simple evaluation</from>
<to>recruitment_five_flow_on_error.hpl</to>
<to>recruitment_five_flow_repeat.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
@ -256,36 +258,22 @@
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Start</from>
<to>init_job_name</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>init_job_name</from>
<to>check_if_job_execution_exists.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
</hops>
<notepads>
<notepad>
<backgroundcolorblue>251</backgroundcolorblue>
<backgroundcolorgreen>232</backgroundcolorgreen>
<backgroundcolorred>201</backgroundcolorred>
<bordercolorblue>90</bordercolorblue>
<bordercolorgreen>58</bordercolorgreen>
<bordercolorred>14</bordercolorred>
<fontbold>N</fontbold>
<fontcolorblue>90</fontcolorblue>
<fontcolorgreen>58</fontcolorgreen>
<fontcolorred>14</fontcolorred>
<fontitalic>N</fontitalic>
<fontname>Segoe UI</fontname>
<fontsize>9</fontsize>
<height>149</height>
<xloc>976</xloc>
<yloc>0</yloc>
<note>
M_R_UP_DATE
M_R_CR_DATE
0001-01-01 00:00:00
3001-01-01 00:00:00</note>
<width>114</width>
</notepad>
</notepads>
<attributes/>
</workflow>
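
The reworked disability_job first runs init_job_name (JOB_NAME = disability_job), then check_if_job_execution_exists.hpl, whose JOB_EXECUTED_FLAG drives the employer_job_execution_exists_check SIMPLE_EVAL: one branch goes straight to the full recruitment_five_flow.hpl load, the other to check_if_need_to_repeat.hpl, whose NEED_TO_REPEAT_JOB then chooses between recruitment_five_flow_repeat.hpl and recruitment_five_flow_delta.hpl. The two checkpoint pipelines are not part of this hunk, so the query below is only a hypothetical sketch of the kind of check they could run against etl.job_execution; the table and variable names come from this diff, everything else is assumed.

-- Hypothetical sketch only; the real logic lives in check_if_job_execution_exists.hpl.
-- Produces 'Y' when the job has at least one execution record, feeding JOB_EXECUTED_FLAG.
SELECT CASE WHEN COUNT(*) > 0 THEN 'Y' ELSE 'N' END AS JOB_EXECUTED_FLAG
FROM etl.job_execution
WHERE job_name = '${JOB_NAME}';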

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>citizen_information_search_flow</name>
<name>disability_flow</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,73 +13,68 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/05 14:31:22.799</created_date>
<created_date>2025/09/08 19:24:20.455</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/05 14:31:22.799</modified_date>
<modified_date>2025/09/08 19:24:20.455</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Create job execution record</from>
<to>Table input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table output</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Create job execution record</from>
<to>disability_input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Filter rows</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table output</from>
<from>Identify last row in a stream</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>disability_output</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>disability_output</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>disability_input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>Table output</to>
<from>disability_output</from>
<to>has_disability_update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>Add constants</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Add constants</from>
<to>Update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Update</from>
<from>has_disability_update</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Update</from>
<from>has_disability_update</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table output</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Abort</name>
@ -92,40 +87,13 @@
<method>none</method>
<schema_name/>
</partitioning>
<abort_option>ABORT_WITH_ERROR</abort_option>
<abort_option>ABORT</abort_option>
<always_log_rows>Y</always_log_rows>
<row_threshold>0</row_threshold>
<attributes/>
<GUI>
<xloc>1168</xloc>
<yloc>448</yloc>
</GUI>
</transform>
<transform>
<name>Add constants</name>
<type>Constant</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<fields>
<field>
<length>-1</length>
<name>is_wanted</name>
<nullif>true</nullif>
<precision>-1</precision>
<set_empty_string>N</set_empty_string>
<type>Boolean</type>
</field>
</fields>
<attributes/>
<GUI>
<xloc>880</xloc>
<yloc>624</yloc>
<xloc>1504</xloc>
<yloc>576</yloc>
</GUI>
</transform>
<transform>
@ -159,8 +127,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1008</xloc>
<yloc>448</yloc>
<xloc>1216</xloc>
<yloc>576</yloc>
</GUI>
</transform>
<transform>
@ -185,14 +153,13 @@ and recruitment_id = '${IDM_ID}';
<sql>UPDATE etl.job_execution
SET status = 'SUCCESS'
WHERE job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}'
and status = 'PROCESSING';
and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1488</xloc>
<yloc>624</yloc>
<xloc>1504</xloc>
<yloc>240</yloc>
</GUI>
</transform>
<transform>
@ -220,8 +187,8 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
</sql>
<attributes/>
<GUI>
<xloc>80</xloc>
<yloc>320</yloc>
<xloc>288</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
@ -237,8 +204,8 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
</partitioning>
<attributes/>
<GUI>
<xloc>1488</xloc>
<yloc>320</yloc>
<xloc>704</xloc>
<yloc>240</yloc>
</GUI>
</transform>
<transform>
@ -273,15 +240,15 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
<send_true_to>Change job status on success</send_true_to>
<attributes/>
<GUI>
<xloc>1200</xloc>
<yloc>624</yloc>
<xloc>1504</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>Identify last row in a stream</name>
<type>DetectLastRow</type>
<description/>
<distribute>Y</distribute>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
@ -291,12 +258,12 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
<resultfieldname>last_row</resultfieldname>
<attributes/>
<GUI>
<xloc>720</xloc>
<yloc>624</yloc>
<xloc>704</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<name>disability_input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
@ -308,41 +275,43 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<sql>
SELECT
ri.recruit_id,
ri.info -> 'svedFL' -> 'extend' -> 'rozysk' ->> 'naimOrg' AS name_organ_start_search,
ri.info -> 'svedFL' -> 'extend' -> 'rozysk' ->> 'ugolovZakon' AS article_criminal_code,
TO_DATE(ri.info -> 'svedFL' -> 'extend' -> 'rozysk' ->> 'dataNachRozisk', 'YYYY-MM-DD') AS wanted_start_date,
TO_DATE(ri.info -> 'svedFL' -> 'extend' -> 'rozysk' ->> 'dataPrekrRozysk', 'YYYY-MM-DD') AS wanted_end_date,
ri.info -> 'svedFL' -> 'extend' -> 'rozysk' ->> 'osnovPrekrRozysk' AS grounds_stopping_search,
ri.info -> 'svedFL' -> 'extend' -> 'rozysk' ->> 'naimOrgPrekrRozysk' AS name_organ_end_search
FROM recruits_info ri
JOIN ervu_dashboard.citizen r ON r.recruit_id = ri.recruit_id
WHERE'${IDM_ID}' != ''
AND COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}'
AND '${M_R_CR_DATE}'::timestamp >= ri.created_at
AND jsonb_typeof(ri.info->'svedFL'->'extend'->'rozysk') = 'object'
</sql>
<limit/>
<sql>WITH filteredData AS (SELECT ri.recruit_id,
ri.info -> 'svedFL' -> 'svedInvalid' -> 'invalid' AS invalid_elem
FROM recruits_info ri
WHERE ri.info -> 'svedFL' -> 'svedInvalid' ->> 'invalid' &lt;&gt; 'null'
and COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}'
)
SELECT fd.recruit_id,
invalid_elem ->> 'id' AS source_id,
to_date(invalid_elem ->> 'dataSved', 'YYYY-MM-DD') as source_update_date,
invalid_elem ->> 'gruppa' as disability_group,
invalid_elem ->> 'ustanOrg' as disability_establish_org_name,
to_date(invalid_elem -> 'ustanInvalid' ->> 'dataUstan', 'YYYY-MM-DD') as register_date,
to_date(invalid_elem -> 'ustanInvalid' ->> 'dataPodtverzhdPlan', 'YYYY-MM-DD') as confirmation_date,
to_date(invalid_elem ->> 'dataSnyat', 'YYYY-MM-DD') as deregistration_date,
coalesce(invalid_elem -> 'ustanInvalid' ->> 'prBessrInvl' = '1', false) as permanent,
true as has_disability
FROM filteredData fd;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>336</xloc>
<yloc>320</yloc>
<xloc>512</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>Table output</name>
<name>disability_output</name>
<type>TableOutput</type>
<description/>
<distribute>N</distribute>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>1000</commit>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<fields>
<field>
@ -350,28 +319,36 @@ WHERE'${IDM_ID}' != ''
<stream_name>recruit_id</stream_name>
</field>
<field>
<column_name>name_organ_start_search</column_name>
<stream_name>name_organ_start_search</stream_name>
<column_name>source_id</column_name>
<stream_name>source_id</stream_name>
</field>
<field>
<column_name>article_criminal_code</column_name>
<stream_name>article_criminal_code</stream_name>
<column_name>source_update_date</column_name>
<stream_name>source_update_date</stream_name>
</field>
<field>
<column_name>wanted_start_date</column_name>
<stream_name>wanted_start_date</stream_name>
<column_name>disability_group</column_name>
<stream_name>disability_group</stream_name>
</field>
<field>
<column_name>wanted_end_date</column_name>
<stream_name>wanted_end_date</stream_name>
<column_name>disability_establish_org_name</column_name>
<stream_name>disability_establish_org_name</stream_name>
</field>
<field>
<column_name>grounds_stopping_search</column_name>
<stream_name>grounds_stopping_search</stream_name>
<column_name>register_date</column_name>
<stream_name>register_date</stream_name>
</field>
<field>
<column_name>name_organ_end_search</column_name>
<stream_name>name_organ_end_search</stream_name>
<column_name>confirmation_date</column_name>
<stream_name>confirmation_date</stream_name>
</field>
<field>
<column_name>deregistration_date</column_name>
<stream_name>deregistration_date</stream_name>
</field>
<field>
<column_name>permanent</column_name>
<stream_name>permanent</stream_name>
</field>
</fields>
<ignore_errors>N</ignore_errors>
@ -382,29 +359,29 @@ WHERE'${IDM_ID}' != ''
<return_keys>N</return_keys>
<schema>ervu_dashboard</schema>
<specify_fields>Y</specify_fields>
<table>citizen_information_search</table>
<table>disability</table>
<tablename_in_field>N</tablename_in_field>
<tablename_in_table>Y</tablename_in_table>
<truncate>N</truncate>
<use_batch>Y</use_batch>
<attributes/>
<GUI>
<xloc>720</xloc>
<yloc>320</yloc>
<xloc>992</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>Update</name>
<name>has_disability_update</name>
<type>Update</type>
<description/>
<distribute>N</distribute>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>1000</commit>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<error_ignored>N</error_ignored>
<ignore_flag_field/>
@ -418,39 +395,39 @@ WHERE'${IDM_ID}' != ''
<schema>ervu_dashboard</schema>
<table>citizen</table>
<value>
<name>is_wanted</name>
<rename>is_wanted</rename>
<name>has_disability</name>
<rename>has_disability</rename>
</value>
</lookup>
<skip_lookup>N</skip_lookup>
<use_batch>N</use_batch>
<use_batch>Y</use_batch>
<attributes/>
<GUI>
<xloc>1008</xloc>
<yloc>624</yloc>
<xloc>1216</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform_error_handling>
<error>
<source_transform>Table output</source_transform>
<source_transform>disability_output</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename>error_code</codes_valuename>
<codes_valuename/>
<max_errors/>
<max_pct_errors/>
<min_pct_rows/>
</error>
<error>
<source_transform>Update</source_transform>
<source_transform>has_disability_update</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename>error_decription</descriptions_valuename>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename>error_code</codes_valuename>
<codes_valuename/>
<max_errors/>
<max_pct_errors/>
<min_pct_rows/>
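
Compared with the old citizen_information_search query, the new disability_input reads info -> 'svedFL' -> 'svedInvalid' -> 'invalid' as a single JSON object (no jsonb_typeof guard or jsonb_array_elements). The extraction can be checked standalone against the ervu-dashboard database; the recruitment id literal below is only a placeholder for ${IDM_ID}, and the column list is trimmed for brevity.

-- Ad-hoc check of the JSONB extraction used by disability_input
-- ('00000000-...' stands in for a real ${IDM_ID}).
WITH filteredData AS (
    SELECT ri.recruit_id,
           ri.info -> 'svedFL' -> 'svedInvalid' -> 'invalid' AS invalid_elem
    FROM recruits_info ri
    WHERE ri.info -> 'svedFL' -> 'svedInvalid' ->> 'invalid' <> 'null'
      AND COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '00000000-0000-0000-0000-000000000000'
)
SELECT fd.recruit_id,
       invalid_elem ->> 'gruppa'                          AS disability_group,
       to_date(invalid_elem ->> 'dataSved', 'YYYY-MM-DD') AS source_update_date,
       COALESCE(invalid_elem -> 'ustanInvalid' ->> 'prBessrInvl' = '1', false) AS permanent
FROM filteredData fd;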

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>border_crossing_flow_delta</name>
<name>disability_flow_delta</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,21 +13,21 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/11 13:22:56.048</created_date>
<created_date>2025/09/08 20:36:38.686</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/11 13:22:56.048</modified_date>
<modified_date>2025/09/08 20:36:38.686</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Create job execution record</from>
<to>Table input</to>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<from>Create job execution record</from>
<to>disability_input</to>
<enabled>Y</enabled>
</hop>
<hop>
@ -35,53 +35,43 @@
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Group by</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>User defined Java expression</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>User defined Java expression</from>
<to>Update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Group by</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Update</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Update</from>
<to>Filter rows</to>
<from>Identify last row in a stream</from>
<to>disability_upsert</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Update</from>
<from>disability_input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>disability_upsert</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>disability_upsert</from>
<to>has_disability_update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>has_disability_update</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>has_disability_update</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
@ -97,13 +87,13 @@
<method>none</method>
<schema_name/>
</partitioning>
<abort_option>ABORT_WITH_ERROR</abort_option>
<abort_option>ABORT</abort_option>
<always_log_rows>Y</always_log_rows>
<row_threshold>0</row_threshold>
<attributes/>
<GUI>
<xloc>1392</xloc>
<yloc>528</yloc>
<xloc>1936</xloc>
<yloc>560</yloc>
</GUI>
</transform>
<transform>
@ -137,8 +127,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1392</xloc>
<yloc>352</yloc>
<xloc>1712</xloc>
<yloc>560</yloc>
</GUI>
</transform>
<transform>
@ -168,7 +158,7 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1872</xloc>
<xloc>1936</xloc>
<yloc>208</yloc>
</GUI>
</transform>
@ -194,14 +184,14 @@ and recruitment_id = '${IDM_ID}';
<sql>UPDATE etl.job_execution
SET
status = 'DELTA_PROCESSING',
execution_datetime = DEFAULT,
execution_datetime = current_timestamp,
error_description = NULL
where job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}';</sql>
<attributes/>
<GUI>
<xloc>352</xloc>
<yloc>352</yloc>
<xloc>816</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
@ -217,7 +207,7 @@ and recruitment_id = '${IDM_ID}';</sql>
</partitioning>
<attributes/>
<GUI>
<xloc>1632</xloc>
<xloc>1232</xloc>
<yloc>208</yloc>
</GUI>
</transform>
@ -253,50 +243,15 @@ and recruitment_id = '${IDM_ID}';</sql>
<send_true_to>Change job status on success</send_true_to>
<attributes/>
<GUI>
<xloc>1632</xloc>
<yloc>320</yloc>
</GUI>
</transform>
<transform>
<name>Group by</name>
<type>GroupBy</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<add_linenr>N</add_linenr>
<all_rows>N</all_rows>
<directory>${java.io.tmpdir}</directory>
<fields>
<field>
<aggregate>border_crossed_raw</aggregate>
<subject>is_border_crossed</subject>
<type>MAX</type>
</field>
</fields>
<give_back_row>N</give_back_row>
<group>
<field>
<name>recruit_id</name>
</field>
</group>
<ignore_aggregate>N</ignore_aggregate>
<prefix>grp</prefix>
<attributes/>
<GUI>
<xloc>752</xloc>
<yloc>208</yloc>
<xloc>1936</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>Identify last row in a stream</name>
<type>DetectLastRow</type>
<description/>
<distribute>Y</distribute>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
@ -306,56 +261,12 @@ and recruitment_id = '${IDM_ID}';</sql>
<resultfieldname>last_row</resultfieldname>
<attributes/>
<GUI>
<xloc>944</xloc>
<yloc>208</yloc>
<xloc>1232</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>recruit_id</field>
<name>recruit_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>border_crossing</table>
<value>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>date_crossing</name>
<rename>date_crossing</rename>
<update>Y</update>
</value>
<value>
<name>return_date</name>
<rename>return_date</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>752</xloc>
<yloc>352</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<name>disability_input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
@ -367,65 +278,33 @@ and recruitment_id = '${IDM_ID}';</sql>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>SELECT
recruit_id,
(string_agg(elem->>'dataVyezd', ', '))::date as date_crossing,
(string_agg(elem->>'dataVozvrashh', ', '))::date as return_date,
1 as border_crossed
FROM ervu_dashboard.recruits_info ri
JOIN ervu_dashboard.citizen r ON r.recruit_id = ri.recruit_id AND '${IDM_ID}' != '' -- check for an empty string
AND COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}'
AND ri.updated_at >= '${M_R_UP_DATE}'::timestamp,
LATERAL jsonb_array_elements(info->'svedFL'->'extend'->'svedPeresechGran') as elem
WHERE jsonb_typeof(info->'svedFL'->'extend'->'svedPeresechGran') = 'array'
AND (elem->>'dataVyezd' IS NOT NULL OR elem->>'dataVozvrashh' IS NOT NULL)
GROUP BY recruit_id;</sql>
<sql>WITH filteredData AS (SELECT ri.recruit_id,
ri.info -> 'svedFL' -> 'svedInvalid' -> 'invalid' AS invalid_elem
FROM recruits_info ri
WHERE ri.info -> 'svedFL' -> 'svedInvalid' -> 'invalid' &lt;&gt; 'null'
and COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}'
and to_date(ri.info -> 'svedFL' -> 'svedInvalid' -> 'invalid' ->> 'dataSved', 'YYYY-MM-DD') > '${MAX_SOURCE_UPDATE_DATE}'
)
SELECT fd.recruit_id,
invalid_elem ->> 'id' AS source_id,
to_date(invalid_elem ->> 'dataSved', 'YYYY-MM-DD') as source_update_date,
invalid_elem ->> 'gruppa' as disability_group,
invalid_elem ->> 'ustanOrg' as disability_establish_org_name,
to_date(invalid_elem -> 'ustanInvalid' ->> 'dataUstan', 'YYYY-MM-DD') as register_date,
to_date(invalid_elem -> 'ustanInvalid' ->> 'dataPodtverzhdPlan', 'YYYY-MM-DD') as confirmation_date,
to_date(invalid_elem ->> 'dataSnyat', 'YYYY-MM-DD') as deregistration_date,
coalesce(invalid_elem -> 'ustanInvalid' ->> 'prBessrInvl' = '1', false) as permanent
FROM filteredData fd;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>560</xloc>
<yloc>352</yloc>
<xloc>1040</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>Update</name>
<type>Update</type>
<description/>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<error_ignored>Y</error_ignored>
<lookup>
<key>
<condition>=</condition>
<field>recruit_id</field>
<name>recruit_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>citizen</table>
<value>
<name>border_crossed</name>
<rename>border_crossed</rename>
</value>
</lookup>
<skip_lookup>N</skip_lookup>
<use_batch>Y</use_batch>
<attributes/>
<GUI>
<xloc>1392</xloc>
<yloc>208</yloc>
</GUI>
</transform>
<transform>
<name>User defined Java expression</name>
<type>Janino</type>
<name>disability_upsert</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
@ -434,27 +313,124 @@ GROUP BY recruit_id;</sql>
<method>none</method>
<schema_name/>
</partitioning>
<formula>
<field_name>border_crossed</field_name>
<formula_string>border_crossed_raw == 1</formula_string>
<value_type>Boolean</value_type>
<value_length>-1</value_length>
<value_precision>-1</value_precision>
<replace_field/>
</formula>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>source_id</field>
<name>source_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>disability</table>
<value>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>source_id</name>
<rename>source_id</rename>
<update>N</update>
</value>
<value>
<name>source_update_date</name>
<rename>source_update_date</rename>
<update>Y</update>
</value>
<value>
<name>disability_group</name>
<rename>disability_group</rename>
<update>Y</update>
</value>
<value>
<name>disability_establish_org_name</name>
<rename>disability_establish_org_name</rename>
<update>Y</update>
</value>
<value>
<name>register_date</name>
<rename>register_date</rename>
<update>Y</update>
</value>
<value>
<name>confirmation_date</name>
<rename>confirmation_date</rename>
<update>Y</update>
</value>
<value>
<name>deregistration_date</name>
<rename>deregistration_date</rename>
<update>Y</update>
</value>
<value>
<name>permanent</name>
<rename>permanent</rename>
<update>Y</update>
</value>
<value>
<name>last_row</name>
<rename>last_row</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>1168</xloc>
<yloc>208</yloc>
<xloc>1520</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>has_disability_update</name>
<type>Update</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<error_ignored>N</error_ignored>
<ignore_flag_field/>
<lookup>
<key>
<condition>=</condition>
<field>recruit_id</field>
<name>recruit_id</name>
<name2/>
</key>
<key>
<condition>&lt;></condition>
<field>has_disability</field>
<name>has_disability</name>
<name2/>
</key>
<schema>ervu_dashboard</schema>
<table>citizen</table>
<value>
<name>has_disability</name>
<rename>has_disability</rename>
</value>
</lookup>
<skip_lookup>N</skip_lookup>
<use_batch>Y</use_batch>
<attributes/>
<GUI>
<xloc>1712</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform_error_handling>
<error>
<source_transform>Insert / update</source_transform>
<source_transform>disability_upsert</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename/>
<max_errors/>
@ -462,7 +438,7 @@ GROUP BY recruit_id;</sql>
<min_pct_rows/>
</error>
<error>
<source_transform>Update</source_transform>
<source_transform>has_disability_update</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
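
disability_upsert is an InsertUpdate step keyed on source_id, with recruit_id and source_id marked update=N. Per incoming row it behaves roughly like the statement below; this is a sketch, not what Hop actually emits (the step does its own lookup-then-insert-or-update and needs no unique constraint, whereas ON CONFLICT (source_id) assumes one), and the last_row helper field mapped in the step is omitted.

-- Rough SQL equivalent of disability_upsert (InsertUpdate keyed on source_id).
INSERT INTO ervu_dashboard.disability
    (recruit_id, source_id, source_update_date, disability_group,
     disability_establish_org_name, register_date, confirmation_date,
     deregistration_date, permanent)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (source_id) DO UPDATE SET
    source_update_date            = EXCLUDED.source_update_date,
    disability_group              = EXCLUDED.disability_group,
    disability_establish_org_name = EXCLUDED.disability_establish_org_name,
    register_date                 = EXCLUDED.register_date,
    confirmation_date             = EXCLUDED.confirmation_date,
    deregistration_date           = EXCLUDED.deregistration_date,
    permanent                     = EXCLUDED.permanent;
-- recruit_id and source_id carry update=N in the step, so they are left untouched on conflict.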

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>citizen_information_search_flow_repeat</name>
<name>disability_flow_repeat</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,31 +13,21 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/08 13:25:52.319</created_date>
<created_date>2025/09/08 20:32:10.265</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/08 13:25:52.319</modified_date>
<modified_date>2025/09/08 20:32:10.265</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Create job execution record</from>
<to>Table input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Add constants</from>
<to>Update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<from>Create job execution record</from>
<to>disability_input</to>
<enabled>Y</enabled>
</hop>
<hop>
@ -46,38 +36,43 @@
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream 2</from>
<to>Add constants</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Update</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Update</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<from>Identify last row in a stream</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<from>disability_input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>disability_upsert</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>disability_upsert</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Identify last row in a stream 2</to>
<from>disability_upsert</from>
<to>has_disability_update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>has_disability_update</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>has_disability_update</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
</order>
@ -92,40 +87,13 @@
<method>none</method>
<schema_name/>
</partitioning>
<abort_option>ABORT_WITH_ERROR</abort_option>
<abort_option>ABORT</abort_option>
<always_log_rows>Y</always_log_rows>
<row_threshold>0</row_threshold>
<attributes/>
<GUI>
<xloc>1152</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>Add constants</name>
<type>Constant</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<fields>
<field>
<length>-1</length>
<name>is_wanted</name>
<nullif>true</nullif>
<precision>-1</precision>
<set_empty_string>N</set_empty_string>
<type>Boolean</type>
</field>
</fields>
<attributes/>
<GUI>
<xloc>880</xloc>
<yloc>640</yloc>
<xloc>1504</xloc>
<yloc>592</yloc>
</GUI>
</transform>
<transform>
@ -159,8 +127,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>992</xloc>
<yloc>432</yloc>
<xloc>1120</xloc>
<yloc>592</yloc>
</GUI>
</transform>
<transform>
@ -185,14 +153,13 @@ and recruitment_id = '${IDM_ID}';
<sql>UPDATE etl.job_execution
SET status = 'SUCCESS'
WHERE job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}'
and status = 'PROCESSING';
and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1312</xloc>
<yloc>640</yloc>
<xloc>1504</xloc>
<yloc>256</yloc>
</GUI>
</transform>
<transform>
@ -214,31 +181,17 @@ and status = 'PROCESSING';
<replace_variables>Y</replace_variables>
<set_params>N</set_params>
<single_statement>N</single_statement>
<sql>INSERT INTO etl.job_execution (
id,
job_name,
status,
execution_datetime,
error_description,
recruitment_id
)
VALUES (
DEFAULT,
'${JOB_NAME}',
'PROCESSING',
DEFAULT,
NULL,
'${IDM_ID}'
)
ON CONFLICT (job_name, recruitment_id)
DO UPDATE SET
<sql>UPDATE etl.job_execution
SET
status = 'PROCESSING',
execution_datetime = DEFAULT,
error_description = NULL;</sql>
execution_datetime = current_timestamp,
error_description = NULL
where job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}';</sql>
<attributes/>
<GUI>
<xloc>176</xloc>
<yloc>272</yloc>
<xloc>416</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
@ -254,8 +207,8 @@ DO UPDATE SET
</partitioning>
<attributes/>
<GUI>
<xloc>1312</xloc>
<yloc>272</yloc>
<xloc>832</xloc>
<yloc>256</yloc>
</GUI>
</transform>
<transform>
@ -290,15 +243,15 @@ DO UPDATE SET
<send_true_to>Change job status on success</send_true_to>
<attributes/>
<GUI>
<xloc>1136</xloc>
<yloc>640</yloc>
<xloc>1504</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>Identify last row in a stream 2</name>
<name>Identify last row in a stream</name>
<type>DetectLastRow</type>
<description/>
<distribute>Y</distribute>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
@ -308,76 +261,12 @@ DO UPDATE SET
<resultfieldname>last_row</resultfieldname>
<attributes/>
<GUI>
<xloc>688</xloc>
<yloc>640</yloc>
<xloc>832</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>1000</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>recruit_id</field>
<name>recruit_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>citizen_information_search</table>
<value>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>name_organ_start_search</name>
<rename>name_organ_start_search</rename>
<update>Y</update>
</value>
<value>
<name>article_criminal_code</name>
<rename>article_criminal_code</rename>
<update>Y</update>
</value>
<value>
<name>wanted_start_date</name>
<rename>wanted_start_date</rename>
<update>Y</update>
</value>
<value>
<name>wanted_end_date</name>
<rename>wanted_end_date</rename>
<update>Y</update>
</value>
<value>
<name>grounds_stopping_search</name>
<rename>grounds_stopping_search</rename>
<update>Y</update>
</value>
<value>
<name>name_organ_end_search</name>
<rename>name_organ_end_search</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>688</xloc>
<yloc>272</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<name>disability_input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
@ -389,79 +278,154 @@ DO UPDATE SET
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<sql>
SELECT
ri.recruit_id,
ri.info -> 'svedFL' -> 'extend' -> 'rozysk' ->> 'naimOrg' AS name_organ_start_search,
ri.info -> 'svedFL' -> 'extend' -> 'rozysk' ->> 'ugolovZakon' AS article_criminal_code,
TO_DATE(ri.info -> 'svedFL' -> 'extend' -> 'rozysk' ->> 'dataNachRozisk', 'YYYY-MM-DD') AS wanted_start_date,
TO_DATE(ri.info -> 'svedFL' -> 'extend' -> 'rozysk' ->> 'dataPrekrRozysk', 'YYYY-MM-DD') AS wanted_end_date,
ri.info -> 'svedFL' -> 'extend' -> 'rozysk' ->> 'osnovPrekrRozysk' AS grounds_stopping_search,
ri.info -> 'svedFL' -> 'extend' -> 'rozysk' ->> 'naimOrgPrekrRozysk' AS name_organ_end_search
FROM recruits_info ri
JOIN ervu_dashboard.citizen r ON r.recruit_id = ri.recruit_id
WHERE'${IDM_ID}' != ''
AND COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}'
AND '${M_R_CR_DATE}'::timestamp >= ri.created_at
AND jsonb_typeof(ri.info->'svedFL'->'extend'->'rozysk') = 'object'
</sql>
<sql>WITH filteredData AS (SELECT ri.recruit_id,
ri.info -> 'svedFL' -> 'svedInvalid' -> 'invalid' AS invalid_elem
FROM recruits_info ri
WHERE ri.info -> 'svedFL' -> 'svedInvalid' -> 'invalid' &lt;&gt; 'null'
and COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}'
)
SELECT fd.recruit_id,
invalid_elem ->> 'id' AS source_id,
to_date(invalid_elem ->> 'dataSved', 'YYYY-MM-DD') as source_update_date,
invalid_elem ->> 'gruppa' as disability_group,
invalid_elem ->> 'ustanOrg' as disability_establish_org_name,
to_date(invalid_elem -> 'ustanInvalid' ->> 'dataUstan', 'YYYY-MM-DD') as register_date,
to_date(invalid_elem -> 'ustanInvalid' ->> 'dataPodtverzhdPlan', 'YYYY-MM-DD') as confirmation_date,
to_date(invalid_elem ->> 'dataSnyat', 'YYYY-MM-DD') as deregistration_date,
coalesce(invalid_elem -> 'ustanInvalid' ->> 'prBessrInvl' = '1', false) as permanent
FROM filteredData fd;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>416</xloc>
<yloc>272</yloc>
<xloc>640</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>Update</name>
<type>Update</type>
<name>disability_upsert</name>
<type>InsertUpdate</type>
<description/>
<distribute>N</distribute>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>1000</commit>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>source_id</field>
<name>source_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>disability</table>
<value>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>source_id</name>
<rename>source_id</rename>
<update>N</update>
</value>
<value>
<name>source_update_date</name>
<rename>source_update_date</rename>
<update>Y</update>
</value>
<value>
<name>disability_group</name>
<rename>disability_group</rename>
<update>Y</update>
</value>
<value>
<name>disability_establish_org_name</name>
<rename>disability_establish_org_name</rename>
<update>Y</update>
</value>
<value>
<name>register_date</name>
<rename>register_date</rename>
<update>Y</update>
</value>
<value>
<name>confirmation_date</name>
<rename>confirmation_date</rename>
<update>Y</update>
</value>
<value>
<name>deregistration_date</name>
<rename>deregistration_date</rename>
<update>Y</update>
</value>
<value>
<name>permanent</name>
<rename>permanent</rename>
<update>Y</update>
</value>
<value>
<name>last_row</name>
<rename>last_row</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>1120</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>has_disability_update</name>
<type>Update</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<error_ignored>N</error_ignored>
<ignore_flag_field/>
<lookup>
<key>
<condition>=</condition>
<field>recruit_id</field>
<name>recruit_id</name>
<name2/>
</key>
<key>
<condition>&lt;></condition>
<field>has_disability</field>
<name>has_disability</name>
<name2/>
</key>
<schema>ervu_dashboard</schema>
<table>citizen</table>
<value>
<name>is_wanted</name>
<rename>is_wanted</rename>
<name>has_disability</name>
<rename>has_disability</rename>
</value>
</lookup>
<skip_lookup>N</skip_lookup>
<use_batch>N</use_batch>
<use_batch>Y</use_batch>
<attributes/>
<GUI>
<xloc>992</xloc>
<yloc>640</yloc>
<xloc>1312</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform_error_handling>
<error>
<source_transform>Insert / update</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename>error_code</codes_valuename>
<max_errors/>
<max_pct_errors/>
<min_pct_rows/>
</error>
<error>
<source_transform>Update</source_transform>
<source_transform>disability_upsert</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
@ -472,6 +436,18 @@ WHERE'${IDM_ID}' != ''
<max_pct_errors/>
<min_pct_rows/>
</error>
<error>
<source_transform>has_disability_update</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename/>
<fields_valuename/>
<codes_valuename/>
<max_errors/>
<max_pct_errors/>
<min_pct_rows/>
</error>
</transform_error_handling>
<attributes/>
</pipeline>
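
has_disability_update is a plain Update step with two lookup keys, recruit_id = and has_disability <>, so citizen rows are only written when the flag actually changes. Per stream row it corresponds roughly to the statement below (Hop batches these with use_batch=Y and commit=10000; both parameters of has_disability come from the same stream field).

-- Rough per-row equivalent of has_disability_update (Update step).
UPDATE ervu_dashboard.citizen
SET has_disability = ?
WHERE recruit_id = ?
  AND has_disability <> ?;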

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>recruitments_five_flow</name>
<name>recruitment_five_flow</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,41 +13,41 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/04/18 09:48:01.970</created_date>
<created_date>2025/09/08 19:24:07.006</created_date>
<modified_user>-</modified_user>
<modified_date>2025/04/18 09:48:01.970</modified_date>
<modified_date>2025/09/08 19:24:07.006</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Get all recruitments ordered by created_date</from>
<to>citizen_spouse_flow.hpl</to>
<from>Get all recruitments</from>
<to>disability_flow.hpl</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Get all recruitments ordered by created_date</from>
<to>citizen_spouse_flow.hpl 2</to>
<from>Get all recruitments</from>
<to>disability_flow.hpl 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Get all recruitments ordered by created_date</from>
<to>citizen_spouse_flow.hpl 3</to>
<from>Get all recruitments</from>
<to>disability_flow.hpl 3</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Get all recruitments ordered by created_date</from>
<to>citizen_spouse_flow.hpl 4</to>
<from>Get all recruitments</from>
<to>disability_flow.hpl 4</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Get all recruitments ordered by created_date</from>
<to>citizen_spouse_flow.hpl 5</to>
<from>Get all recruitments</from>
<to>disability_flow.hpl 5</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Get all recruitments ordered by created_date</name>
<name>Get all recruitments</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
@ -57,21 +57,20 @@
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<connection>ervu-dashboard-test</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>SELECT
idm_id AS recruitment
idm_id
FROM ervu_dashboard.recruitment;</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>432</xloc>
<yloc>304</yloc>
<xloc>560</xloc>
<yloc>448</yloc>
</GUI>
</transform>
<transform>
<name>citizen_spouse_flow.hpl</name>
<name>disability_flow.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -82,7 +81,7 @@ FROM ervu_dashboard.recruitment;</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_spouse/parallel/citizen_spouse_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -91,14 +90,9 @@ FROM ervu_dashboard.recruitment;</sql>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizen_spouse_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -122,12 +116,12 @@ FROM ervu_dashboard.recruitment;</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>704</xloc>
<yloc>144</yloc>
<xloc>832</xloc>
<yloc>288</yloc>
</GUI>
</transform>
<transform>
<name>citizen_spouse_flow.hpl 2</name>
<name>disability_flow.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -138,7 +132,7 @@ FROM ervu_dashboard.recruitment;</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_spouse/parallel/citizen_spouse_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -147,14 +141,9 @@ FROM ervu_dashboard.recruitment;</sql>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizen_spouse_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -178,12 +167,12 @@ FROM ervu_dashboard.recruitment;</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>704</xloc>
<yloc>224</yloc>
<xloc>832</xloc>
<yloc>368</yloc>
</GUI>
</transform>
<transform>
<name>citizen_spouse_flow.hpl 3</name>
<name>disability_flow.hpl 3</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -194,7 +183,7 @@ FROM ervu_dashboard.recruitment;</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_spouse/parallel/citizen_spouse_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -203,14 +192,9 @@ FROM ervu_dashboard.recruitment;</sql>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizen_spouse_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -234,12 +218,12 @@ FROM ervu_dashboard.recruitment;</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>704</xloc>
<yloc>304</yloc>
<xloc>832</xloc>
<yloc>448</yloc>
</GUI>
</transform>
<transform>
<name>citizen_spouse_flow.hpl 4</name>
<name>disability_flow.hpl 4</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -250,7 +234,7 @@ FROM ervu_dashboard.recruitment;</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_spouse/parallel/citizen_spouse_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -259,14 +243,9 @@ FROM ervu_dashboard.recruitment;</sql>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizen_spouse_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -290,12 +269,12 @@ FROM ervu_dashboard.recruitment;</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>704</xloc>
<yloc>384</yloc>
<xloc>832</xloc>
<yloc>528</yloc>
</GUI>
</transform>
<transform>
<name>citizen_spouse_flow.hpl 5</name>
<name>disability_flow.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -306,7 +285,7 @@ FROM ervu_dashboard.recruitment;</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_spouse/parallel/citizen_spouse_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -315,14 +294,9 @@ FROM ervu_dashboard.recruitment;</sql>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizen_spouse_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -346,8 +320,8 @@ FROM ervu_dashboard.recruitment;</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>704</xloc>
<yloc>464</yloc>
<xloc>832</xloc>
<yloc>608</yloc>
</GUI>
</transform>
<transform_error_handling>
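
Get all recruitments now returns bare idm_id and fans its rows out round-robin to the five disability_flow.hpl executor copies (distribute=Y, group_size=1, field idm_id mapped to IDM_ID); the explicit JOB_NAME mapping is gone, presumably because init_job_name in the parent workflow sets it and inherit_all_vars=Y passes it down. Under that round-robin assumption, a rough per-copy load estimate:

-- Approximate rows handled by each of the five executor copies,
-- assuming even round-robin distribution of the driver query's output.
SELECT CEIL(COUNT(*) / 5.0) AS rows_per_executor
FROM ervu_dashboard.recruitment;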

View file

@ -13,36 +13,41 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/11 13:16:11.204</created_date>
<created_date>2025/09/08 20:38:40.987</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/11 13:16:11.204</modified_date>
<modified_date>2025/09/08 20:38:40.987</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>citizen_guardianship_flow_delta.hpl</to>
<to>disability_flow_delta.hpl</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>get_max_source_update_date</from>
<to>Table input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>citizen_guardianship_flow_delta.hpl 2</to>
<to>disability_flow_delta.hpl 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>citizen_guardianship_flow_delta.hpl 3</to>
<to>disability_flow_delta.hpl 3</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>citizen_guardianship_flow_delta.hpl 4</to>
<to>disability_flow_delta.hpl 4</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>citizen_guardianship_flow_delta.hpl 5</to>
<to>disability_flow_delta.hpl 5</to>
<enabled>Y</enabled>
</hop>
</order>
@ -57,25 +62,115 @@
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard-test</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<lookup>get_max_source_update_date</lookup>
<sql>WITH mud AS (
SELECT
recruitment_id,
MAX(execution_datetime) AS max_upd_date
FROM etl.job_execution
WHERE job_name = '${JOB_NAME}'
AND status IN ('SUCCESS','DELTA_ERROR','DELTA_SUCCESS','DELTA_PROCESSING')
GROUP BY recruitment_id
)
SELECT
r.idm_id,
? max_source_update_date
FROM ervu_dashboard.recruitment r
JOIN mud ON mud.recruitment_id = r.idm_id
JOIN recruits_info ri
ON COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = r.idm_id
AND ri.updated_at > mud.max_upd_date;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>704</xloc>
<yloc>496</yloc>
</GUI>
</transform>
<transform>
<name>disability_flow_delta.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>IDM_ID</variable>
<field>idm_id</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>896</xloc>
<yloc>336</yloc>
</GUI>
</transform>
<transform>
<name>get_max_source_update_date</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
r.idm_id as recruitment_id
FROM ervu_dashboard.recruitment r
LEFT JOIN etl.job_execution je
ON r.idm_id = je.recruitment_id
and job_name = 'citizen_guardianship_job'
where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING');</sql>
<sql>select max(source_update_date)
from disability;</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>352</xloc>
<yloc>288</yloc>
<xloc>528</xloc>
<yloc>496</yloc>
</GUI>
</transform>
<transform>
<name>citizen_guardianship_flow_delta.hpl</name>
<name>disability_flow_delta.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -86,7 +181,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_guardianship/parallel/citizen_guardianship_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -94,14 +189,14 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizen_guardianship_job</input>
<variable>IDM_ID</variable>
<field>idm_id</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -126,12 +221,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>128</yloc>
<xloc>896</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>citizen_guardianship_flow_delta.hpl 2</name>
<name>disability_flow_delta.hpl 3</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -142,7 +237,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_guardianship/parallel/citizen_guardianship_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -150,14 +245,14 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizen_guardianship_job</input>
<variable>IDM_ID</variable>
<field>idm_id</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -182,12 +277,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>208</yloc>
<xloc>896</xloc>
<yloc>496</yloc>
</GUI>
</transform>
<transform>
<name>citizen_guardianship_flow_delta.hpl 3</name>
<name>disability_flow_delta.hpl 4</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -198,7 +293,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_guardianship/parallel/citizen_guardianship_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -206,14 +301,14 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizen_guardianship_job</input>
<variable>IDM_ID</variable>
<field>idm_id</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -238,12 +333,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>288</yloc>
<xloc>896</xloc>
<yloc>576</yloc>
</GUI>
</transform>
<transform>
<name>citizen_guardianship_flow_delta.hpl 4</name>
<name>disability_flow_delta.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -254,7 +349,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_guardianship/parallel/citizen_guardianship_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -262,14 +357,14 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizen_guardianship_job</input>
<variable>IDM_ID</variable>
<field>idm_id</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -294,64 +389,8 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>368</yloc>
</GUI>
</transform>
<transform>
<name>citizen_guardianship_flow_delta.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_guardianship/parallel/citizen_guardianship_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizen_guardianship_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>448</yloc>
<xloc>896</xloc>
<yloc>656</yloc>
</GUI>
</transform>
<transform_error_handling>
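
A note for readers new to Hop's Table input: the ? in the dispatcher query above is the "insert data from transform" placeholder, filled at run time from the single row returned by get_max_source_update_date, while ${JOB_NAME} is resolved from the workflow variable. A minimal sketch of the statement that effectively runs (the timestamp literal is a placeholder, not a value taken from this repository):

    -- effective delta-dispatch query after Hop substitutes the lookup row for '?'
    WITH mud AS (
        SELECT recruitment_id,
               MAX(execution_datetime) AS max_upd_date
        FROM etl.job_execution
        WHERE job_name = 'disability_job'          -- ${JOB_NAME} at run time
          AND status IN ('SUCCESS','DELTA_ERROR','DELTA_SUCCESS','DELTA_PROCESSING')
        GROUP BY recruitment_id
    )
    SELECT r.idm_id,
           TIMESTAMP '2025-09-08 00:00:00' AS max_source_update_date   -- value from get_max_source_update_date
    FROM ervu_dashboard.recruitment r
    JOIN mud ON mud.recruitment_id = r.idm_id
    JOIN recruits_info ri
      ON COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = r.idm_id
     AND ri.updated_at > mud.max_upd_date;

Each returned row then drives one of the five disability_flow_delta.hpl executor copies through the IDM_ID and MAX_SOURCE_UPDATE_DATE variable mappings.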

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>recruitment_five_flow_delta</name>
<name>recruitment_five_flow_repeat</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,36 +13,36 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/11 13:16:11.204</created_date>
<created_date>2025/09/08 20:31:52.575</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/11 13:16:11.204</modified_date>
<modified_date>2025/09/08 20:31:52.575</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>property_flow_delta.hpl</to>
<to>disability_flow_repeat.hpl</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>property_flow_delta.hpl 2</to>
<to>disability_flow_repeat.hpl 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>property_flow_delta.hpl 3</to>
<to>disability_flow_repeat.hpl 3</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>property_flow_delta.hpl 4</to>
<to>disability_flow_repeat.hpl 4</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>property_flow_delta.hpl 5</to>
<to>disability_flow_repeat.hpl 5</to>
<enabled>Y</enabled>
</hop>
</order>
@ -57,25 +57,25 @@
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<connection>ervu-dashboard-test</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
r.idm_id as recruitment_id
<sql>SELECT r.idm_id AS recruitment_id
FROM ervu_dashboard.recruitment r
LEFT JOIN etl.job_execution je
ON r.idm_id = je.recruitment_id
and job_name = 'property_job'
where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING');</sql>
AND je.job_name = 'disability_job'
WHERE je.id IS NULL
OR je.status IN ('ERROR', 'PROCESSING');</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>352</xloc>
<yloc>288</yloc>
<xloc>656</xloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
<name>property_flow_delta.hpl</name>
<name>disability_flow_repeat.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -86,7 +86,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/property/parallel/property_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -98,11 +98,6 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>property_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -126,12 +121,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>128</yloc>
<xloc>864</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>property_flow_delta.hpl 2</name>
<name>disability_flow_repeat.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -142,7 +137,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/property/parallel/property_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -154,11 +149,6 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>property_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -182,12 +172,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>208</yloc>
<xloc>864</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>property_flow_delta.hpl 3</name>
<name>disability_flow_repeat.hpl 3</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -198,7 +188,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/property/parallel/property_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -210,11 +200,6 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>property_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -238,12 +223,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>288</yloc>
<xloc>864</xloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
<name>property_flow_delta.hpl 4</name>
<name>disability_flow_repeat.hpl 4</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -254,7 +239,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/property/parallel/property_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -266,11 +251,6 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>property_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -294,12 +274,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>368</yloc>
<xloc>864</xloc>
<yloc>544</yloc>
</GUI>
</transform>
<transform>
<name>property_flow_delta.hpl 5</name>
<name>disability_flow_repeat.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -310,7 +290,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/property/parallel/property_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/disability/parallel/disability_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -322,11 +302,6 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>property_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -350,8 +325,8 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>448</yloc>
<xloc>864</xloc>
<yloc>624</yloc>
</GUI>
</transform>
<transform_error_handling>
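
A quick way to see what this repeat dispatcher will pick up is to group the same LEFT JOIN by outcome. A diagnostic sketch using only the tables and columns referenced in the query above; it is not part of the pipeline:

    -- how many recruitments fall into each repeat bucket for disability_job
    SELECT CASE WHEN je.id IS NULL THEN 'never_executed' ELSE je.status END AS bucket,
           COUNT(*) AS recruitments
    FROM ervu_dashboard.recruitment r
    LEFT JOIN etl.job_execution je
      ON r.idm_id = je.recruitment_id
     AND je.job_name = 'disability_job'
    WHERE je.id IS NULL
       OR je.status IN ('ERROR', 'PROCESSING')
    GROUP BY 1;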

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>deferment_decision_update_date_ervu_dashboard</name>
<name>health_state_constants_flow</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,51 +13,19 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/06/02 14:50:31.833</created_date>
<created_date>2025/09/08 16:43:33.050</created_date>
<modified_user>-</modified_user>
<modified_date>2025/06/02 14:50:31.833</modified_date>
<modified_date>2025/09/08 16:43:33.050</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<from>disease_input</from>
<to>Table output</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
created_at AS system_create_date,
CASE
WHEN updated_at is null THEN '0001-01-01 00:00:00'
ELSE updated_at
END as system_update_date,
current_timestamp AS record_created,
'deferment_decision_table' AS workflow
FROM ervu_dashboard.deferment_liberation
WHERE updated_at = (SELECT MAX(updated_at) FROM ervu_dashboard.deferment_liberation)
limit 1</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>896</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>Table output</name>
<type>TableOutput</type>
@ -69,27 +37,71 @@ limit 1</sql>
<method>none</method>
<schema_name/>
</partitioning>
<commit>1000</commit>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<fields>
</fields>
<field>
<column_name>code</column_name>
<stream_name>code</stream_name>
</field>
<field>
<column_name>value</column_name>
<stream_name>value</stream_name>
</field>
<field>
<column_name>actual</column_name>
<stream_name>actual</stream_name>
</field>
<field>
<column_name>update_date</column_name>
<stream_name>update_date</stream_name>
</field>
</fields>
<ignore_errors>N</ignore_errors>
<only_when_have_rows>N</only_when_have_rows>
<partitioning_daily>N</partitioning_daily>
<partitioning_enabled>N</partitioning_enabled>
<partitioning_monthly>Y</partitioning_monthly>
<return_keys>N</return_keys>
<schema>public</schema>
<specify_fields>N</specify_fields>
<table>recruit_create</table>
<schema>ervu_dashboard</schema>
<specify_fields>Y</specify_fields>
<table>disease</table>
<tablename_in_field>N</tablename_in_field>
<tablename_in_table>Y</tablename_in_table>
<truncate>N</truncate>
<use_batch>Y</use_batch>
<attributes/>
<GUI>
<xloc>1216</xloc>
<yloc>304</yloc>
<xloc>608</xloc>
<yloc>288</yloc>
</GUI>
</transform>
<transform>
<name>disease_input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>nsi</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
key as code,
value ->> 'value' as value,
NOT hidden as actual,
updated_at as update_date
FROM classifier_records
WHERE code = 'diseaseCode';</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>400</xloc>
<yloc>288</yloc>
</GUI>
</transform>
<transform_error_handling>
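
The Table output above maps four stream fields onto ervu_dashboard.disease with specify_fields=Y. A minimal sketch of the target shape this mapping assumes; the column types are guesses and are not taken from any migration in this change:

    -- assumed shape of the constants target table (types are guesses)
    CREATE TABLE IF NOT EXISTS ervu_dashboard.disease (
        code        text PRIMARY KEY,   -- classifier_records.key
        value       text,               -- value ->> 'value'
        actual      boolean,            -- NOT hidden
        update_date timestamp           -- classifier_records.updated_at
    );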

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>recruit_archivation_reason</name>
<name>health_state_constants_flow_repeat</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,91 +13,26 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/09/09 16:35:56.339</created_date>
<created_date>2025/09/22 17:34:05.067</created_date>
<modified_user>-</modified_user>
<modified_date>2025/09/09 16:35:56.339</modified_date>
<modified_date>2025/09/22 17:34:05.067</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<from>get_max_disease_update_date</from>
<to>disease_input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>disease_input</from>
<to>minobr_disease_upsert</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>id</field>
<name>id</name>
<name2/>
</key>
<schema>ervu_dashboard</schema>
<table>recruit_archivation_reason</table>
<value>
<name>value</name>
<rename>value</rename>
<update>Y</update>
</value>
<value>
<name>code</name>
<rename>code</rename>
<update>Y</update>
</value>
<value>
<name>created_at</name>
<rename>created_at</rename>
<update>Y</update>
</value>
<value>
<name>updated_at</name>
<rename>updated_at</rename>
<update>Y</update>
</value>
<value>
<name>description</name>
<rename>description</rename>
<update>Y</update>
</value>
<value>
<name>extra_info</name>
<rename>extra_info</rename>
<update>Y</update>
</value>
<value>
<name>hidden</name>
<rename>hidden</rename>
<update>Y</update>
</value>
<value>
<name>id</name>
<rename>id</rename>
<update>N</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>848</xloc>
<yloc>288</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<name>disease_input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
@ -107,16 +42,98 @@
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu_person_archive</connection>
<connection>nsi</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT *
from public.recruit_archivation_reason</sql>
<lookup>get_max_disease_update_date</lookup>
<sql>SELECT
key as code,
value ->> 'value' as value,
NOT hidden as actual,
updated_at as update_date
FROM classifier_records
WHERE code = 'diseaseCode'
AND updated_at > ?;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>448</xloc>
<yloc>256</yloc>
</GUI>
</transform>
<transform>
<name>get_max_disease_update_date</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
max(update_date) as max_update_date
FROM disease;
</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>480</xloc>
<yloc>288</yloc>
<xloc>224</xloc>
<yloc>256</yloc>
</GUI>
</transform>
<transform>
<name>minobr_disease_upsert</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>code</field>
<name>code</name>
<name2/>
</key>
<schema>ervu_dashboard</schema>
<table>disease</table>
<value>
<name>code</name>
<rename>code</rename>
<update>N</update>
</value>
<value>
<name>value</name>
<rename>value</rename>
<update>Y</update>
</value>
<value>
<name>actual</name>
<rename>actual</rename>
<update>Y</update>
</value>
<value>
<name>update_date</name>
<rename>update_date</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>256</yloc>
</GUI>
</transform>
<transform_error_handling>
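
Functionally, get_max_disease_update_date feeds the ? parameter of disease_input, and minobr_disease_upsert then inserts or updates rows keyed on code. If both ends lived in one PostgreSQL database (they do not here: the source is the nsi connection, the target is ervu-dashboard) and disease.code were unique, the same incremental refresh could be written as a single statement; an illustrative equivalent only:

    -- illustrative single-statement equivalent of the repeat constants flow
    INSERT INTO ervu_dashboard.disease (code, value, actual, update_date)
    SELECT cr.key,
           cr.value ->> 'value',
           NOT cr.hidden,
           cr.updated_at
    FROM classifier_records cr
    WHERE cr.code = 'diseaseCode'
      AND cr.updated_at > (SELECT max(update_date) FROM ervu_dashboard.disease)
    ON CONFLICT (code) DO UPDATE
    SET value       = EXCLUDED.value,
        actual      = EXCLUDED.actual,
        update_date = EXCLUDED.update_date;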

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>check_if_need_to_repeat_job</name>
<name>check_if_constants_exists</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,9 +13,9 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/08 12:27:51.630</created_date>
<created_date>2025/09/22 17:28:33.897</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/08 12:27:51.630</modified_date>
<modified_date>2025/09/22 17:28:33.897</modified_date>
</info>
<notepads>
</notepads>
@ -39,16 +39,17 @@
</partitioning>
<fields>
<field>
<field_name>need_to_repeat_job</field_name>
<variable_name>NEED_TO_REPEAT_JOB</variable_name>
<variable_type>ROOT_WORKFLOW</variable_type>
<default_value/>
<field_name>constants_exists</field_name>
<variable_name>CONSTANTS_EXISTS</variable_name>
<variable_type>PARENT_WORKFLOW</variable_type>
</field>
</fields>
<use_formatting>Y</use_formatting>
<attributes/>
<GUI>
<xloc>624</xloc>
<yloc>288</yloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
@ -65,18 +66,14 @@
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT EXISTS (SELECT 1
FROM ervu_dashboard.recruitment r
LEFT JOIN etl.job_execution je
ON r.idm_id = je.recruitment_id
and job_name = 'work_activity_job'
where je.status is null or je.status in ('ERROR', 'PROCESSING')
) as need_to_repeat_job;</sql>
<variables_active>N</variables_active>
<sql>SELECT EXISTS (
SELECT 1 FROM disease
) AS constants_exists</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>336</xloc>
<yloc>288</yloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform_error_handling>

View file

@ -0,0 +1,414 @@
<?xml version="1.0" encoding="UTF-8"?>
<workflow>
<name>health_state_job</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<workflow_version/>
<created_user>-</created_user>
<created_date>2025/09/08 20:45:23.262</created_date>
<modified_user>-</modified_user>
<modified_date>2025/09/08 20:45:23.262</modified_date>
<parameters>
</parameters>
<actions>
<action>
<name>Start</name>
<description/>
<type>SPECIAL</type>
<attributes/>
<DayOfMonth>1</DayOfMonth>
<hour>12</hour>
<intervalMinutes>60</intervalMinutes>
<intervalSeconds>0</intervalSeconds>
<minutes>0</minutes>
<repeat>N</repeat>
<schedulerType>0</schedulerType>
<weekDay>1</weekDay>
<parallel>N</parallel>
<xloc>256</xloc>
<yloc>256</yloc>
<attributes_hac/>
</action>
<action>
<name>check_if_job_execution_exists.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/support/check_if_job_execution_exists.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>1136</xloc>
<yloc>272</yloc>
<attributes_hac/>
</action>
<action>
<name>employer_job_execution_exists_check</name>
<description/>
<type>SIMPLE_EVAL</type>
<attributes/>
<fieldtype>boolean</fieldtype>
<successbooleancondition>false</successbooleancondition>
<successcondition>equal</successcondition>
<successnumbercondition>equal</successnumbercondition>
<successwhenvarset>N</successwhenvarset>
<valuetype>variable</valuetype>
<variablename>JOB_EXECUTED_FLAG</variablename>
<parallel>N</parallel>
<xloc>1440</xloc>
<yloc>272</yloc>
<attributes_hac/>
</action>
<action>
<name>recruitment_five_flow.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/recruitment_five_flow.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>Y</parallel>
<xloc>1744</xloc>
<yloc>272</yloc>
<attributes_hac/>
</action>
<action>
<name>recruitment_five_flow_repeat.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/recruitment_five_flow_repeat.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>1744</xloc>
<yloc>576</yloc>
<attributes_hac/>
</action>
<action>
<name>check_if_need_to_repeat.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/support/check_if_need_to_repeat.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>1440</xloc>
<yloc>416</yloc>
<attributes_hac/>
</action>
<action>
<name>Simple evaluation</name>
<description/>
<type>SIMPLE_EVAL</type>
<attributes/>
<fieldtype>boolean</fieldtype>
<successbooleancondition>true</successbooleancondition>
<successcondition>equal</successcondition>
<successnumbercondition>equal</successnumbercondition>
<successwhenvarset>N</successwhenvarset>
<valuetype>variable</valuetype>
<variablename>NEED_TO_REPEAT_JOB</variablename>
<parallel>N</parallel>
<xloc>1440</xloc>
<yloc>576</yloc>
<attributes_hac/>
</action>
<action>
<name>recruitment_five_flow_delta.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/recruitment_five_flow_delta.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>1440</xloc>
<yloc>768</yloc>
<attributes_hac/>
</action>
<action>
<name>init_job_name</name>
<description/>
<type>SET_VARIABLES</type>
<attributes/>
<fields>
<field>
<variable_name>JOB_NAME</variable_name>
<variable_type>CURRENT_WORKFLOW</variable_type>
<variable_value>health_state_job</variable_value>
</field>
</fields>
<file_variable_type>CURRENT_WORKFLOW</file_variable_type>
<replacevars>N</replacevars>
<parallel>N</parallel>
<xloc>944</xloc>
<yloc>272</yloc>
<attributes_hac/>
</action>
<action>
<name>check_if_constants_exists.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/constants/support/check_if_constants_exists.hpl</filename>
<logext/>
<logfile/>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>384</xloc>
<yloc>256</yloc>
<attributes_hac/>
</action>
<action>
<name>constants_exists</name>
<description/>
<type>SIMPLE_EVAL</type>
<attributes/>
<fieldtype>boolean</fieldtype>
<successbooleancondition>true</successbooleancondition>
<successcondition>equal</successcondition>
<successnumbercondition>equal</successnumbercondition>
<successwhenvarset>N</successwhenvarset>
<valuetype>variable</valuetype>
<variablename>CONSTANTS_EXISTS</variablename>
<parallel>N</parallel>
<xloc>560</xloc>
<yloc>256</yloc>
<attributes_hac/>
</action>
<action>
<name>health_state_constants_flow.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/constants/health_state_constants_flow.hpl</filename>
<logext/>
<logfile/>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>752</xloc>
<yloc>176</yloc>
<attributes_hac/>
</action>
<action>
<name>health_state_constants_flow_repeat.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/constants/health_state_constants_flow_repeat.hpl</filename>
<logext/>
<logfile/>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>752</xloc>
<yloc>352</yloc>
<attributes_hac/>
</action>
</actions>
<hops>
<hop>
<from>check_if_job_execution_exists.hpl</from>
<to>employer_job_execution_exists_check</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>employer_job_execution_exists_check</from>
<to>check_if_need_to_repeat.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>check_if_need_to_repeat.hpl</from>
<to>Simple evaluation</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>init_job_name</from>
<to>check_if_job_execution_exists.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>check_if_constants_exists.hpl</from>
<to>constants_exists</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>constants_exists</from>
<to>health_state_constants_flow.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>constants_exists</from>
<to>health_state_constants_flow_repeat.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Start</from>
<to>check_if_constants_exists.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>health_state_constants_flow.hpl</from>
<to>init_job_name</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>health_state_constants_flow_repeat.hpl</from>
<to>init_job_name</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Simple evaluation</from>
<to>recruitment_five_flow_repeat.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>employer_job_execution_exists_check</from>
<to>recruitment_five_flow.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Simple evaluation</from>
<to>recruitment_five_flow_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
</hops>
<notepads>
</notepads>
<attributes/>
</workflow>
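
Read from the hop list, health_state_job branches as follows (a reading aid derived from the hops above, not part of the file):

    Start
      -> check_if_constants_exists.hpl -> constants_exists (CONSTANTS_EXISTS)
           false -> health_state_constants_flow.hpl         (initial load of disease)
           true  -> health_state_constants_flow_repeat.hpl  (incremental refresh)
           both  -> init_job_name (JOB_NAME = health_state_job)
                 -> check_if_job_execution_exists.hpl -> employer_job_execution_exists_check (JOB_EXECUTED_FLAG)
                      false -> recruitment_five_flow.hpl                  (first full run)
                      true  -> check_if_need_to_repeat.hpl -> Simple evaluation (NEED_TO_REPEAT_JOB)
                                   true  -> recruitment_five_flow_repeat.hpl
                                   false -> recruitment_five_flow_delta.hpl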

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>restriction_document_flow</name>
<name>health_state_flow</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,16 +13,21 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/05 14:31:22.799</created_date>
<created_date>2025/09/08 21:46:18.176</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/05 14:31:22.799</modified_date>
<modified_date>2025/09/08 21:46:18.176</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Create job execution record</from>
<to>Table input</to>
<to>health_state_input</to>
<enabled>Y</enabled>
</hop>
<hop>
@ -30,41 +35,46 @@
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>health_state_output</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>health_state_input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>health_state_output</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Change job status on error</from>
<from>health_state_output</from>
<to>has_health_state_update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>has_health_state_update</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>Table output</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table output</from>
<from>has_health_state_update</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table output</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table output</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Abort</name>
@ -77,13 +87,13 @@
<method>none</method>
<schema_name/>
</partitioning>
<abort_option>ABORT_WITH_ERROR</abort_option>
<abort_option>ABORT</abort_option>
<always_log_rows>Y</always_log_rows>
<row_threshold>0</row_threshold>
<attributes/>
<GUI>
<xloc>1376</xloc>
<yloc>512</yloc>
<xloc>1264</xloc>
<yloc>672</yloc>
</GUI>
</transform>
<transform>
@ -117,8 +127,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1024</xloc>
<yloc>512</yloc>
<xloc>1040</xloc>
<yloc>672</yloc>
</GUI>
</transform>
<transform>
@ -148,8 +158,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1632</xloc>
<yloc>160</yloc>
<xloc>1504</xloc>
<yloc>288</yloc>
</GUI>
</transform>
<transform>
@ -177,8 +187,8 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
</sql>
<attributes/>
<GUI>
<xloc>288</xloc>
<yloc>320</yloc>
<xloc>336</xloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
@ -194,8 +204,8 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
</partitioning>
<attributes/>
<GUI>
<xloc>1632</xloc>
<yloc>320</yloc>
<xloc>752</xloc>
<yloc>288</yloc>
</GUI>
</transform>
<transform>
@ -230,15 +240,15 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
<send_true_to>Change job status on success</send_true_to>
<attributes/>
<GUI>
<xloc>1024</xloc>
<yloc>160</yloc>
<xloc>1504</xloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
<name>Identify last row in a stream</name>
<type>DetectLastRow</type>
<description/>
<distribute>Y</distribute>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
@ -249,107 +259,111 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
<attributes/>
<GUI>
<xloc>752</xloc>
<yloc>320</yloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<name>has_health_state_update</name>
<type>Update</type>
<description/>
<distribute>N</distribute>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>postgres.subpoena</connection>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<error_ignored>N</error_ignored>
<lookup>
<key>
<condition>=</condition>
<field>recruit_id</field>
<name>recruit_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>citizen</table>
<value>
<name>has_health_state_info</name>
<rename>has_health_state_info</rename>
</value>
</lookup>
<skip_lookup>N</skip_lookup>
<use_batch>Y</use_batch>
<attributes/>
<GUI>
<xloc>1264</xloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
<name>health_state_input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>SELECT
*
FROM public.restriction_document
WHERE
'${IDM_ID}' != '' -- check for an empty string
AND vk_id = '${IDM_ID}'
AND '${M_R_CR_DATE}' >= created_at
${LIMIT_FW}</sql>
<sql>WITH filteredData AS (SELECT ri.recruit_id,
ri.info -> 'svedFL' -> 'svedSostZdorov' -> 'svedZdorov' ->> 'dataSved' as source_update_date,
ri.info -> 'svedFL' -> 'svedSostZdorov' -> 'svedZdorov' -> 'sostZdorov' as zdorov_arr
FROM recruits_info ri
WHERE jsonb_typeof(ri.info -> 'svedFL' -> 'svedSostZdorov' -> 'svedZdorov' -> 'sostZdorov') = 'array'
and COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}')
SELECT fd.recruit_id,
zdorov_elem ->> 'id' AS source_id,
source_update_date,
zdorov_elem ->> 'kodZabol' AS disease_code,
to_date(zdorov_elem ->> 'dataDiagn', 'YYYY-MM-DD') AS diagnosis_date
FROM filteredData fd
CROSS JOIN LATERAL jsonb_array_elements(zdorov_arr) AS zdorov_elem;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>496</xloc>
<yloc>320</yloc>
<xloc>560</xloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
<name>Table output</name>
<name>health_state_output</name>
<type>TableOutput</type>
<description/>
<distribute>N</distribute>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>1000</commit>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<fields>
<field>
<column_name>id</column_name>
<stream_name>id</stream_name>
<column_name>recruit_id</column_name>
<stream_name>recruit_id</stream_name>
</field>
<field>
<column_name>subpoena_id</column_name>
<stream_name>subpoena_id</stream_name>
<column_name>source_id</column_name>
<stream_name>source_id</stream_name>
</field>
<field>
<column_name>created_at</column_name>
<stream_name>created_at</stream_name>
<column_name>source_update_date</column_name>
<stream_name>source_update_date</stream_name>
</field>
<field>
<column_name>updated_at</column_name>
<stream_name>updated_at</stream_name>
<column_name>disease_code</column_name>
<stream_name>disease_code</stream_name>
</field>
<field>
<column_name>vk_id</column_name>
<stream_name>vk_id</stream_name>
</field>
<field>
<column_name>user_id</column_name>
<stream_name>user_id</stream_name>
</field>
<field>
<column_name>user_name</column_name>
<stream_name>user_name</stream_name>
</field>
<field>
<column_name>status</column_name>
<stream_name>status</stream_name>
</field>
<field>
<column_name>decision_number</column_name>
<stream_name>decision_number</stream_name>
</field>
<field>
<column_name>decision_date</column_name>
<stream_name>decision_date</stream_name>
</field>
<field>
<column_name>decision_reason</column_name>
<stream_name>decision_reason</stream_name>
</field>
<field>
<column_name>extra_info</column_name>
<stream_name>extra_info</stream_name>
</field>
<field>
<column_name>type</column_name>
<stream_name>type</stream_name>
</field>
<field>
<column_name>recruitment_name</column_name>
<stream_name>recruitment_name</stream_name>
<column_name>diagnosis_date</column_name>
<stream_name>diagnosis_date</stream_name>
</field>
</fields>
<ignore_errors>N</ignore_errors>
@ -360,26 +374,38 @@ ${LIMIT_FW}</sql>
<return_keys>N</return_keys>
<schema>ervu_dashboard</schema>
<specify_fields>Y</specify_fields>
<table>restriction_document</table>
<table>health_state</table>
<tablename_in_field>N</tablename_in_field>
<tablename_in_table>Y</tablename_in_table>
<truncate>N</truncate>
<use_batch>Y</use_batch>
<attributes/>
<GUI>
<xloc>1024</xloc>
<yloc>320</yloc>
<xloc>1040</xloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform_error_handling>
<error>
<source_transform>Table output</source_transform>
<source_transform>has_health_state_update</source_transform>
<target_transform>Abort</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename/>
<max_errors/>
<max_pct_errors/>
<min_pct_rows/>
</error>
<error>
<source_transform>health_state_output</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename>error_code</codes_valuename>
<codes_valuename/>
<max_errors/>
<max_pct_errors/>
<min_pct_rows/>
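
To make the JSONB unnesting in health_state_input concrete, the following self-contained sketch reproduces the same CROSS JOIN LATERAL jsonb_array_elements pattern against an invented sample payload (only the keys the pipeline reads are shown; the values are placeholders):

    -- standalone illustration of the sostZdorov unnesting; sample JSON is invented
    WITH sample_recruits_info(recruit_id, info) AS (
        VALUES ('r-1'::text,
                '{"svedFL":{"svedSostZdorov":{"svedZdorov":{
                   "dataSved":"2025-09-01",
                   "sostZdorov":[{"id":"1","kodZabol":"A01","dataDiagn":"2024-05-10"},
                                 {"id":"2","kodZabol":"B20","dataDiagn":"2025-01-15"}]}}}}'::jsonb)
    )
    SELECT ri.recruit_id,
           zdorov_elem ->> 'id'                                                    AS source_id,
           ri.info -> 'svedFL' -> 'svedSostZdorov' -> 'svedZdorov' ->> 'dataSved'  AS source_update_date,
           zdorov_elem ->> 'kodZabol'                                              AS disease_code,
           to_date(zdorov_elem ->> 'dataDiagn', 'YYYY-MM-DD')                      AS diagnosis_date
    FROM sample_recruits_info ri
    CROSS JOIN LATERAL jsonb_array_elements(
           ri.info -> 'svedFL' -> 'svedSostZdorov' -> 'svedZdorov' -> 'sostZdorov') AS zdorov_elem;
    -- yields one row per sostZdorov element, i.e. two rows for the sample above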

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>subpoena_send_info_flow_delta</name>
<name>health_state_flow_delta</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,26 +13,21 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/11 13:22:56.048</created_date>
<created_date>2025/09/08 22:10:33.736</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/11 13:22:56.048</modified_date>
<modified_date>2025/09/08 22:10:33.736</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Create job execution record</from>
<to>Table input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<to>health_state_input</to>
<enabled>Y</enabled>
</hop>
<hop>
@ -40,31 +35,36 @@
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<from>Identify last row in a stream</from>
<to>health_state_upsert</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>health_state_input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>health_state_upsert</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>health_state_upsert</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Abort</name>
@ -77,13 +77,13 @@
<method>none</method>
<schema_name/>
</partitioning>
<abort_option>ABORT_WITH_ERROR</abort_option>
<abort_option>ABORT</abort_option>
<always_log_rows>Y</always_log_rows>
<row_threshold>0</row_threshold>
<attributes/>
<GUI>
<xloc>1168</xloc>
<yloc>400</yloc>
<xloc>1584</xloc>
<yloc>704</yloc>
</GUI>
</transform>
<transform>
@ -117,8 +117,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>944</xloc>
<yloc>400</yloc>
<xloc>1360</xloc>
<yloc>704</yloc>
</GUI>
</transform>
<transform>
@ -148,8 +148,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1248</xloc>
<yloc>64</yloc>
<xloc>1584</xloc>
<yloc>336</yloc>
</GUI>
</transform>
<transform>
@ -174,14 +174,14 @@ and recruitment_id = '${IDM_ID}';
<sql>UPDATE etl.job_execution
SET
status = 'DELTA_PROCESSING',
execution_datetime = DEFAULT,
execution_datetime = current_timestamp,
error_description = NULL
where job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}';</sql>
<attributes/>
<GUI>
<xloc>336</xloc>
<yloc>224</yloc>
<xloc>656</xloc>
<yloc>512</yloc>
</GUI>
</transform>
<transform>
@ -197,8 +197,8 @@ and recruitment_id = '${IDM_ID}';</sql>
</partitioning>
<attributes/>
<GUI>
<xloc>1248</xloc>
<yloc>224</yloc>
<xloc>1072</xloc>
<yloc>336</yloc>
</GUI>
</transform>
<transform>
@ -233,15 +233,15 @@ and recruitment_id = '${IDM_ID}';</sql>
<send_true_to>Change job status on success</send_true_to>
<attributes/>
<GUI>
<xloc>944</xloc>
<yloc>64</yloc>
<xloc>1584</xloc>
<yloc>512</yloc>
</GUI>
</transform>
<transform>
<name>Identify last row in a stream</name>
<type>DetectLastRow</type>
<description/>
<distribute>Y</distribute>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
@ -251,145 +251,108 @@ and recruitment_id = '${IDM_ID}';</sql>
<resultfieldname>last_row</resultfieldname>
<attributes/>
<GUI>
<xloc>736</xloc>
<yloc>224</yloc>
<xloc>1072</xloc>
<yloc>512</yloc>
</GUI>
</transform>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<name>health_state_input</name>
<type>TableInput</type>
<description/>
<distribute>N</distribute>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>WITH filteredData AS (SELECT ri.recruit_id,
ri.info -> 'svedFL' -> 'svedSostZdorov' -> 'svedZdorov' ->> 'dataSved' as source_update_date,
ri.info -> 'svedFL' -> 'svedSostZdorov' -> 'svedZdorov' -> 'sostZdorov' as zdorov_arr
FROM recruits_info ri
WHERE jsonb_typeof(ri.info -> 'svedFL' -> 'svedSostZdorov' -> 'svedZdorov' -> 'sostZdorov') =
'array'
and COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}'
and to_date(ri.info -> 'svedFL' -> 'svedSostZdorov' -> 'svedZdorov' ->> 'dataSved',
'YYYY-MM-DD') > '${MAX_SOURCE_UPDATE_DATE}')
SELECT fd.recruit_id,
zdorov_elem ->> 'id' AS source_id,
source_update_date,
zdorov_elem ->> 'kodZabol' AS disease_code,
to_date(zdorov_elem ->> 'dataDiagn', 'YYYY-MM-DD') AS diagnosis_date
FROM filteredData fd
CROSS JOIN LATERAL jsonb_array_elements(zdorov_arr) AS zdorov_elem;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>880</xloc>
<yloc>512</yloc>
</GUI>
</transform>
<transform>
<name>health_state_upsert</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>id</field>
<name>id</name>
<field>source_id</field>
<name>source_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>subpoena_send_info</table>
<table>health_state</table>
<value>
<name>id</name>
<rename>id</rename>
<update>N</update>
</value>
<value>
<name>subpoena_id</name>
<rename>subpoena_id</rename>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>Y</update>
</value>
<value>
<name>send_code</name>
<rename>send_code</rename>
<name>source_id</name>
<rename>source_id</rename>
<update>Y</update>
</value>
<value>
<name>send_address</name>
<rename>send_address</rename>
<name>source_update_date</name>
<rename>source_update_date</rename>
<update>Y</update>
</value>
<value>
<name>send_address_type</name>
<rename>send_address_type</rename>
<name>disease_code</name>
<rename>disease_code</rename>
<update>Y</update>
</value>
<value>
<name>send_date</name>
<rename>send_date</rename>
<update>Y</update>
</value>
<value>
<name>track_number</name>
<rename>track_number</rename>
<update>Y</update>
</value>
<value>
<name>is_delivered</name>
<rename>is_delivered</rename>
<update>Y</update>
</value>
<value>
<name>delivery_code</name>
<rename>delivery_code</rename>
<update>Y</update>
</value>
<value>
<name>delivery_date</name>
<rename>delivery_date</rename>
<update>Y</update>
</value>
<value>
<name>act_number</name>
<rename>act_number</rename>
<update>Y</update>
</value>
<value>
<name>act_date</name>
<rename>act_date</rename>
<update>Y</update>
</value>
<value>
<name>delivery_fio</name>
<rename>delivery_fio</rename>
<update>Y</update>
</value>
<value>
<name>auto_delivery</name>
<rename>auto_delivery</rename>
<name>diagnosis_date</name>
<rename>diagnosis_date</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>944</xloc>
<yloc>224</yloc>
</GUI>
</transform>
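
The health_state_upsert transform keys its Insert/Update lookup on source_id and rewrites the remaining columns on a hit. Roughly the same effect expressed as plain SQL — this sketch assumes a unique constraint on ervu_dashboard.health_state(source_id), which ON CONFLICT requires but the Hop transform does not, and the $1..$5 placeholders are illustrative:

INSERT INTO ervu_dashboard.health_state
    (recruit_id, source_id, source_update_date, disease_code, diagnosis_date)
VALUES ($1, $2, $3, $4, $5)
ON CONFLICT (source_id) DO UPDATE
SET recruit_id         = EXCLUDED.recruit_id,
    source_update_date = EXCLUDED.source_update_date,
    disease_code       = EXCLUDED.disease_code,
    diagnosis_date     = EXCLUDED.diagnosis_date;
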
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>postgres.subpoena</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>select ssi.*
from public.subpoena_send_info ssi
join public.subpoena s on s.id = ssi.subpoena_id
WHERE
'${IDM_ID}' != '' -- check for an empty string
AND s.department_id = '${IDM_ID}'
AND s.status_change_date >= '${M_R_UP_DATE}'::timestamp
${LIMIT_FW}</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>224</yloc>
<xloc>1360</xloc>
<yloc>512</yloc>
</GUI>
</transform>
<transform_error_handling>
<error>
<source_transform>Insert / update</source_transform>
<source_transform>health_state_upsert</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename/>
<max_errors/>

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>subpoena_appearance_flow_delta</name>
<name>health_state_flow_repeat</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,26 +13,21 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/11 13:22:56.048</created_date>
<created_date>2025/09/08 21:48:46.051</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/11 13:22:56.048</modified_date>
<modified_date>2025/09/08 21:48:46.051</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Create job execution record</from>
<to>Table input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<to>health_state_input</to>
<enabled>Y</enabled>
</hop>
<hop>
@ -40,31 +35,36 @@
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<from>health_state_input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>health_state_upsert</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>health_state_upsert</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>health_state_upsert</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Abort</name>
@ -77,13 +77,13 @@
<method>none</method>
<schema_name/>
</partitioning>
<abort_option>ABORT_WITH_ERROR</abort_option>
<abort_option>ABORT</abort_option>
<always_log_rows>Y</always_log_rows>
<row_threshold>0</row_threshold>
<attributes/>
<GUI>
<xloc>1168</xloc>
<yloc>400</yloc>
<yloc>640</yloc>
</GUI>
</transform>
<transform>
@ -109,7 +109,7 @@
<set_params>Y</set_params>
<single_statement>N</single_statement>
<sql>UPDATE etl.job_execution
SET status = 'DELTA_ERROR',
SET status = 'ERROR',
error_description = ?
WHERE job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}';
@ -118,7 +118,7 @@ and recruitment_id = '${IDM_ID}';
<attributes/>
<GUI>
<xloc>944</xloc>
<yloc>400</yloc>
<yloc>640</yloc>
</GUI>
</transform>
<transform>
@ -141,15 +141,15 @@ and recruitment_id = '${IDM_ID}';
<set_params>N</set_params>
<single_statement>N</single_statement>
<sql>UPDATE etl.job_execution
SET status = 'DELTA_SUCCESS'
SET status = 'SUCCESS'
WHERE job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1248</xloc>
<yloc>64</yloc>
<xloc>1168</xloc>
<yloc>272</yloc>
</GUI>
</transform>
<transform>
@ -173,15 +173,15 @@ and recruitment_id = '${IDM_ID}';
<single_statement>N</single_statement>
<sql>UPDATE etl.job_execution
SET
status = 'DELTA_PROCESSING',
execution_datetime = DEFAULT,
status = 'PROCESSING',
execution_datetime = current_timestamp,
error_description = NULL
where job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}';</sql>
<attributes/>
<GUI>
<xloc>336</xloc>
<yloc>224</yloc>
<xloc>240</xloc>
<yloc>448</yloc>
</GUI>
</transform>
<transform>
@ -197,8 +197,8 @@ and recruitment_id = '${IDM_ID}';</sql>
</partitioning>
<attributes/>
<GUI>
<xloc>1248</xloc>
<yloc>224</yloc>
<xloc>656</xloc>
<yloc>272</yloc>
</GUI>
</transform>
<transform>
@ -233,15 +233,15 @@ and recruitment_id = '${IDM_ID}';</sql>
<send_true_to>Change job status on success</send_true_to>
<attributes/>
<GUI>
<xloc>944</xloc>
<yloc>64</yloc>
<xloc>1168</xloc>
<yloc>448</yloc>
</GUI>
</transform>
<transform>
<name>Identify last row in a stream</name>
<type>DetectLastRow</type>
<description/>
<distribute>Y</distribute>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
@ -251,69 +251,89 @@ and recruitment_id = '${IDM_ID}';</sql>
<resultfieldname>last_row</resultfieldname>
<attributes/>
<GUI>
<xloc>736</xloc>
<yloc>224</yloc>
<xloc>656</xloc>
<yloc>448</yloc>
</GUI>
</transform>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<name>health_state_input</name>
<type>TableInput</type>
<description/>
<distribute>N</distribute>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>WITH filteredData AS (SELECT ri.recruit_id,
ri.info -> 'svedFL' -> 'svedSostZdorov' -> 'svedZdorov' ->> 'dataSved' as source_update_date,
ri.info -> 'svedFL' -> 'svedSostZdorov' -> 'svedZdorov' -> 'sostZdorov' as zdorov_arr
FROM recruits_info ri
WHERE jsonb_typeof(ri.info -> 'svedFL' -> 'svedSostZdorov' -> 'svedZdorov' -> 'sostZdorov') =
'array'
and COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}')
SELECT fd.recruit_id,
zdorov_elem ->> 'id' AS source_id,
source_update_date,
zdorov_elem ->> 'kodZabol' AS disease_code,
to_date(zdorov_elem ->> 'dataDiagn', 'YYYY-MM-DD') AS diagnosis_date
FROM filteredData fd
CROSS JOIN LATERAL jsonb_array_elements(zdorov_arr) AS zdorov_elem;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>464</xloc>
<yloc>448</yloc>
</GUI>
</transform>
<transform>
<name>health_state_upsert</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>id</field>
<name>id</name>
</key>
<key>
<condition>=</condition>
<field>created_date_time</field>
<name>created_date_time</name>
<field>source_id</field>
<name>source_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>subpoena_appearance</table>
<table>health_state</table>
<value>
<name>id</name>
<rename>id</rename>
<update>N</update>
</value>
<value>
<name>created_date_time</name>
<rename>created_date_time</rename>
<update>N</update>
</value>
<value>
<name>subpoena_id</name>
<rename>subpoena_id</rename>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>Y</update>
</value>
<value>
<name>fact_appearance</name>
<rename>fact_appearance</rename>
<name>source_id</name>
<rename>source_id</rename>
<update>Y</update>
</value>
<value>
<name>date_appearance</name>
<rename>date_appearance</rename>
<name>source_update_date</name>
<rename>source_update_date</rename>
<update>Y</update>
</value>
<value>
<name>nonappearance_info</name>
<rename>nonappearance_info</rename>
<name>disease_code</name>
<rename>disease_code</rename>
<update>Y</update>
</value>
<value>
<name>change_date_time</name>
<rename>change_date_time</rename>
<name>diagnosis_date</name>
<rename>diagnosis_date</rename>
<update>Y</update>
</value>
</lookup>
@ -321,46 +341,16 @@ and recruitment_id = '${IDM_ID}';</sql>
<attributes/>
<GUI>
<xloc>944</xloc>
<yloc>224</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>postgres.subpoena</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>select sa.*
from public.subpoena_appearance sa
join public.subpoena s on s.id = sa.subpoena_id
WHERE
'${IDM_ID}' != '' -- check for an empty string
AND s.department_id = '${IDM_ID}'
AND sa.change_date_time >= '${M_R_UP_DATE}'::timestamp
${LIMIT_FW}</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>224</yloc>
<yloc>448</yloc>
</GUI>
</transform>
<transform_error_handling>
<error>
<source_transform>Insert / update</source_transform>
<source_transform>health_state_upsert</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename/>
<max_errors/>

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>recruitment_five_flow_delta</name>
<name>recruitment_five_flow</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,41 +13,41 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/11 13:16:11.204</created_date>
<created_date>2025/09/08 20:59:52.647</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/11 13:16:11.204</modified_date>
<modified_date>2025/09/08 20:59:52.647</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>punishment_flow_delta.hpl</to>
<from>Get all recruitments</from>
<to>health_state_flow.hpl</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>punishment_flow_delta.hpl 2</to>
<from>Get all recruitments</from>
<to>health_state_flow.hpl 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>punishment_flow_delta.hpl 3</to>
<from>Get all recruitments</from>
<to>health_state_flow.hpl 3</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>punishment_flow_delta.hpl 4</to>
<from>Get all recruitments</from>
<to>health_state_flow.hpl 4</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>punishment_flow_delta.hpl 5</to>
<from>Get all recruitments</from>
<to>health_state_flow.hpl 5</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Table input</name>
<name>Get all recruitments</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
@ -57,25 +57,20 @@
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<connection>ervu-dashboard-test</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>SELECT
r.idm_id as recruitment_id
FROM ervu_dashboard.recruitment r
LEFT JOIN etl.job_execution je
ON r.idm_id = je.recruitment_id
and job_name = 'punishment_job'
where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING');</sql>
<sql>SELECT
idm_id
FROM ervu_dashboard.recruitment;</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>352</xloc>
<yloc>288</yloc>
<xloc>480</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>punishment_flow_delta.hpl</name>
<name>health_state_flow.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -86,7 +81,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/punishment/parallel/punishment_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -95,14 +90,9 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>punishment_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -126,12 +116,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>128</yloc>
<xloc>752</xloc>
<yloc>272</yloc>
</GUI>
</transform>
<transform>
<name>punishment_flow_delta.hpl 2</name>
<name>health_state_flow.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -142,7 +132,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/punishment/parallel/punishment_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -151,14 +141,9 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>punishment_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -182,12 +167,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>208</yloc>
<xloc>752</xloc>
<yloc>352</yloc>
</GUI>
</transform>
<transform>
<name>punishment_flow_delta.hpl 3</name>
<name>health_state_flow.hpl 3</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -198,7 +183,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/punishment/parallel/punishment_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -207,14 +192,9 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>punishment_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -238,12 +218,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>288</yloc>
<xloc>752</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>punishment_flow_delta.hpl 4</name>
<name>health_state_flow.hpl 4</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -254,7 +234,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/punishment/parallel/punishment_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -263,14 +243,9 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>punishment_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -294,12 +269,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>368</yloc>
<xloc>752</xloc>
<yloc>512</yloc>
</GUI>
</transform>
<transform>
<name>punishment_flow_delta.hpl 5</name>
<name>health_state_flow.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -310,7 +285,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/punishment/parallel/punishment_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -319,14 +294,9 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>punishment_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -350,8 +320,8 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>448</yloc>
<xloc>752</xloc>
<yloc>592</yloc>
</GUI>
</transform>
<transform_error_handling>

View file

@ -13,36 +13,41 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/11 13:16:11.204</created_date>
<created_date>2025/09/08 21:58:26.774</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/11 13:16:11.204</modified_date>
<modified_date>2025/09/08 21:58:26.774</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>deferment_liberation_flow_delta.hpl</to>
<to>health_state_flow_delta.hpl</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>get_max_source_update_date</from>
<to>Table input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>deferment_liberation_flow_delta.hpl 2</to>
<to>health_state_flow_delta.hpl 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>deferment_liberation_flow_delta.hpl 3</to>
<to>health_state_flow_delta.hpl 3</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>deferment_liberation_flow_delta.hpl 4</to>
<to>health_state_flow_delta.hpl 4</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>deferment_liberation_flow_delta.hpl 5</to>
<to>health_state_flow_delta.hpl 5</to>
<enabled>Y</enabled>
</hop>
</order>
@ -57,26 +62,36 @@
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<connection>ervu-dashboard-test</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<lookup>get_max_source_update_date</lookup>
<sql>SELECT
r.idm_id as recruitment_id
r.idm_id,
? AS max_source_update_date
FROM ervu_dashboard.recruitment r
LEFT JOIN etl.job_execution je
ON r.idm_id = je.recruitment_id
and job_name = 'deferment_liberation_job'
and job_name = 'health_state_job'
JOIN recruits_info ri
ON COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = r.idm_id
AND ri.updated_at > (
SELECT MAX(execution_datetime)
FROM etl.job_execution
WHERE job_name = 'health_state_job'
AND recruitment_id = r.idm_id
)
where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING');</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>352</xloc>
<yloc>288</yloc>
<xloc>816</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>deferment_liberation_flow_delta.hpl</name>
<type>PipelineExecutor</type>
<name>get_max_source_update_date</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
@ -85,53 +100,20 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/deferment_liberation/parallel/deferment_liberation_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>deferment_liberation_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>select max(source_update_date)
from health_state;</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>128</yloc>
<yloc>432</yloc>
</GUI>
</transform>
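
Taken together, get_max_source_update_date and the Table input above implement a high-water-mark delta: the single max(source_update_date) row is injected into the driver query via the ? placeholder fed by the lookup transform, and only recruitments whose recruits_info changed since the last health_state_job run are handed to the child pipelines. The same selection as one illustrative statement — the DISTINCT (to avoid launching a child pipeline more than once per recruitment) and the omission of the je.status filter are simplifications of what the diff actually does:

WITH high_water AS (
    SELECT max(source_update_date) AS max_source_update_date
    FROM health_state
)
SELECT DISTINCT r.idm_id,
       hw.max_source_update_date
FROM ervu_dashboard.recruitment r
CROSS JOIN high_water hw
JOIN recruits_info ri
  ON COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = r.idm_id
 AND ri.updated_at > (SELECT max(execution_datetime)
                      FROM etl.job_execution
                      WHERE job_name = 'health_state_job'
                        AND recruitment_id = r.idm_id);
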
<transform>
<name>deferment_liberation_flow_delta.hpl 2</name>
<name>health_state_flow_delta.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -142,7 +124,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/deferment_liberation/parallel/deferment_liberation_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -151,13 +133,13 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>deferment_liberation_job</input>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -182,12 +164,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>208</yloc>
<xloc>1008</xloc>
<yloc>272</yloc>
</GUI>
</transform>
<transform>
<name>deferment_liberation_flow_delta.hpl 3</name>
<name>health_state_flow_delta.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -198,7 +180,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/deferment_liberation/parallel/deferment_liberation_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -207,13 +189,13 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>deferment_liberation_job</input>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -238,12 +220,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>656</xloc>
<yloc>288</yloc>
<xloc>1008</xloc>
<yloc>352</yloc>
</GUI>
</transform>
<transform>
<name>deferment_liberation_flow_delta.hpl 4</name>
<name>health_state_flow_delta.hpl 3</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -254,7 +236,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/deferment_liberation/parallel/deferment_liberation_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -263,13 +245,13 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>deferment_liberation_job</input>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -294,12 +276,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>656</xloc>
<yloc>368</yloc>
<xloc>1008</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>deferment_liberation_flow_delta.hpl 5</name>
<name>health_state_flow_delta.hpl 4</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -310,7 +292,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/deferment_liberation/parallel/deferment_liberation_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -319,13 +301,13 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>deferment_liberation_job</input>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -350,8 +332,64 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>656</xloc>
<yloc>448</yloc>
<xloc>1008</xloc>
<yloc>512</yloc>
</GUI>
</transform>
<transform>
<name>health_state_flow_delta.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>1008</xloc>
<yloc>592</yloc>
</GUI>
</transform>
<transform_error_handling>

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>recruitment_five_flow_on_error</name>
<name>recruitment_five_flow_repeat</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,36 +13,36 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/05 12:54:50.126</created_date>
<created_date>2025/09/08 21:44:04.866</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/05 12:54:50.126</modified_date>
<modified_date>2025/09/08 21:44:04.866</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>passport_flow_repeat.hpl</to>
<to>health_state_flow_repeat.hpl</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>passport_flow_repeat.hpl 3</to>
<to>health_state_flow_repeat.hpl 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>passport_flow_repeat.hpl 2</to>
<to>health_state_flow_repeat.hpl 3</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>passport_flow_repeat.hpl 4</to>
<to>health_state_flow_repeat.hpl 4</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>passport_flow_repeat.hpl 5</to>
<to>health_state_flow_repeat.hpl 5</to>
<enabled>Y</enabled>
</hop>
</order>
@ -57,25 +57,25 @@
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<connection>ervu-dashboard-test</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>SELECT
r.idm_id as recruitment_id
<limit>0</limit>
<sql>SELECT r.idm_id AS recruitment_id
FROM ervu_dashboard.recruitment r
LEFT JOIN etl.job_execution je
ON r.idm_id = je.recruitment_id
and job_name = 'passport_job'
where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
AND je.job_name = 'health_state_job'
WHERE je.id IS NULL
OR je.status IN ('ERROR', 'PROCESSING');</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>272</xloc>
<yloc>368</yloc>
<xloc>400</xloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
<name>passport_flow_repeat.hpl</name>
<name>health_state_flow_repeat.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -86,7 +86,7 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/passport/parallel/passport_flow_repeat.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -98,11 +98,6 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>passport_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -126,12 +121,12 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>464</xloc>
<yloc>208</yloc>
<xloc>608</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>passport_flow_repeat.hpl 2</name>
<name>health_state_flow_repeat.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -142,7 +137,7 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/passport/parallel/passport_flow_repeat.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -154,11 +149,6 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>passport_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -182,12 +172,12 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>464</xloc>
<yloc>288</yloc>
<xloc>608</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>passport_flow_repeat.hpl 3</name>
<name>health_state_flow_repeat.hpl 3</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -198,7 +188,7 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/passport/parallel/passport_flow_repeat.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -210,11 +200,6 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>passport_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -238,12 +223,12 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>464</xloc>
<yloc>368</yloc>
<xloc>608</xloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
<name>passport_flow_repeat.hpl 4</name>
<name>health_state_flow_repeat.hpl 4</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -254,7 +239,7 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/passport/parallel/passport_flow_repeat.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -266,11 +251,6 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>passport_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -294,12 +274,12 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>464</xloc>
<yloc>448</yloc>
<xloc>608</xloc>
<yloc>544</yloc>
</GUI>
</transform>
<transform>
<name>passport_flow_repeat.hpl 5</name>
<name>health_state_flow_repeat.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -310,7 +290,7 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/passport/parallel/passport_flow_repeat.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/health_state/parallel/health_state_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -322,11 +302,6 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>passport_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -350,8 +325,8 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>464</xloc>
<yloc>528</yloc>
<xloc>608</xloc>
<yloc>624</yloc>
</GUI>
</transform>
<transform_error_handling>

View file

@ -1,27 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<workflow>
<name>job_citizen_address</name>
<name>incapacity_job</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<workflow_version/>
<workflow_status>0</workflow_status>
<created_user>-</created_user>
<created_date>2025/06/05 14:27:15.055</created_date>
<created_date>2025/09/08 16:38:17.982</created_date>
<modified_user>-</modified_user>
<modified_date>2025/06/05 14:27:15.055</modified_date>
<modified_date>2025/09/08 16:38:17.982</modified_date>
<parameters>
<parameter>
<name>M_R_CR_DATE</name>
<default_value>3001-01-01 00:00:00</default_value>
<description/>
</parameter>
<parameter>
<name>M_R_UP_DATE</name>
<default_value/>
<description/>
</parameter>
</parameters>
</parameters>
<actions>
<action>
<name>Start</name>
@ -37,12 +26,12 @@
<schedulerType>0</schedulerType>
<weekDay>1</weekDay>
<parallel>N</parallel>
<xloc>416</xloc>
<yloc>208</yloc>
<xloc>96</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
<action>
<name>check_if_citizen_address_job_exists.hpl</name>
<name>check_if_job_execution_exists.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -52,7 +41,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_address/checkpoints/check_if_citizen_address_job_exists.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/support/check_if_job_execution_exists.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -63,12 +52,12 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>608</xloc>
<yloc>208</yloc>
<xloc>480</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
<action>
<name>citizen_address_job_exists_check</name>
<name>employer_job_execution_exists_check</name>
<description/>
<type>SIMPLE_EVAL</type>
<attributes/>
@ -80,12 +69,12 @@
<valuetype>variable</valuetype>
<variablename>JOB_EXECUTED_FLAG</variablename>
<parallel>N</parallel>
<xloc>880</xloc>
<yloc>208</yloc>
<xloc>784</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
<action>
<name>recruitments_five_flow.hpl</name>
<name>recruitment_five_flow.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -95,7 +84,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_address/recruitments_five_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/recruitment_five_flow.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -106,12 +95,12 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>Y</parallel>
<xloc>1120</xloc>
<yloc>208</yloc>
<xloc>1104</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
<action>
<name>recruitment_five_flow_on_error.hpl</name>
<name>recruitment_five_flow_repeat.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -121,7 +110,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_address/recruitment_five_flow_on_error.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/recruitment_five_flow_repeat.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -132,12 +121,12 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>1120</xloc>
<yloc>512</yloc>
<xloc>1040</xloc>
<yloc>464</yloc>
<attributes_hac/>
</action>
<action>
<name>check_if_need_to_repeat_job.hpl</name>
<name>check_if_need_to_repeat.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -147,7 +136,9 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_address/check_if_need_to_repeat_job.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/support/check_if_need_to_repeat.hpl</filename>
<logext/>
<logfile/>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -158,8 +149,8 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>880</xloc>
<yloc>368</yloc>
<xloc>784</xloc>
<yloc>304</yloc>
<attributes_hac/>
</action>
<action>
@ -167,12 +158,7 @@
<description/>
<type>SIMPLE_EVAL</type>
<attributes/>
<comparevalue/>
<fieldname/>
<fieldtype>boolean</fieldtype>
<mask/>
<maxvalue/>
<minvalue/>
<successbooleancondition>true</successbooleancondition>
<successcondition>equal</successcondition>
<successnumbercondition>equal</successnumbercondition>
@ -180,8 +166,8 @@
<valuetype>variable</valuetype>
<variablename>NEED_TO_REPEAT_JOB</variablename>
<parallel>N</parallel>
<xloc>880</xloc>
<yloc>512</yloc>
<xloc>784</xloc>
<yloc>464</yloc>
<attributes_hac/>
</action>
<action>
@ -195,7 +181,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_address/recruitment_five_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/recruitment_five_flow_delta.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -206,50 +192,62 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>880</xloc>
<xloc>784</xloc>
<yloc>704</yloc>
<attributes_hac/>
</action>
<action>
<name>init_job_name</name>
<description/>
<type>SET_VARIABLES</type>
<attributes/>
<fields>
<field>
<variable_name>JOB_NAME</variable_name>
<variable_type>CURRENT_WORKFLOW</variable_type>
<variable_value>incapacity_job</variable_value>
</field>
</fields>
<file_variable_type>CURRENT_WORKFLOW</file_variable_type>
<replacevars>N</replacevars>
<parallel>N</parallel>
<xloc>256</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
</actions>
<hops>
<hop>
<from>Start</from>
<to>check_if_citizen_address_job_exists.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>check_if_citizen_address_job_exists.hpl</from>
<to>citizen_address_job_exists_check</to>
<from>check_if_job_execution_exists.hpl</from>
<to>employer_job_execution_exists_check</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>citizen_address_job_exists_check</from>
<to>recruitments_five_flow.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>citizen_address_job_exists_check</from>
<to>check_if_need_to_repeat_job.hpl</to>
<from>employer_job_execution_exists_check</from>
<to>check_if_need_to_repeat.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>check_if_need_to_repeat_job.hpl</from>
<from>check_if_need_to_repeat.hpl</from>
<to>Simple evaluation</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>employer_job_execution_exists_check</from>
<to>recruitment_five_flow.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Simple evaluation</from>
<to>recruitment_five_flow_on_error.hpl</to>
<to>recruitment_five_flow_repeat.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
@ -261,6 +259,20 @@
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Start</from>
<to>init_job_name</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>init_job_name</from>
<to>check_if_job_execution_exists.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
</hops>
<notepads>
</notepads>

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>passport_flow</name>
<name>incapacity_flow</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,46 +13,21 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/05 14:31:22.799</created_date>
<created_date>2025/09/08 19:24:20.455</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/05 14:31:22.799</modified_date>
<modified_date>2025/09/08 19:24:20.455</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Create job execution record</from>
<to>Table input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table output</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table output</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>Table output</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table output</from>
<to>Filter rows</to>
<to>incapacity_input</to>
<enabled>Y</enabled>
</hop>
<hop>
@ -61,8 +36,33 @@
<enabled>Y</enabled>
</hop>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<from>Identify last row in a stream</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>incapacity_output</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>incapacity_output</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>incapacity_output</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>incapacity_input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
</order>
@ -77,13 +77,13 @@
<method>none</method>
<schema_name/>
</partitioning>
<abort_option>ABORT_WITH_ERROR</abort_option>
<abort_option>ABORT</abort_option>
<always_log_rows>Y</always_log_rows>
<row_threshold>0</row_threshold>
<attributes/>
<GUI>
<xloc>1264</xloc>
<yloc>608</yloc>
<xloc>1216</xloc>
<yloc>576</yloc>
</GUI>
</transform>
<transform>
@ -117,8 +117,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>896</xloc>
<yloc>608</yloc>
<xloc>992</xloc>
<yloc>576</yloc>
</GUI>
</transform>
<transform>
@ -143,14 +143,13 @@ and recruitment_id = '${IDM_ID}';
<sql>UPDATE etl.job_execution
SET status = 'SUCCESS'
WHERE job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}
and status = 'PROCESSING';
and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1264</xloc>
<yloc>176</yloc>
<xloc>1216</xloc>
<yloc>240</yloc>
</GUI>
</transform>
<transform>
@ -179,7 +178,7 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
<attributes/>
<GUI>
<xloc>288</xloc>
<yloc>320</yloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
@ -195,8 +194,8 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
</partitioning>
<attributes/>
<GUI>
<xloc>896</xloc>
<yloc>176</yloc>
<xloc>704</xloc>
<yloc>240</yloc>
</GUI>
</transform>
<transform>
@ -231,15 +230,15 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
<send_true_to>Change job status on success</send_true_to>
<attributes/>
<GUI>
<xloc>1264</xloc>
<yloc>320</yloc>
<xloc>1216</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>Identify last row in a stream</name>
<type>DetectLastRow</type>
<description/>
<distribute>Y</distribute>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
@ -249,12 +248,12 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
<resultfieldname>last_row</resultfieldname>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>320</yloc>
<xloc>704</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<name>incapacity_input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
@ -267,51 +266,41 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>SELECT
ri.recruit_id,
NULLIF(p.passport_data->>'nomDok', '') AS passport_number,
NULLIF(p.passport_data->>'serDok', '') AS passport_series,
NULLIF(p.passport_data->>'vydDok', '') AS organization_name,
NULLIF(p.passport_data->>'kodVydDok', '') AS unit_code,
TO_DATE(NULLIF(p.passport_data->>'dataDok', ''), 'YYYY-MM-DD') AS issue_date,
CASE
WHEN NULLIF(p.passport_data->>'kodStatus', '') = '1' THEN TRUE
ELSE FALSE
END AS actual
FROM recruits_info ri
JOIN ervu_dashboard.citizen r
ON r.recruit_id = ri.recruit_id
AND '${IDM_ID}' != ''
AND COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}'
AND '${M_R_CR_DATE}'::timestamp >= r.recruit_create_date
JOIN LATERAL (
SELECT passport_data
FROM UNNEST(ARRAY[
ri.info->'svedFL'->'svedBS'->'pasportRF'->'aktPasportRF',
ri.info->'svedFL'->'svedBS'->'pasportRF'->'predPasportRF'
]) AS passport_data
WHERE passport_data IS NOT NULL AND passport_data::text &lt;&gt; 'null'
) AS p ON TRUE
</sql>
<sql>WITH filteredData AS (
SELECT ri.recruit_id,
ri.info -> 'svedFL' -> 'svedNedeesp' -> 'nedeesposob' as nedeesposob_arr
FROM recruits_info ri
WHERE jsonb_typeof(ri.info -> 'svedFL' -> 'svedNedeesp' -> 'nedeesposob') = 'array'
AND COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}'
)
SELECT fd.recruit_id,
nedeesposob_elem ->> 'id' AS source_id,
to_date(nedeesposob_elem ->> 'dataSved', 'YYYY-MM-DD') AS source_update_date,
nedeesposob_elem ->> 'naimStatus' AS capacity_status,
nedeesposob_elem ->> 'naimOrg' AS court_name,
to_date(nedeesposob_elem ->> 'dataPrisv', 'YYYY-MM-DD') AS register_date,
to_date(nedeesposob_elem ->> 'dataOkonch', 'YYYY-MM-DD') AS deregistration_date
FROM filteredData fd
CROSS JOIN LATERAL jsonb_array_elements(fd.nedeesposob_arr) AS nedeesposob_elem;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>496</xloc>
<yloc>320</yloc>
<xloc>512</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>Table output</name>
<name>incapacity_output</name>
<type>TableOutput</type>
<description/>
<distribute>N</distribute>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>1000</commit>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<fields>
<field>
@ -319,28 +308,28 @@ JOIN LATERAL (
<stream_name>recruit_id</stream_name>
</field>
<field>
<column_name>passport_number</column_name>
<stream_name>passport_number</stream_name>
<column_name>source_id</column_name>
<stream_name>source_id</stream_name>
</field>
<field>
<column_name>passport_series</column_name>
<stream_name>passport_series</stream_name>
<column_name>source_update_date</column_name>
<stream_name>source_update_date</stream_name>
</field>
<field>
<column_name>organization_name</column_name>
<stream_name>organization_name</stream_name>
<column_name>capacity_status</column_name>
<stream_name>capacity_status</stream_name>
</field>
<field>
<column_name>unit_code</column_name>
<stream_name>unit_code</stream_name>
<column_name>court_name</column_name>
<stream_name>court_name</stream_name>
</field>
<field>
<column_name>issue_date</column_name>
<stream_name>issue_date</stream_name>
<column_name>register_date</column_name>
<stream_name>register_date</stream_name>
</field>
<field>
<column_name>actual</column_name>
<stream_name>actual</stream_name>
<column_name>deregistration_date</column_name>
<stream_name>deregistration_date</stream_name>
</field>
</fields>
<ignore_errors>N</ignore_errors>
@ -351,26 +340,26 @@ JOIN LATERAL (
<return_keys>N</return_keys>
<schema>ervu_dashboard</schema>
<specify_fields>Y</specify_fields>
<table>passport</table>
<table>incapacity</table>
<tablename_in_field>N</tablename_in_field>
<tablename_in_table>Y</tablename_in_table>
<truncate>N</truncate>
<use_batch>Y</use_batch>
<attributes/>
<GUI>
<xloc>896</xloc>
<yloc>320</yloc>
<xloc>992</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform_error_handling>
<error>
<source_transform>Table output</source_transform>
<source_transform>incapacity_output</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename>error_code</codes_valuename>
<codes_valuename/>
<max_errors/>
<max_pct_errors/>
<min_pct_rows/>
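
For reference, the JSONB unnesting used by the incapacity extraction query above can be exercised on its own against a hand-written literal; a minimal sketch, assuming the svedNedeesp.nedeesposob elements carry the keys referenced in the pipeline (all sample values below are illustrative, not real data):

  WITH sample(info) AS (
    VALUES ('{"svedFL":{"svedNedeesp":{"nedeesposob":[{"id":"42","dataSved":"2025-08-01","naimStatus":"limited capacity","naimOrg":"district court","dataPrisv":"2025-07-15","dataOkonch":null}]}}}'::jsonb)
  )
  SELECT nedeesposob_elem ->> 'id'                                AS source_id,
         to_date(nedeesposob_elem ->> 'dataSved', 'YYYY-MM-DD')   AS source_update_date,
         nedeesposob_elem ->> 'naimStatus'                        AS capacity_status,
         nedeesposob_elem ->> 'naimOrg'                           AS court_name,
         to_date(nedeesposob_elem ->> 'dataPrisv', 'YYYY-MM-DD')  AS register_date,
         to_date(nedeesposob_elem ->> 'dataOkonch', 'YYYY-MM-DD') AS deregistration_date
  FROM sample s
  -- the pipeline's CTE extracts this path once (nedeesposob_arr) and unnests the resulting array
  CROSS JOIN LATERAL jsonb_array_elements(s.info -> 'svedFL' -> 'svedNedeesp' -> 'nedeesposob') AS nedeesposob_elem
  WHERE jsonb_typeof(s.info -> 'svedFL' -> 'svedNedeesp' -> 'nedeesposob') = 'array';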

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>passport_flow_delta</name>
<name>incapacity_flow_delta</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,46 +13,21 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/11 13:22:56.048</created_date>
<created_date>2025/09/08 20:36:38.686</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/11 13:22:56.048</modified_date>
<modified_date>2025/09/08 20:36:38.686</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Create job execution record</from>
<to>Table input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Filter rows</to>
<to>incapacity_input</to>
<enabled>Y</enabled>
</hop>
<hop>
@ -61,8 +36,33 @@
<enabled>Y</enabled>
</hop>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<from>Identify last row in a stream</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>incapacity_upsert</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>incapacity_input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>incapacity_upsert</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>incapacity_upsert</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
</order>
@ -77,13 +77,13 @@
<method>none</method>
<schema_name/>
</partitioning>
<abort_option>ABORT_WITH_ERROR</abort_option>
<abort_option>ABORT</abort_option>
<always_log_rows>Y</always_log_rows>
<row_threshold>0</row_threshold>
<attributes/>
<GUI>
<xloc>1008</xloc>
<yloc>464</yloc>
<xloc>1744</xloc>
<yloc>560</yloc>
</GUI>
</transform>
<transform>
@ -117,8 +117,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>752</xloc>
<yloc>464</yloc>
<xloc>1520</xloc>
<yloc>560</yloc>
</GUI>
</transform>
<transform>
@ -143,14 +143,13 @@ and recruitment_id = '${IDM_ID}';
<sql>UPDATE etl.job_execution
SET status = 'DELTA_SUCCESS'
WHERE job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}'
and status = 'DELTA_PROCESSING';
and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1120</xloc>
<yloc>80</yloc>
<xloc>1744</xloc>
<yloc>208</yloc>
</GUI>
</transform>
<transform>
@ -175,14 +174,14 @@ and status = 'DELTA_PROCESSING';
<sql>UPDATE etl.job_execution
SET
status = 'DELTA_PROCESSING',
execution_datetime = DEFAULT,
execution_datetime = current_timestamp,
error_description = NULL
where job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}';</sql>
<attributes/>
<GUI>
<xloc>128</xloc>
<yloc>224</yloc>
<xloc>816</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
@ -198,8 +197,8 @@ and recruitment_id = '${IDM_ID}';</sql>
</partitioning>
<attributes/>
<GUI>
<xloc>752</xloc>
<yloc>80</yloc>
<xloc>1232</xloc>
<yloc>208</yloc>
</GUI>
</transform>
<transform>
@ -234,15 +233,15 @@ and recruitment_id = '${IDM_ID}';</sql>
<send_true_to>Change job status on success</send_true_to>
<attributes/>
<GUI>
<xloc>1120</xloc>
<yloc>224</yloc>
<xloc>1744</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>Identify last row in a stream</name>
<type>DetectLastRow</type>
<description/>
<distribute>Y</distribute>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
@ -252,81 +251,12 @@ and recruitment_id = '${IDM_ID}';</sql>
<resultfieldname>last_row</resultfieldname>
<attributes/>
<GUI>
<xloc>560</xloc>
<yloc>224</yloc>
<xloc>1232</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>recruit_id</field>
<name>recruit_id</name>
</key>
<key>
<condition>=</condition>
<field>actual</field>
<name>actual</name>
</key>
<schema>ervu_dashboard</schema>
<table>passport</table>
<value>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>passport_number</name>
<rename>passport_number</rename>
<update>Y</update>
</value>
<value>
<name>passport_series</name>
<rename>passport_series</rename>
<update>Y</update>
</value>
<value>
<name>organization_name</name>
<rename>organization_name</rename>
<update>Y</update>
</value>
<value>
<name>unit_code</name>
<rename>unit_code</rename>
<update>Y</update>
</value>
<value>
<name>issue_date</name>
<rename>issue_date</rename>
<update>Y</update>
</value>
<value>
<name>actual</name>
<rename>actual</rename>
<update>N</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>752</xloc>
<yloc>224</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<name>incapacity_input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
@ -339,49 +269,101 @@ and recruitment_id = '${IDM_ID}';</sql>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>SELECT
ri.recruit_id,
NULLIF(p.passport_data->>'nomDok', '') AS passport_number,
NULLIF(p.passport_data->>'serDok', '') AS passport_series,
NULLIF(p.passport_data->>'vydDok', '') AS organization_name,
NULLIF(p.passport_data->>'kodVydDok', '') AS unit_code,
TO_DATE(NULLIF(p.passport_data->>'dataDok', ''), 'YYYY-MM-DD') AS issue_date,
CASE
WHEN NULLIF(p.passport_data->>'kodStatus', '') = '1' THEN TRUE
ELSE FALSE
END AS actual
FROM recruits_info ri
JOIN ervu_dashboard.citizen r
ON r.recruit_id = ri.recruit_id
AND '${IDM_ID}' != ''
AND COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}'
AND '${M_R_CR_DATE}'::timestamp >= r.recruit_create_date
AND r.update_date >= '${M_R_UP_DATE}'::timestamp
JOIN LATERAL (
SELECT passport_data
FROM UNNEST(ARRAY[
ri.info->'svedFL'->'svedBS'->'pasportRF'->'aktPasportRF',
ri.info->'svedFL'->'svedBS'->'pasportRF'->'predPasportRF'
]) AS passport_data
WHERE passport_data IS NOT NULL AND passport_data::text &lt;&gt; 'null'
) AS p ON TRUE
</sql>
<sql>WITH filteredData AS (SELECT ri.recruit_id,
ri.info -> 'svedFL' -> 'svedNedeesp' -> 'nedeesposob' as nedeesposob_arr
FROM recruits_info ri
WHERE jsonb_typeof(ri.info -> 'svedFL' -> 'svedNedeesp' -> 'nedeesposob') = 'array'
AND COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}')
SELECT fd.recruit_id,
nedeesposob_elem ->> 'id' AS source_id,
to_date(nedeesposob_elem ->> 'dataSved', 'YYYY-MM-DD') AS source_update_date,
nedeesposob_elem ->> 'naimStatus' AS capacity_status,
nedeesposob_elem ->> 'naimOrg' AS court_name,
to_date(nedeesposob_elem ->> 'dataPrisv', 'YYYY-MM-DD') AS register_date,
to_date(nedeesposob_elem ->> 'dataOkonch', 'YYYY-MM-DD') AS deregistration_date
FROM filteredData fd
         CROSS JOIN LATERAL jsonb_array_elements(fd.nedeesposob_arr) AS nedeesposob_elem
WHERE to_date(nedeesposob_elem ->> 'dataSved', 'YYYY-MM-DD') > '${MAX_SOURCE_UPDATE_DATE}';</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>400</xloc>
<yloc>224</yloc>
<xloc>1040</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>incapacity_upsert</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>source_id</field>
<name>source_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>disability</table>
<value>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>source_id</name>
<rename>source_id</rename>
<update>N</update>
</value>
<value>
<name>source_update_date</name>
<rename>source_update_date</rename>
<update>Y</update>
</value>
<value>
<name>capacity_status</name>
<rename>capacity_status</rename>
<update>Y</update>
</value>
<value>
<name>court_name</name>
<rename>court_name</rename>
<update>Y</update>
</value>
<value>
<name>register_date</name>
<rename>register_date</rename>
<update>Y</update>
</value>
<value>
<name>deregistration_date</name>
<rename>deregistration_date</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>1520</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform_error_handling>
<error>
<source_transform>Insert / update</source_transform>
<source_transform>incapacity_upsert</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename>error_code</codes_valuename>
<codes_valuename/>
<max_errors/>
<max_pct_errors/>
<min_pct_rows/>
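
The incapacity_upsert transform above is an InsertUpdate keyed on source_id: it updates the remaining columns and leaves recruit_id and source_id untouched. A rough plain-SQL equivalent is sketched below; it assumes a unique constraint on ervu_dashboard.disability(source_id), which the InsertUpdate transform itself does not require, and all values are placeholders:

  INSERT INTO ervu_dashboard.disability (recruit_id, source_id, source_update_date,
                                         capacity_status, court_name, register_date, deregistration_date)
  VALUES ('00000000-0000-0000-0000-000000000000', '42', DATE '2025-08-01',   -- illustrative values only
          'limited capacity', 'district court', DATE '2025-07-15', NULL)
  ON CONFLICT (source_id) DO UPDATE
     SET source_update_date  = EXCLUDED.source_update_date,
         capacity_status     = EXCLUDED.capacity_status,
         court_name          = EXCLUDED.court_name,
         register_date       = EXCLUDED.register_date,
         deregistration_date = EXCLUDED.deregistration_date;
  -- recruit_id and source_id carry update = N in the transform, so they are not rewritten on update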

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>subpoena_appearance_flow_repeat</name>
<name>incapacity_flow_repeat</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,21 +13,21 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/08 13:25:52.319</created_date>
<created_date>2025/09/08 20:32:10.265</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/08 13:25:52.319</modified_date>
<modified_date>2025/09/08 20:32:10.265</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Create job execution record</from>
<to>Table input</to>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>Identify last row in a stream</to>
<from>Create job execution record</from>
<to>incapacity_input</to>
<enabled>Y</enabled>
</hop>
<hop>
@ -35,36 +35,36 @@
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<from>incapacity_input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>incapacity_upsert</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>incapacity_upsert</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>incapacity_upsert</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Abort</name>
@ -77,13 +77,13 @@
<method>none</method>
<schema_name/>
</partitioning>
<abort_option>ABORT_WITH_ERROR</abort_option>
<abort_option>ABORT</abort_option>
<always_log_rows>Y</always_log_rows>
<row_threshold>0</row_threshold>
<attributes/>
<GUI>
<xloc>1248</xloc>
<yloc>400</yloc>
<xloc>1344</xloc>
<yloc>640</yloc>
</GUI>
</transform>
<transform>
@ -117,8 +117,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>960</xloc>
<yloc>400</yloc>
<xloc>1120</xloc>
<yloc>640</yloc>
</GUI>
</transform>
<transform>
@ -148,8 +148,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1440</xloc>
<yloc>48</yloc>
<xloc>1344</xloc>
<yloc>256</yloc>
</GUI>
</transform>
<transform>
@ -171,32 +171,17 @@ and recruitment_id = '${IDM_ID}';
<replace_variables>Y</replace_variables>
<set_params>N</set_params>
<single_statement>N</single_statement>
<sql>INSERT INTO etl.job_execution (
id,
job_name,
status,
execution_datetime,
error_description,
recruitment_id
)
VALUES (
DEFAULT,
'${JOB_NAME}',
'PROCESSING',
DEFAULT,
NULL,
'${IDM_ID}'
)
ON CONFLICT (job_name, recruitment_id)
DO UPDATE SET
<sql>UPDATE etl.job_execution
SET
status = 'PROCESSING',
execution_datetime = DEFAULT,
execution_datetime = current_timestamp,
error_description = NULL
</sql>
where job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}';</sql>
<attributes/>
<GUI>
<xloc>272</xloc>
<yloc>208</yloc>
<xloc>416</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
@ -212,8 +197,8 @@ DO UPDATE SET
</partitioning>
<attributes/>
<GUI>
<xloc>1440</xloc>
<yloc>208</yloc>
<xloc>832</xloc>
<yloc>256</yloc>
</GUI>
</transform>
<transform>
@ -248,15 +233,15 @@ DO UPDATE SET
<send_true_to>Change job status on success</send_true_to>
<attributes/>
<GUI>
<xloc>960</xloc>
<yloc>48</yloc>
<xloc>1344</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>Identify last row in a stream</name>
<type>DetectLastRow</type>
<description/>
<distribute>Y</distribute>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
@ -266,115 +251,118 @@ DO UPDATE SET
<resultfieldname>last_row</resultfieldname>
<attributes/>
<GUI>
<xloc>720</xloc>
<yloc>208</yloc>
<xloc>832</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<name>incapacity_input</name>
<type>TableInput</type>
<description/>
<distribute>N</distribute>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>WITH filteredData AS (
SELECT ri.recruit_id,
ri.info -> 'svedFL' -> 'svedNedeesp' -> 'nedeesposob' as nedeesposob_arr
FROM recruits_info ri
WHERE jsonb_typeof(ri.info -> 'svedFL' -> 'svedNedeesp' -> 'nedeesposob') = 'array'
AND COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}'
)
SELECT fd.recruit_id,
nedeesposob_elem ->> 'id' AS source_id,
to_date(nedeesposob_elem ->> 'dataSved', 'YYYY-MM-DD') AS source_update_date,
nedeesposob_elem ->> 'naimStatus' AS capacity_status,
nedeesposob_elem ->> 'naimOrg' AS court_name,
to_date(nedeesposob_elem ->> 'dataPrisv', 'YYYY-MM-DD') AS register_date,
to_date(nedeesposob_elem ->> 'dataOkonch', 'YYYY-MM-DD') AS deregistration_date
FROM filteredData fd
         CROSS JOIN LATERAL jsonb_array_elements(fd.nedeesposob_arr) AS nedeesposob_elem;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>incapacity_upsert</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>id</field>
<name>id</name>
</key>
<key>
<condition>=</condition>
<field>created_date_time</field>
<name>created_date_time</name>
<field>source_id</field>
<name>source_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>subpoena_appearance</table>
<table>disability</table>
<value>
<name>id</name>
<rename>id</rename>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>created_date_time</name>
<rename>created_date_time</rename>
<name>source_id</name>
<rename>source_id</rename>
<update>N</update>
</value>
<value>
<name>subpoena_id</name>
<rename>subpoena_id</rename>
<name>source_update_date</name>
<rename>source_update_date</rename>
<update>Y</update>
</value>
<value>
<name>fact_appearance</name>
<rename>fact_appearance</rename>
<name>capacity_status</name>
<rename>capacity_status</rename>
<update>Y</update>
</value>
<value>
<name>date_appearance</name>
<rename>date_appearance</rename>
<name>court_name</name>
<rename>court_name</rename>
<update>Y</update>
</value>
<value>
<name>nonappearance_info</name>
<rename>nonappearance_info</rename>
<name>register_date</name>
<rename>register_date</rename>
<update>Y</update>
</value>
<value>
<name>change_date_time</name>
<rename>change_date_time</rename>
<name>deregistration_date</name>
<rename>deregistration_date</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>960</xloc>
<yloc>208</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>postgres.subpoena</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>select sa.*
from public.subpoena_appearance sa
join public.subpoena s on s.id = sa.subpoena_id
WHERE
  '${IDM_ID}' != '' -- check that the string is not empty
AND s.department_id = '${IDM_ID}'
AND '${M_R_CR_DATE}' >= sa.created_date_time
${LIMIT_FW}</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>480</xloc>
<yloc>208</yloc>
<xloc>1120</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform_error_handling>
<error>
<source_transform>Insert / update</source_transform>
<source_transform>incapacity_upsert</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename/>
<max_errors/>
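
All of these pipelines pivot on etl.job_execution for status bookkeeping. From the statements in this commit (the status UPDATEs, the INSERT ... ON CONFLICT in the base flow, and the dispatcher queries), its shape is roughly the sketch below; column types and constraints are inferred from usage, not taken from the real DDL:

  CREATE TABLE IF NOT EXISTS etl.job_execution (
      id                 bigserial PRIMARY KEY,
      job_name           text      NOT NULL,
      recruitment_id     text      NOT NULL,
      status             text      NOT NULL,   -- PROCESSING / SUCCESS / ERROR / DELTA_PROCESSING / DELTA_SUCCESS / DELTA_ERROR
      execution_datetime timestamp DEFAULT current_timestamp,
      error_description  text,
      UNIQUE (job_name, recruitment_id)        -- implied by the ON CONFLICT (job_name, recruitment_id) upsert in the base flow
  );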

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>recruitments_five_flow</name>
<name>recruitment_five_flow</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,41 +13,41 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/04/18 09:48:01.970</created_date>
<created_date>2025/09/08 19:24:07.006</created_date>
<modified_user>-</modified_user>
<modified_date>2025/04/18 09:48:01.970</modified_date>
<modified_date>2025/09/08 19:24:07.006</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Get all recruitments ordered by created_date</from>
<to>passport_flow.hpl</to>
<from>Get all recruitments</from>
<to>incapacity_flow.hpl</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Get all recruitments ordered by created_date</from>
<to>passport_flow.hpl 2</to>
<from>Get all recruitments</from>
<to>incapacity_flow.hpl 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Get all recruitments ordered by created_date</from>
<to>passport_flow.hpl 3</to>
<from>Get all recruitments</from>
<to>incapacity_flow.hpl 3</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Get all recruitments ordered by created_date</from>
<to>passport_flow.hpl 4</to>
<from>Get all recruitments</from>
<to>incapacity_flow.hpl 4</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Get all recruitments ordered by created_date</from>
<to>passport_flow.hpl 5</to>
<from>Get all recruitments</from>
<to>incapacity_flow.hpl 5</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Get all recruitments ordered by created_date</name>
<name>Get all recruitments</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
@ -57,21 +57,20 @@
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<connection>ervu-dashboard-test</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>SELECT
idm_id AS recruitment
idm_id
FROM ervu_dashboard.recruitment;</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>432</xloc>
<yloc>304</yloc>
<xloc>560</xloc>
<yloc>448</yloc>
</GUI>
</transform>
<transform>
<name>passport_flow.hpl</name>
<name>incapacity_flow.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -82,7 +81,7 @@ FROM ervu_dashboard.recruitment;</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/passport/parallel/passport_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -91,13 +90,13 @@ FROM ervu_dashboard.recruitment;</sql>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>passport_job</input>
<input>incapacity_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -122,12 +121,12 @@ FROM ervu_dashboard.recruitment;</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>704</xloc>
<yloc>144</yloc>
<xloc>832</xloc>
<yloc>288</yloc>
</GUI>
</transform>
<transform>
<name>passport_flow.hpl 2</name>
<name>incapacity_flow.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -138,7 +137,7 @@ FROM ervu_dashboard.recruitment;</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/passport/parallel/passport_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -147,13 +146,13 @@ FROM ervu_dashboard.recruitment;</sql>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>passport_job</input>
<input>incapacity_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -178,12 +177,12 @@ FROM ervu_dashboard.recruitment;</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>704</xloc>
<yloc>224</yloc>
<xloc>832</xloc>
<yloc>368</yloc>
</GUI>
</transform>
<transform>
<name>passport_flow.hpl 3</name>
<name>incapacity_flow.hpl 3</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -194,7 +193,7 @@ FROM ervu_dashboard.recruitment;</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/passport/parallel/passport_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -203,13 +202,13 @@ FROM ervu_dashboard.recruitment;</sql>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>passport_job</input>
<input>incapacity_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -234,12 +233,12 @@ FROM ervu_dashboard.recruitment;</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>704</xloc>
<yloc>304</yloc>
<xloc>832</xloc>
<yloc>448</yloc>
</GUI>
</transform>
<transform>
<name>passport_flow.hpl 4</name>
<name>incapacity_flow.hpl 4</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -250,7 +249,7 @@ FROM ervu_dashboard.recruitment;</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/passport/parallel/passport_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -259,13 +258,13 @@ FROM ervu_dashboard.recruitment;</sql>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>passport_job</input>
<input>incapacity_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -290,12 +289,12 @@ FROM ervu_dashboard.recruitment;</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>704</xloc>
<yloc>384</yloc>
<xloc>832</xloc>
<yloc>528</yloc>
</GUI>
</transform>
<transform>
<name>passport_flow.hpl 5</name>
<name>incapacity_flow.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -306,7 +305,7 @@ FROM ervu_dashboard.recruitment;</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/passport/parallel/passport_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -315,13 +314,13 @@ FROM ervu_dashboard.recruitment;</sql>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>passport_job</input>
<input>incapacity_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -346,8 +345,8 @@ FROM ervu_dashboard.recruitment;</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>704</xloc>
<yloc>464</yloc>
<xloc>832</xloc>
<yloc>608</yloc>
</GUI>
</transform>
<transform_error_handling>

View file

@ -13,36 +13,41 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/11 13:16:11.204</created_date>
<created_date>2025/09/08 20:38:40.987</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/11 13:16:11.204</modified_date>
<modified_date>2025/09/08 20:38:40.987</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>subpoena_send_info_flow_delta.hpl</to>
<to>incapacity_flow_delta.hpl</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>get_max_source_update_date</from>
<to>Table input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>subpoena_send_info_flow_delta.hpl 2</to>
<to>incapacity_flow_delta.hpl 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>subpoena_send_info_flow_delta.hpl 3</to>
<to>incapacity_flow_delta.hpl 3</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>subpoena_send_info_flow_delta.hpl 4</to>
<to>incapacity_flow_delta.hpl 4</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>subpoena_send_info_flow_delta.hpl 5</to>
<to>incapacity_flow_delta.hpl 5</to>
<enabled>Y</enabled>
</hop>
</order>
@ -57,25 +62,59 @@
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<connection>ervu-dashboard-test</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
r.idm_id as recruitment_id
<lookup>get_max_source_update_date</lookup>
<sql>WITH mud AS (
SELECT
recruitment_id,
MAX(execution_datetime) AS max_upd_date
FROM etl.job_execution
  WHERE job_name = 'incapacity_job'
AND status IN ('SUCCESS','DELTA_ERROR','DELTA_SUCCESS','DELTA_PROCESSING')
GROUP BY recruitment_id
)
SELECT
r.idm_id,
? max_source_update_date
FROM ervu_dashboard.recruitment r
LEFT JOIN etl.job_execution je
ON r.idm_id = je.recruitment_id
and job_name = 'subpoena_send_info_job'
where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING');</sql>
JOIN mud ON mud.recruitment_id = r.idm_id
JOIN recruits_info ri
ON COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = r.idm_id
AND ri.updated_at > mud.max_upd_date;</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>352</xloc>
<yloc>288</yloc>
<xloc>704</xloc>
<yloc>496</yloc>
</GUI>
</transform>
<transform>
<name>subpoena_send_info_flow_delta.hpl</name>
<name>get_max_source_update_date</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
          <sql>select max(source_update_date) as max_source_update_date
from ervu_dashboard.incapacity;</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>528</xloc>
<yloc>496</yloc>
</GUI>
</transform>
<transform>
<name>incapacity_flow_delta.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -86,7 +125,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/raw_data/subpoena_send_info/parallel/subpoena_send_info_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -95,13 +134,18 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>subpoena_send_info_job</input>
<input>incapacity_job</input>
</variable_mapping>
<variable_mapping>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -126,12 +170,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>624</xloc>
<yloc>128</yloc>
<xloc>896</xloc>
<yloc>336</yloc>
</GUI>
</transform>
<transform>
<name>subpoena_send_info_flow_delta.hpl 2</name>
<name>incapacity_flow_delta.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -142,7 +186,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/raw_data/subpoena_send_info/parallel/subpoena_send_info_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -151,13 +195,18 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>subpoena_send_info_job</input>
<input>incapacity_job</input>
</variable_mapping>
<variable_mapping>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -182,12 +231,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>624</xloc>
<yloc>208</yloc>
<xloc>896</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>subpoena_send_info_flow_delta.hpl 3</name>
<name>incapacity_flow_delta.hpl 3</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -198,7 +247,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/raw_data/subpoena_send_info/parallel/subpoena_send_info_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -207,13 +256,18 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>subpoena_send_info_job</input>
<input>incapacity_job</input>
</variable_mapping>
<variable_mapping>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -238,12 +292,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>624</xloc>
<yloc>288</yloc>
<xloc>896</xloc>
<yloc>496</yloc>
</GUI>
</transform>
<transform>
<name>subpoena_send_info_flow_delta.hpl 4</name>
<name>incapacity_flow_delta.hpl 4</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -254,7 +308,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/raw_data/subpoena_send_info/parallel/subpoena_send_info_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -263,13 +317,18 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>subpoena_send_info_job</input>
<input>incapacity_job</input>
</variable_mapping>
<variable_mapping>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -294,12 +353,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>624</xloc>
<yloc>368</yloc>
<xloc>896</xloc>
<yloc>576</yloc>
</GUI>
</transform>
<transform>
<name>subpoena_send_info_flow_delta.hpl 5</name>
<name>incapacity_flow_delta.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -310,7 +369,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/raw_data/subpoena_send_info/parallel/subpoena_send_info_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -319,13 +378,18 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>subpoena_send_info_job</input>
<input>incapacity_job</input>
</variable_mapping>
<variable_mapping>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -350,8 +414,8 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>624</xloc>
<yloc>448</yloc>
<xloc>896</xloc>
<yloc>656</yloc>
</GUI>
</transform>
<transform_error_handling>
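
In the delta dispatcher above, get_max_source_update_date feeds the Table input through its <lookup> reference, so the single ? marker in the SQL is replaced with the value it produces before execution. With the placeholder substituted, the effective query is roughly the following (the literal date stands in for whatever max(source_update_date) returns):

  WITH mud AS (
    SELECT recruitment_id,
           MAX(execution_datetime) AS max_upd_date
    FROM etl.job_execution
    WHERE job_name = 'incapacity_job'
      AND status IN ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING')
    GROUP BY recruitment_id
  )
  SELECT r.idm_id,
         DATE '2025-09-01' AS max_source_update_date   -- value produced by get_max_source_update_date
  FROM ervu_dashboard.recruitment r
  JOIN mud ON mud.recruitment_id = r.idm_id
  JOIN recruits_info ri
    ON COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = r.idm_id
   AND ri.updated_at > mud.max_upd_date;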

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>recruitment_five_flow_delta</name>
<name>recruitment_five_flow_repeat</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,36 +13,36 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/11 13:16:11.204</created_date>
<created_date>2025/09/08 20:31:52.575</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/11 13:16:11.204</modified_date>
<modified_date>2025/09/08 20:31:52.575</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>citizenship_flow_delta.hpl</to>
<to>incapacity_flow_repeat.hpl</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>citizenship_flow_delta.hpl 2</to>
<to>incapacity_flow_repeat.hpl 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>citizenship_flow_delta.hpl 3</to>
<to>incapacity_flow_repeat.hpl 3</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>citizenship_flow_delta.hpl 4</to>
<to>incapacity_flow_repeat.hpl 4</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>citizenship_flow_delta.hpl 5</to>
<to>incapacity_flow_repeat.hpl 5</to>
<enabled>Y</enabled>
</hop>
</order>
@ -57,137 +57,25 @@
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<connection>ervu-dashboard-test</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
r.idm_id as recruitment_id
<sql>SELECT r.idm_id AS recruitment_id
FROM ervu_dashboard.recruitment r
LEFT JOIN etl.job_execution je
ON r.idm_id = je.recruitment_id
and job_name = 'citizenship_job'
where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING');</sql>
AND je.job_name = 'incapacity_job'
WHERE je.id IS NULL
OR je.status IN ('ERROR', 'PROCESSING');</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>352</xloc>
<yloc>288</yloc>
</GUI>
</transform>
<transform>
<name>citizenship_flow_delta.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizenship/parallel/citizenship_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizenship_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>128</yloc>
</GUI>
</transform>
<transform>
<name>citizenship_flow_delta.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizenship/parallel/citizenship_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizenship_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>656</xloc>
<yloc>208</yloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
<name>citizenship_flow_delta.hpl 3</name>
<name>incapacity_flow_repeat.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -198,7 +86,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizenship/parallel/citizenship_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -213,7 +101,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizenship_job</input>
<input>incapacity_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -238,12 +126,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>656</xloc>
<yloc>288</yloc>
<xloc>864</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>citizenship_flow_delta.hpl 4</name>
<name>incapacity_flow_repeat.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -254,7 +142,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizenship/parallel/citizenship_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -269,7 +157,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizenship_job</input>
<input>incapacity_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -294,12 +182,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>656</xloc>
<yloc>368</yloc>
<xloc>864</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>citizenship_flow_delta.hpl 5</name>
<name>incapacity_flow_repeat.hpl 3</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -310,7 +198,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizenship/parallel/citizenship_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -325,7 +213,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>citizenship_job</input>
<input>incapacity_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -350,8 +238,120 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>656</xloc>
<yloc>448</yloc>
<xloc>864</xloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
<name>incapacity_flow_repeat.hpl 4</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>incapacity_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>864</xloc>
<yloc>544</yloc>
</GUI>
</transform>
<transform>
<name>incapacity_flow_repeat.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/incapacity/parallel/incapacity_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>incapacity_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>864</xloc>
<yloc>624</yloc>
</GUI>
</transform>
<transform_error_handling>

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>subpoena_appearance_flow</name>
<name>vich_flow</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,16 +13,21 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/05 14:31:22.799</created_date>
<created_date>2025/09/08 19:24:20.455</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/05 14:31:22.799</modified_date>
<modified_date>2025/09/08 19:24:20.455</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Create job execution record</from>
<to>Table input</to>
<to>vich_input</to>
<enabled>Y</enabled>
</hop>
<hop>
@ -31,38 +36,33 @@
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>Identify last row in a stream</to>
<from>Identify last row in a stream</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>Table output</to>
<to>vich_output</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table output</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table output</from>
<from>vich_output</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table output</from>
<to>Detect empty stream</to>
<from>vich_output</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>vich_input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
</order>
@ -77,13 +77,13 @@
<method>none</method>
<schema_name/>
</partitioning>
<abort_option>ABORT_WITH_ERROR</abort_option>
<abort_option>ABORT</abort_option>
<always_log_rows>Y</always_log_rows>
<row_threshold>0</row_threshold>
<attributes/>
<GUI>
<xloc>1376</xloc>
<yloc>512</yloc>
<xloc>1216</xloc>
<yloc>592</yloc>
</GUI>
</transform>
<transform>
@ -117,8 +117,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1024</xloc>
<yloc>512</yloc>
<xloc>992</xloc>
<yloc>592</yloc>
</GUI>
</transform>
<transform>
@ -148,8 +148,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1632</xloc>
<yloc>160</yloc>
<xloc>1216</xloc>
<yloc>240</yloc>
</GUI>
</transform>
<transform>
@ -178,7 +178,7 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
<attributes/>
<GUI>
<xloc>288</xloc>
<yloc>320</yloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
@ -194,8 +194,8 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
</partitioning>
<attributes/>
<GUI>
<xloc>1632</xloc>
<yloc>320</yloc>
<xloc>704</xloc>
<yloc>240</yloc>
</GUI>
</transform>
<transform>
@ -230,15 +230,15 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
<send_true_to>Change job status on success</send_true_to>
<attributes/>
<GUI>
<xloc>1024</xloc>
<yloc>160</yloc>
<xloc>1216</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>Identify last row in a stream</name>
<type>DetectLastRow</type>
<description/>
<distribute>Y</distribute>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
@ -248,80 +248,78 @@ VALUES (DEFAULT, '${JOB_NAME}', 'PROCESSING', DEFAULT, null, '${IDM_ID}');
<resultfieldname>last_row</resultfieldname>
<attributes/>
<GUI>
<xloc>752</xloc>
<yloc>320</yloc>
<xloc>704</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<name>vich_input</name>
<type>TableInput</type>
<description/>
<distribute>N</distribute>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>postgres.subpoena</connection>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>select sa.*
from public.subpoena_appearance sa
join public.subpoena s on s.id = sa.subpoena_id
WHERE
  '${IDM_ID}' != '' -- check that the string is not empty
AND s.department_id = '${IDM_ID}'
AND '${M_R_CR_DATE}' >= sa.created_date_time
${LIMIT_FW}</sql>
<sql>WITH filteredData AS (SELECT ri.recruit_id,
to_date(ri.info -> 'svedFL' -> 'svedUchetVICH' -> 'svedVICH' ->> 'dataSved',
'YYYY-MM-DD') as source_update_date,
ri.info -> 'svedFL' -> 'svedUchetVICH' -> 'svedVICH' -> 'svedUchet' as vich_arr
FROM recruits_info ri
WHERE jsonb_typeof(ri.info -> 'svedFL' -> 'svedUchetVICH' -> 'svedVICH' -> 'svedUchet') = 'array'
and COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}')
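-- one output row per element of the svedUchet array: jsonb_array_elements unnests it,
-- and LATERAL keeps each element paired with its recruit_id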
SELECT fd.recruit_id,
vich_elem ->> 'id' AS source_id,
fd.source_update_date,
       to_date(vich_elem ->> 'dataPostUchet', 'YYYY-MM-DD') AS register_date,
       to_date(vich_elem ->> 'dataSnyat', 'YYYY-MM-DD') AS deregistration_date
FROM filteredData fd
CROSS JOIN LATERAL jsonb_array_elements(vich_arr) AS vich_elem;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>496</xloc>
<yloc>320</yloc>
<xloc>512</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>Table output</name>
<name>vich_output</name>
<type>TableOutput</type>
<description/>
<distribute>N</distribute>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>1000</commit>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<fields>
<field>
<column_name>id</column_name>
<stream_name>id</stream_name>
<column_name>recruit_id</column_name>
<stream_name>recruit_id</stream_name>
</field>
<field>
<column_name>subpoena_id</column_name>
<stream_name>subpoena_id</stream_name>
<column_name>source_id</column_name>
<stream_name>source_id</stream_name>
</field>
<field>
<column_name>fact_appearance</column_name>
<stream_name>fact_appearance</stream_name>
<column_name>source_update_date</column_name>
<stream_name>source_update_date</stream_name>
</field>
<field>
<column_name>date_appearance</column_name>
<stream_name>date_appearance</stream_name>
<column_name>register_date</column_name>
<stream_name>register_date</stream_name>
</field>
<field>
<column_name>nonappearance_info</column_name>
<stream_name>nonappearance_info</stream_name>
</field>
<field>
<column_name>created_date_time</column_name>
<stream_name>created_date_time</stream_name>
</field>
<field>
<column_name>change_date_time</column_name>
<stream_name>change_date_time</stream_name>
<column_name>deregistration_date</column_name>
<stream_name>deregistration_date</stream_name>
</field>
</fields>
<ignore_errors>N</ignore_errors>
@ -332,26 +330,26 @@ ${LIMIT_FW}</sql>
<return_keys>N</return_keys>
<schema>ervu_dashboard</schema>
<specify_fields>Y</specify_fields>
<table>subpoena_appearance</table>
<table>incapacity</table>
<tablename_in_field>N</tablename_in_field>
<tablename_in_table>Y</tablename_in_table>
<truncate>N</truncate>
<use_batch>Y</use_batch>
<attributes/>
<GUI>
<xloc>1024</xloc>
<yloc>320</yloc>
<xloc>992</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform_error_handling>
<error>
<source_transform>Table output</source_transform>
<source_transform>vich_output</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename>error_code</codes_valuename>
<codes_valuename/>
<max_errors/>
<max_pct_errors/>
<min_pct_rows/>

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>restriction_document_flow_delta</name>
<name>vich_flow_delta</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,26 +13,21 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/11 13:22:56.048</created_date>
<created_date>2025/09/08 20:36:38.686</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/11 13:22:56.048</modified_date>
<modified_date>2025/09/08 20:36:38.686</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Create job execution record</from>
<to>Table input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<to>vich_input</to>
<enabled>Y</enabled>
</hop>
<hop>
@ -40,31 +35,36 @@
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Insert / update</from>
<from>Identify last row in a stream</from>
<to>vich_upsert</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>vich_input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>vich_upsert</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>vich_upsert</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Abort</name>
@ -77,13 +77,13 @@
<method>none</method>
<schema_name/>
</partitioning>
<abort_option>ABORT_WITH_ERROR</abort_option>
<abort_option>ABORT</abort_option>
<always_log_rows>Y</always_log_rows>
<row_threshold>0</row_threshold>
<attributes/>
<GUI>
<xloc>1168</xloc>
<yloc>400</yloc>
<xloc>1744</xloc>
<yloc>560</yloc>
</GUI>
</transform>
<transform>
@ -117,8 +117,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>944</xloc>
<yloc>400</yloc>
<xloc>1520</xloc>
<yloc>560</yloc>
</GUI>
</transform>
<transform>
@ -148,8 +148,8 @@ and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1248</xloc>
<yloc>64</yloc>
<xloc>1744</xloc>
<yloc>208</yloc>
</GUI>
</transform>
<transform>
@ -174,14 +174,14 @@ and recruitment_id = '${IDM_ID}';
<sql>UPDATE etl.job_execution
SET
status = 'DELTA_PROCESSING',
execution_datetime = DEFAULT,
execution_datetime = current_timestamp,
error_description = NULL
where job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}';</sql>
<attributes/>
<GUI>
<xloc>336</xloc>
<yloc>224</yloc>
<xloc>816</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
@ -197,8 +197,8 @@ and recruitment_id = '${IDM_ID}';</sql>
</partitioning>
<attributes/>
<GUI>
<xloc>1248</xloc>
<yloc>224</yloc>
<xloc>1232</xloc>
<yloc>208</yloc>
</GUI>
</transform>
<transform>
@ -233,15 +233,15 @@ and recruitment_id = '${IDM_ID}';</sql>
<send_true_to>Change job status on success</send_true_to>
<attributes/>
<GUI>
<xloc>944</xloc>
<yloc>64</yloc>
<xloc>1744</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>Identify last row in a stream</name>
<type>DetectLastRow</type>
<description/>
<distribute>Y</distribute>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
@ -251,152 +251,108 @@ and recruitment_id = '${IDM_ID}';</sql>
<resultfieldname>last_row</resultfieldname>
<attributes/>
<GUI>
<xloc>736</xloc>
<yloc>224</yloc>
<xloc>1232</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<name>vich_input</name>
<type>TableInput</type>
<description/>
<distribute>N</distribute>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>WITH filteredData AS (SELECT ri.recruit_id,
to_date(ri.info -> 'svedFL' -> 'svedUchetVICH' -> 'svedVICH' ->> 'dataSved',
'YYYY-MM-DD') as source_update_date,
ri.info -> 'svedFL' -> 'svedUchetVICH' -> 'svedVICH' -> 'svedUchet' as vich_arr
FROM recruits_info ri
WHERE jsonb_typeof(ri.info -> 'svedFL' -> 'svedUchetVICH' -> 'svedVICH' -> 'svedUchet') = 'array'
and COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}'
and to_date(ri.info -> 'svedFL' -> 'svedUchetVICH' -> 'svedVICH' ->> 'dataSved', 'YYYY-MM-DD') > '${MAX_SOURCE_UPDATE_DATE}'
)
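-- delta load: only recruits whose dataSved is newer than MAX_SOURCE_UPDATE_DATE
-- (supplied by the delta driver pipeline) are re-read here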
SELECT fd.recruit_id,
vich_elem ->> 'id' AS source_id,
fd.source_update_date,
       to_date(vich_elem ->> 'dataPostUchet', 'YYYY-MM-DD') AS register_date,
       to_date(vich_elem ->> 'dataSnyat', 'YYYY-MM-DD') AS deregistration_date
FROM filteredData fd
CROSS JOIN LATERAL jsonb_array_elements(vich_arr) AS vich_elem;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>1040</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>vich_upsert</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>id</field>
<name>id</name>
<name2/>
</key>
<key>
<condition>=</condition>
<field>created_at</field>
<name>created_at</name>
<name2/>
<field>source_id</field>
<name>source_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>restriction_document</table>
<table>disability</table>
<value>
<name>id</name>
<rename>id</rename>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>subpoena_id</name>
<rename>subpoena_id</rename>
<update>Y</update>
</value>
<value>
<name>created_at</name>
<rename>created_at</rename>
<name>source_id</name>
<rename>source_id</rename>
<update>N</update>
</value>
<value>
<name>updated_at</name>
<rename>updated_at</rename>
<name>source_update_date</name>
<rename>source_update_date</rename>
<update>Y</update>
</value>
<value>
<name>vk_id</name>
<rename>vk_id</rename>
<name>register_date</name>
<rename>register_date</rename>
<update>Y</update>
</value>
<value>
<name>user_id</name>
<rename>user_id</rename>
<update>Y</update>
</value>
<value>
<name>user_name</name>
<rename>user_name</rename>
<update>Y</update>
</value>
<value>
<name>status</name>
<rename>status</rename>
<update>Y</update>
</value>
<value>
<name>decision_number</name>
<rename>decision_number</rename>
<update>Y</update>
</value>
<value>
<name>decision_date</name>
<rename>decision_date</rename>
<update>Y</update>
</value>
<value>
<name>decision_reason</name>
<rename>decision_reason</rename>
<update>Y</update>
</value>
<value>
<name>extra_info</name>
<rename>extra_info</rename>
<update>Y</update>
</value>
<value>
<name>type</name>
<rename>type</rename>
<update>Y</update>
</value>
<value>
<name>recruitment_name</name>
<rename>recruitment_name</rename>
<name>deregistration_date</name>
<rename>deregistration_date</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>944</xloc>
<yloc>224</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>postgres.subpoena</connection>
<execute_each_row>N</execute_each_row>
<sql>SELECT
*
FROM public.restriction_document
WHERE
    '${IDM_ID}' != '' -- check for an empty string
AND vk_id = '${IDM_ID}'
AND updated_at >= '${M_R_UP_DATE}'::timestamp
${LIMIT_FW}</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>544</xloc>
<yloc>224</yloc>
<xloc>1520</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform_error_handling>
<error>
<source_transform>Insert / update</source_transform>
<source_transform>vich_upsert</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename/>
<max_errors/>

View file

@ -0,0 +1,372 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>vich_flow_repeat</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<parameters>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/09/08 20:32:10.265</created_date>
<modified_user>-</modified_user>
<modified_date>2025/09/08 20:32:10.265</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Change job status on error</from>
<to>Abort</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Create job execution record</from>
<to>incapacity_input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Filter rows</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>Detect empty stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>incapacity_input</from>
<to>Identify last row in a stream</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Identify last row in a stream</from>
<to>incapacity_upsert</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>incapacity_upsert</from>
<to>Filter rows</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>incapacity_upsert</from>
<to>Change job status on error</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Detect empty stream</from>
<to>Change job status on success</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Abort</name>
<type>Abort</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<abort_option>ABORT</abort_option>
<always_log_rows>Y</always_log_rows>
<row_threshold>0</row_threshold>
<attributes/>
<GUI>
<xloc>1344</xloc>
<yloc>608</yloc>
</GUI>
</transform>
<transform>
<name>Change job status on error</name>
<type>ExecSql</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<arguments>
<argument>
<name>error_description</name>
</argument>
</arguments>
<connection>ervu-dashboard</connection>
<execute_each_row>Y</execute_each_row>
<quoteString>N</quoteString>
<replace_variables>Y</replace_variables>
<set_params>Y</set_params>
<single_statement>N</single_statement>
<sql>UPDATE etl.job_execution
SET status = 'ERROR',
error_description = ?
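    -- '?' is bound to the error_description field of each incoming error row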
WHERE job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1120</xloc>
<yloc>608</yloc>
</GUI>
</transform>
<transform>
<name>Change job status on success</name>
<type>ExecSql</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<arguments>
</arguments>
<connection>ervu-dashboard</connection>
<execute_each_row>Y</execute_each_row>
<quoteString>N</quoteString>
<replace_variables>Y</replace_variables>
<set_params>N</set_params>
<single_statement>N</single_statement>
<sql>UPDATE etl.job_execution
SET status = 'SUCCESS'
WHERE job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}';
</sql>
<attributes/>
<GUI>
<xloc>1344</xloc>
<yloc>256</yloc>
</GUI>
</transform>
<transform>
<name>Create job execution record</name>
<type>ExecSql</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<arguments>
</arguments>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<quoteString>N</quoteString>
<replace_variables>Y</replace_variables>
<set_params>N</set_params>
<single_statement>N</single_statement>
<sql>UPDATE etl.job_execution
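-- repeat run: reset this recruitment's job_execution record to PROCESSING before re-loading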
SET
status = 'PROCESSING',
execution_datetime = current_timestamp,
error_description = NULL
where job_name = '${JOB_NAME}'
and recruitment_id = '${IDM_ID}';</sql>
<attributes/>
<GUI>
<xloc>416</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>Detect empty stream</name>
<type>DetectEmptyStream</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<attributes/>
<GUI>
<xloc>832</xloc>
<yloc>256</yloc>
</GUI>
</transform>
<transform>
<name>Filter rows</name>
<type>FilterRows</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<compare>
<condition>
<conditions>
</conditions>
<function>=</function>
<leftvalue>last_row</leftvalue>
<negated>N</negated>
<operator>-</operator>
<value>
<isnull>N</isnull>
<length>-1</length>
<name>constant</name>
<precision>-1</precision>
<text>Y</text>
<type>Boolean</type>
</value>
</condition>
</compare>
<send_true_to>Change job status on success</send_true_to>
<attributes/>
<GUI>
<xloc>1344</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>Identify last row in a stream</name>
<type>DetectLastRow</type>
<description/>
<distribute>N</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<resultfieldname>last_row</resultfieldname>
<attributes/>
<GUI>
<xloc>832</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>incapacity_input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>WITH filteredData AS (SELECT ri.recruit_id,
to_date(ri.info -> 'svedFL' -> 'svedUchetVICH' -> 'svedVICH' ->> 'dataSved',
'YYYY-MM-DD') as source_update_date,
ri.info -> 'svedFL' -> 'svedUchetVICH' -> 'svedVICH' -> 'svedUchet' as vich_arr
FROM recruits_info ri
WHERE jsonb_typeof(ri.info -> 'svedFL' -> 'svedUchetVICH' -> 'svedVICH' -> 'svedUchet') = 'array'
and COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = '${IDM_ID}')
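-- repeat run: no date filter, the full set for this recruitment is re-read;
-- the keyed upsert downstream keeps the re-run idempotent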
SELECT fd.recruit_id,
vich_elem ->> 'id' AS source_id,
fd.source_update_date,
       to_date(vich_elem ->> 'dataPostUchet', 'YYYY-MM-DD') AS register_date,
       to_date(vich_elem ->> 'dataSnyat', 'YYYY-MM-DD') AS deregistration_date
FROM filteredData fd
CROSS JOIN LATERAL jsonb_array_elements(vich_arr) AS vich_elem;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform>
<name>incapacity_upsert</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>10000</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>source_id</field>
<name>source_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>disability</table>
<value>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>source_id</name>
<rename>source_id</rename>
<update>N</update>
</value>
<value>
<name>source_update_date</name>
<rename>source_update_date</rename>
<update>Y</update>
</value>
<value>
<name>register_date</name>
<rename>register_date</rename>
<update>Y</update>
</value>
<value>
<name>deregistration_date</name>
<rename>deregistration_date</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>1120</xloc>
<yloc>432</yloc>
</GUI>
</transform>
<transform_error_handling>
<error>
<source_transform>incapacity_upsert</source_transform>
<target_transform>Change job status on error</target_transform>
<is_enabled>Y</is_enabled>
<nr_valuename/>
<descriptions_valuename>error_description</descriptions_valuename>
<fields_valuename/>
<codes_valuename/>
<max_errors/>
<max_pct_errors/>
<min_pct_rows/>
</error>
</transform_error_handling>
<attributes/>
</pipeline>

View file

@ -0,0 +1,330 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>recruitment_five_flow</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<parameters>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/09/08 19:24:07.006</created_date>
<modified_user>-</modified_user>
<modified_date>2025/09/08 19:24:07.006</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Get all recruitments</from>
<to>vich_flow.hpl</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Get all recruitments</from>
<to>vich_flow.hpl 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Get all recruitments</from>
<to>vich_flow.hpl 3</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Get all recruitments</from>
<to>vich_flow.hpl 4</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Get all recruitments</from>
<to>vich_flow.hpl 5</to>
<enabled>Y</enabled>
</hop>
</order>
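  <!-- "Get all recruitments" distributes its rows round-robin across the five identical
       vich_flow.hpl executor copies, so up to five recruitments are processed in parallel -->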
<transform>
<name>Get all recruitments</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard-test</connection>
<execute_each_row>N</execute_each_row>
<sql>SELECT
idm_id
FROM ervu_dashboard.recruitment;</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>560</xloc>
<yloc>448</yloc>
</GUI>
</transform>
<transform>
<name>vich_flow.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>idm_id</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>832</xloc>
<yloc>288</yloc>
</GUI>
</transform>
<transform>
<name>vich_flow.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>idm_id</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>832</xloc>
<yloc>368</yloc>
</GUI>
</transform>
<transform>
<name>vich_flow.hpl 3</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>idm_id</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>832</xloc>
<yloc>448</yloc>
</GUI>
</transform>
<transform>
<name>vich_flow.hpl 4</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>idm_id</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>832</xloc>
<yloc>528</yloc>
</GUI>
</transform>
<transform>
<name>vich_flow.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
<group_field/>
<group_time/>
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>idm_id</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
<execution_time_field>ExecutionTime</execution_time_field>
<execution_result_field>ExecutionResult</execution_result_field>
<execution_errors_field>ExecutionNrErrors</execution_errors_field>
<execution_lines_read_field>ExecutionLinesRead</execution_lines_read_field>
<execution_lines_written_field>ExecutionLinesWritten</execution_lines_written_field>
<execution_lines_input_field>ExecutionLinesInput</execution_lines_input_field>
<execution_lines_output_field>ExecutionLinesOutput</execution_lines_output_field>
<execution_lines_rejected_field>ExecutionLinesRejected</execution_lines_rejected_field>
<execution_lines_updated_field>ExecutionLinesUpdated</execution_lines_updated_field>
<execution_lines_deleted_field>ExecutionLinesDeleted</execution_lines_deleted_field>
<execution_files_retrieved_field>ExecutionFilesRetrieved</execution_files_retrieved_field>
<execution_exit_status_field>ExecutionExitStatus</execution_exit_status_field>
<execution_log_text_field>ExecutionLogText</execution_log_text_field>
<execution_log_channelid_field>ExecutionLogChannelId</execution_log_channelid_field>
<result_rows_target_transform/>
<result_files_target_transform/>
<result_files_file_name_field>FileName</result_files_file_name_field>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>832</xloc>
<yloc>608</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>

View file

@ -13,36 +13,41 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/11 13:16:11.204</created_date>
<created_date>2025/09/08 20:38:40.987</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/11 13:16:11.204</modified_date>
<modified_date>2025/09/08 20:38:40.987</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>drivers_licence_flow_delta.hpl</to>
<to>vich_flow_delta.hpl</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>get_max_source_update_date</from>
<to>Table input</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>drivers_licence_flow_delta.hpl 2</to>
<to>vich_flow_delta.hpl 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>drivers_licence_flow_delta.hpl 3</to>
<to>vich_flow_delta.hpl 3</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>drivers_licence_flow_delta.hpl 4</to>
<to>vich_flow_delta.hpl 4</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>drivers_licence_flow_delta.hpl 5</to>
<to>vich_flow_delta.hpl 5</to>
<enabled>Y</enabled>
</hop>
</order>
@ -57,25 +62,59 @@
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<connection>ervu-dashboard-test</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
r.idm_id as recruitment_id
<lookup>get_max_source_update_date</lookup>
<sql>WITH mud AS (
SELECT
recruitment_id,
MAX(execution_datetime) AS max_upd_date
FROM etl.job_execution
WHERE job_name = '${JOB_NAME}'
AND status IN ('SUCCESS','DELTA_ERROR','DELTA_SUCCESS','DELTA_PROCESSING')
GROUP BY recruitment_id
)
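-- a recruitment qualifies for a delta run only if it already has a SUCCESS/DELTA_* execution
-- for this job and at least one of its recruits was updated after that execution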
SELECT DISTINCT
    r.idm_id,
    ? AS max_source_update_date
FROM ervu_dashboard.recruitment r
LEFT JOIN etl.job_execution je
ON r.idm_id = je.recruitment_id
and job_name = 'drivers_licence_job'
where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING');</sql>
JOIN mud ON mud.recruitment_id = r.idm_id
JOIN recruits_info ri
ON COALESCE(ri.current_recruitment_id, ri.target_recruitment_id) = r.idm_id
AND ri.updated_at > mud.max_upd_date;</sql>
    <variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>352</xloc>
<yloc>288</yloc>
<xloc>704</xloc>
<yloc>496</yloc>
</GUI>
</transform>
<transform>
<name>drivers_licence_flow_delta.hpl</name>
<name>get_max_source_update_date</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>select max(source_update_date)
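-- single value feeding the '?' placeholder of the main Table input
-- (wired through its "insert data from transform" lookup)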
from vich_registered;</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>528</xloc>
<yloc>496</yloc>
</GUI>
</transform>
<transform>
<name>vich_flow_delta.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -86,7 +125,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/drivers_licence/parallel/drivers_licence_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -95,13 +134,13 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>drivers_licence_job</input>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -126,12 +165,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>128</yloc>
<xloc>896</xloc>
<yloc>336</yloc>
</GUI>
</transform>
<transform>
<name>drivers_licence_flow_delta.hpl 2</name>
<name>vich_flow_delta.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -142,7 +181,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/drivers_licence/parallel/drivers_licence_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -151,13 +190,13 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>drivers_licence_job</input>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -182,12 +221,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>208</yloc>
<xloc>896</xloc>
<yloc>416</yloc>
</GUI>
</transform>
<transform>
<name>drivers_licence_flow_delta.hpl 3</name>
<name>vich_flow_delta.hpl 3</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -198,7 +237,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/drivers_licence/parallel/drivers_licence_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -207,13 +246,13 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>drivers_licence_job</input>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -238,12 +277,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>288</yloc>
<xloc>896</xloc>
<yloc>496</yloc>
</GUI>
</transform>
<transform>
<name>drivers_licence_flow_delta.hpl 4</name>
<name>vich_flow_delta.hpl 4</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -254,7 +293,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/drivers_licence/parallel/drivers_licence_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -263,13 +302,13 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>drivers_licence_job</input>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -294,12 +333,12 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>368</yloc>
<xloc>896</xloc>
<yloc>576</yloc>
</GUI>
</transform>
<transform>
<name>drivers_licence_flow_delta.hpl 5</name>
<name>vich_flow_delta.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -310,7 +349,7 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/drivers_licence/parallel/drivers_licence_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow_delta.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -319,13 +358,13 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<parameters>
<variable_mapping>
<variable>IDM_ID</variable>
<field>recruitment_id</field>
<field>idm_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>drivers_licence_job</input>
<variable>MAX_SOURCE_UPDATE_DATE</variable>
<field>max_source_update_date</field>
<input/>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
@ -350,8 +389,8 @@ where je.status in ('SUCCESS', 'DELTA_ERROR', 'DELTA_SUCCESS', 'DELTA_PROCESSING
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>640</xloc>
<yloc>464</yloc>
<xloc>896</xloc>
<yloc>656</yloc>
</GUI>
</transform>
<transform_error_handling>

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>recruitment_five_flow_on_error</name>
<name>recruitment_five_flow_repeat</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -13,36 +13,36 @@
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/08/05 12:54:50.126</created_date>
<created_date>2025/09/08 20:31:52.575</created_date>
<modified_user>-</modified_user>
<modified_date>2025/08/05 12:54:50.126</modified_date>
<modified_date>2025/09/08 20:31:52.575</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>punishment_flow_repeat.hpl</to>
<to>vich_flow_repeat.hpl</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>punishment_flow_repeat.hpl 3</to>
<to>vich_flow_repeat.hpl 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>punishment_flow_repeat.hpl 2</to>
<to>vich_flow_repeat.hpl 3</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>punishment_flow_repeat.hpl 4</to>
<to>vich_flow_repeat.hpl 4</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Table input</from>
<to>punishment_flow_repeat.hpl 5</to>
<to>vich_flow_repeat.hpl 5</to>
<enabled>Y</enabled>
</hop>
</order>
@ -57,25 +57,25 @@
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<connection>ervu-dashboard-test</connection>
<execute_each_row>N</execute_each_row>
<limit/>
<sql>SELECT
r.idm_id as recruitment_id
<limit>0</limit>
<sql>SELECT r.idm_id AS recruitment_id
FROM ervu_dashboard.recruitment r
LEFT JOIN etl.job_execution je
ON r.idm_id = je.recruitment_id
and job_name = 'punishment_job'
where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
    AND je.job_name = 'vich_job'
WHERE je.id IS NULL
OR je.status IN ('ERROR', 'PROCESSING');</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>272</xloc>
<yloc>368</yloc>
<xloc>656</xloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
<name>punishment_flow_repeat.hpl</name>
<name>vich_flow_repeat.hpl</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -86,7 +86,7 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/punishment/parallel/punishment_flow_repeat.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -98,11 +98,6 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>punishment_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -126,12 +121,12 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>464</xloc>
<yloc>208</yloc>
<xloc>864</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>punishment_flow_repeat.hpl 2</name>
<name>vich_flow_repeat.hpl 2</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -142,7 +137,7 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/punishment/parallel/punishment_flow_repeat.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -154,11 +149,6 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>punishment_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -182,12 +172,12 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>464</xloc>
<yloc>288</yloc>
<xloc>864</xloc>
<yloc>384</yloc>
</GUI>
</transform>
<transform>
<name>punishment_flow_repeat.hpl 3</name>
<name>vich_flow_repeat.hpl 3</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -198,7 +188,7 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/punishment/parallel/punishment_flow_repeat.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -210,11 +200,6 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>punishment_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -238,12 +223,12 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>464</xloc>
<yloc>368</yloc>
<xloc>864</xloc>
<yloc>464</yloc>
</GUI>
</transform>
<transform>
<name>punishment_flow_repeat.hpl 4</name>
<name>vich_flow_repeat.hpl 4</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -254,7 +239,7 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/punishment/parallel/punishment_flow_repeat.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -266,11 +251,6 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>punishment_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -294,12 +274,12 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>464</xloc>
<yloc>448</yloc>
<xloc>864</xloc>
<yloc>544</yloc>
</GUI>
</transform>
<transform>
<name>punishment_flow_repeat.hpl 5</name>
<name>vich_flow_repeat.hpl 5</name>
<type>PipelineExecutor</type>
<description/>
<distribute>Y</distribute>
@ -310,7 +290,7 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<schema_name/>
</partitioning>
<run_configuration>local</run_configuration>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/punishment/parallel/punishment_flow_repeat.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/parallel/vich_flow_repeat.hpl</filename>
<filenameInField>N</filenameInField>
<filenameField/>
<group_size>1</group_size>
@ -322,11 +302,6 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<field>recruitment_id</field>
<input/>
</variable_mapping>
<variable_mapping>
<variable>JOB_NAME</variable>
<field/>
<input>punishment_job</input>
</variable_mapping>
<inherit_all_vars>Y</inherit_all_vars>
</parameters>
<execution_result_target_transform/>
@ -350,8 +325,8 @@ where je.status is null or je.status in('ERROR', 'PROCESSING');</sql>
<executors_output_transform/>
<attributes/>
<GUI>
<xloc>464</xloc>
<yloc>528</yloc>
<xloc>864</xloc>
<yloc>624</yloc>
</GUI>
</transform>
<transform_error_handling>

View file

@ -1,27 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<workflow>
<name>job_citizen_spouse</name>
<name>vich_job</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<workflow_version/>
<workflow_status>0</workflow_status>
<created_user>-</created_user>
<created_date>2025/06/05 14:27:15.055</created_date>
<created_date>2025/09/08 16:38:17.982</created_date>
<modified_user>-</modified_user>
<modified_date>2025/06/05 14:27:15.055</modified_date>
<modified_date>2025/09/08 16:38:17.982</modified_date>
<parameters>
<parameter>
<name>M_R_CR_DATE</name>
<default_value>3001-01-01 00:00:00</default_value>
<description/>
</parameter>
<parameter>
<name>M_R_UP_DATE</name>
<default_value/>
<description/>
</parameter>
</parameters>
</parameters>
<actions>
<action>
<name>Start</name>
@ -37,12 +26,12 @@
<schedulerType>0</schedulerType>
<weekDay>1</weekDay>
<parallel>N</parallel>
<xloc>416</xloc>
<yloc>208</yloc>
<xloc>128</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
<action>
<name>check_if_citizen_spouse_job_exists.hpl</name>
<name>check_if_job_execution_exists.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -52,7 +41,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_spouse/checkpoints/check_if_citizen_spouse_job_exists.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/support/check_if_job_execution_exists.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -63,12 +52,12 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>608</xloc>
<yloc>208</yloc>
<xloc>480</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
<action>
<name>citizen_spouse_job_exists_check</name>
<name>employer_job_execution_exists_check</name>
<description/>
<type>SIMPLE_EVAL</type>
<attributes/>
@ -80,12 +69,12 @@
<valuetype>variable</valuetype>
<variablename>JOB_EXECUTED_FLAG</variablename>
<parallel>N</parallel>
<xloc>880</xloc>
<yloc>208</yloc>
<xloc>784</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
<action>
<name>recruitments_five_flow.hpl</name>
<name>recruitment_five_flow.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -95,7 +84,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_spouse/recruitments_five_flow.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/recruitment_five_flow.hpl</filename>
<logext/>
<logfile/>
<loglevel>Basic</loglevel>
@ -108,12 +97,12 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>Y</parallel>
<xloc>1120</xloc>
<yloc>208</yloc>
<xloc>1104</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
<action>
<name>recruitment_five_flow_on_error.hpl</name>
<name>recruitment_five_flow_repeat.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -123,7 +112,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_spouse/recruitment_five_flow_on_error.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/recruitment_five_flow_repeat.hpl</filename>
<logext/>
<logfile/>
<loglevel>Basic</loglevel>
@ -136,12 +125,12 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>1120</xloc>
<yloc>512</yloc>
<xloc>1040</xloc>
<yloc>464</yloc>
<attributes_hac/>
</action>
<action>
<name>check_if_need_to_repeat_job.hpl</name>
<name>check_if_need_to_repeat.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -151,9 +140,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_spouse/check_if_need_to_repeat_job.hpl</filename>
<logext/>
<logfile/>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/support/check_if_need_to_repeat.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -164,8 +151,8 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>880</xloc>
<yloc>368</yloc>
<xloc>784</xloc>
<yloc>304</yloc>
<attributes_hac/>
</action>
<action>
@ -181,8 +168,8 @@
<valuetype>variable</valuetype>
<variablename>NEED_TO_REPEAT_JOB</variablename>
<parallel>N</parallel>
<xloc>880</xloc>
<yloc>512</yloc>
<xloc>784</xloc>
<yloc>464</yloc>
<attributes_hac/>
</action>
<action>
@ -196,7 +183,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/citizen_spouse/recruitment_five_flow_delta.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/citizen_tables/medicine/vich/recruitment_five_flow_delta.hpl</filename>
<logext/>
<logfile/>
<loglevel>Basic</loglevel>
@ -209,50 +196,62 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>880</xloc>
<xloc>784</xloc>
<yloc>704</yloc>
<attributes_hac/>
</action>
<action>
<name>init_job_name</name>
<description/>
<type>SET_VARIABLES</type>
<attributes/>
<fields>
<field>
<variable_name>JOB_NAME</variable_name>
<variable_type>CURRENT_WORKFLOW</variable_type>
<variable_value>vich_job</variable_value>
</field>
</fields>
<file_variable_type>CURRENT_WORKFLOW</file_variable_type>
<replacevars>N</replacevars>
<parallel>N</parallel>
<xloc>272</xloc>
<yloc>160</yloc>
<attributes_hac/>
</action>
</actions>
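  <!-- Start -> init_job_name -> check_if_job_execution_exists.hpl; depending on JOB_EXECUTED_FLAG
       the workflow either runs recruitment_five_flow.hpl (full load) or goes through
       check_if_need_to_repeat.hpl to choose between recruitment_five_flow_repeat.hpl and the delta flow -->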
<hops>
<hop>
<from>Start</from>
<to>check_if_citizen_spouse_job_exists.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>check_if_citizen_spouse_job_exists.hpl</from>
<to>citizen_spouse_job_exists_check</to>
<from>check_if_job_execution_exists.hpl</from>
<to>employer_job_execution_exists_check</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>citizen_spouse_job_exists_check</from>
<to>recruitments_five_flow.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>citizen_spouse_job_exists_check</from>
<to>check_if_need_to_repeat_job.hpl</to>
<from>employer_job_execution_exists_check</from>
<to>check_if_need_to_repeat.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>check_if_need_to_repeat_job.hpl</from>
<from>check_if_need_to_repeat.hpl</from>
<to>Simple evaluation</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>employer_job_execution_exists_check</from>
<to>recruitment_five_flow.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Simple evaluation</from>
<to>recruitment_five_flow_on_error.hpl</to>
<to>recruitment_five_flow_repeat.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
@ -264,6 +263,20 @@
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Start</from>
<to>init_job_name</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>init_job_name</from>
<to>check_if_job_execution_exists.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
</hops>
<notepads>
</notepads>