commit 1071cca232 (parent fcfb9fdb35)
Author: r.gaztdinov, 2025-08-05 15:59:50 +03:00
22 changed files with 3431 additions and 312 deletions

View file

@ -21,93 +21,18 @@
</notepads>
<order>
<hop>
<from>JSON input state_job_recruits_info.json</from>
<to>Set variables</to>
<from>Table input</from>
<to>Set variables 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Set variables</from>
<from>Set variables 2</from>
<to>Write to log</to>
<enabled>N</enabled>
</hop>
</order>
<transform>
<name>JSON input state_job_recruits_info.json</name>
<type>JsonInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<include>N</include>
<include_field/>
<rownum>N</rownum>
<addresultfile>N</addresultfile>
<readurl>N</readurl>
<removeSourceField>N</removeSourceField>
<IsIgnoreEmptyFile>N</IsIgnoreEmptyFile>
<doNotFailIfNoFile>Y</doNotFailIfNoFile>
<ignoreMissingPath>Y</ignoreMissingPath>
<defaultPathLeafToNull>Y</defaultPathLeafToNull>
<rownum_field/>
<file>
<name>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/state_job_recruits_info.json</name>
<filemask/>
<exclude_filemask/>
<file_required>N</file_required>
<include_subfolders>N</include_subfolders>
</file>
<fields>
<field>
<name>pipeline</name>
<path>$.pipeline</path>
<type>String</type>
<format/>
<currency/>
<decimal/>
<group/>
<length>-1</length>
<precision>-1</precision>
<trim_type>none</trim_type>
<repeat>N</repeat>
</field>
<field>
<name>status</name>
<path>$.status</path>
<type>String</type>
<format/>
<currency/>
<decimal/>
<group/>
<length>-1</length>
<precision>-1</precision>
<trim_type>none</trim_type>
<repeat>N</repeat>
</field>
</fields>
<limit>0</limit>
<IsInFields>N</IsInFields>
<IsAFile>N</IsAFile>
<valueField/>
<shortFileFieldName/>
<pathFieldName/>
<hiddenFieldName/>
<lastModificationTimeFieldName/>
<uriNameFieldName/>
<rootUriNameFieldName/>
<extensionFieldName/>
<sizeFieldName/>
<attributes/>
<GUI>
<xloc>464</xloc>
<yloc>336</yloc>
</GUI>
</transform>
<transform>
<name>Set variables</name>
<name>Set variables 2</name>
<type>SetVariable</type>
<description/>
<distribute>Y</distribute>
@ -132,8 +57,34 @@
<use_formatting>Y</use_formatting>
<attributes/>
<GUI>
<xloc>720</xloc>
<yloc>336</yloc>
<xloc>736</xloc>
<yloc>192</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>select
status as STATUS,
job_name AS PIPELINE
from public.etl_checkpoints
where job_name = 'job_recruits_info'</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>496</xloc>
<yloc>192</yloc>
</GUI>
</transform>
<transform>

View file

@ -0,0 +1,142 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>branching_recruits_info_delta</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<parameters>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/05/29 10:09:35.340</created_date>
<modified_user>-</modified_user>
<modified_date>2025/05/29 10:09:35.340</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Set variables 2</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>Set variables 2</from>
<to>Write to log</to>
<enabled>N</enabled>
</hop>
</order>
<transform>
<name>Set variables 2</name>
<type>SetVariable</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<fields>
<field>
<field_name>pipeline</field_name>
<variable_name>PIPE</variable_name>
<variable_type>PARENT_WORKFLOW</variable_type>
</field>
<field>
<field_name>status</field_name>
<variable_name>STATUS</variable_name>
<variable_type>PARENT_WORKFLOW</variable_type>
</field>
</fields>
<use_formatting>Y</use_formatting>
<attributes/>
<GUI>
<xloc>736</xloc>
<yloc>192</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
COALESCE(job_name, 'job_recruits_info_delta') AS PIPELINE,
COALESCE(status, 'ERROR') AS STATUS,
current_timestamp::timestamp as record_created
FROM (
select
CASE
WHEN status = 'PROCESSING' THEN 'ERROR'
ELSE status
END AS status,
job_name
from public.etl_checkpoints
where job_name = 'job_recruits_info_delta'
UNION ALL
SELECT NULL, NULL
WHERE NOT EXISTS (
SELECT 1
FROM public.etl_checkpoints
WHERE job_name = 'job_recruits_info_delta'
)
) t</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>496</xloc>
<yloc>192</yloc>
</GUI>
</transform>
<transform>
<name>Write to log</name>
<type>WriteToLog</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<displayHeader>Y</displayHeader>
<fields>
<field>
<name>pipeline</name>
</field>
<field>
<name>status</name>
</field>
</fields>
<limitRows>N</limitRows>
<limitRowsNumber>0</limitRowsNumber>
<loglevel>Basic</loglevel>
<logmessage>${PIPE}
${STATUS}</logmessage>
<attributes/>
<GUI>
<xloc>944</xloc>
<yloc>336</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>
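The Table input above always emits exactly one (PIPELINE, STATUS) row: an existing checkpoint (with a stale PROCESSING status remapped to ERROR), or a synthetic row that defaults to ERROR when job_recruits_info_delta has no checkpoint yet. The same guarantee can also be expressed with a LEFT JOIN against a one-row relation; this is only an illustrative sketch against the public.etl_checkpoints table from this commit, not the form used here:

-- Sketch only: same one-row fallback as the UNION ALL / NOT EXISTS query above.
SELECT
  COALESCE(c.job_name, 'job_recruits_info_delta') AS pipeline,
  COALESCE(CASE WHEN c.status = 'PROCESSING' THEN 'ERROR' ELSE c.status END,
           'ERROR') AS status,
  current_timestamp::timestamp AS record_created
FROM (SELECT 1) AS one
LEFT JOIN public.etl_checkpoints c
  ON c.job_name = 'job_recruits_info_delta';

Either form keeps the downstream Set variables 2 transform supplied with a row even on the very first run.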

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>change_status_delta_recruits_info_error</name>
<name>change_status_delta_recruits_info_flow1_error</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -85,14 +85,7 @@
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql> SELECT
'delta_recruits_info' as job_name,
'ERROR' as status,
current_timestamp as record_created
union all
SELECT
'job_recruits_info' as job_name,
'delta_recruits_info_flow1' as job_name,
'ERROR' as status,
current_timestamp as record_created

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>change_status_delta_recruits_info_success</name>
<name>change_status_delta_recruits_info_flow1_success</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -85,17 +85,9 @@
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql> SELECT
'delta_recruits_info' as job_name,
'delta_recruits_info_flow1' as job_name,
'SUCCESS' as status,
current_timestamp as record_created
union all
SELECT
'job_recruits_info' as job_name,
'SUCCESS' as status,
current_timestamp as record_created
</sql>
<variables_active>N</variables_active>
<attributes/>

View file

@ -0,0 +1,103 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>change_status_delta_recruits_info_flow2_error</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<parameters>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/06/16 12:20:06.191</created_date>
<modified_user>-</modified_user>
<modified_date>2025/06/16 12:20:06.191</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>job_name</field>
<name>job_name</name>
</key>
<schema>public</schema>
<table>etl_checkpoints</table>
<value>
<name>job_name</name>
<rename>job_name</rename>
<update>N</update>
</value>
<value>
<name>record_created</name>
<rename>record_created</rename>
<update>Y</update>
</value>
<value>
<name>status</name>
<rename>status</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>1152</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql> SELECT
'delta_recruits_info_flow1' as job_name,
'ERROR' as status,
current_timestamp as record_created
</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>784</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>
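Each change_status_* pipeline pairs a constant-row Table input with an Insert / update transform keyed on job_name, so reruns overwrite the existing checkpoint row instead of inserting duplicates. For reference, a hypothetical single-statement equivalent — assuming PostgreSQL and a unique constraint on etl_checkpoints.job_name, neither of which is shown in this commit — would be:

-- Hypothetical upsert equivalent of Table input + Insert / update (sketch only).
INSERT INTO public.etl_checkpoints (job_name, status, record_created)
VALUES ('delta_recruits_info_flow1', 'ERROR', current_timestamp)
ON CONFLICT (job_name) DO UPDATE
SET status         = EXCLUDED.status,
    record_created = EXCLUDED.record_created;

The flow2 through flow5 success/error variants that follow differ only in the hard-coded status literal.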

View file

@ -0,0 +1,102 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>change_status_delta_recruits_info_flow2_success</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<parameters>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/06/16 12:20:06.191</created_date>
<modified_user>-</modified_user>
<modified_date>2025/06/16 12:20:06.191</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>job_name</field>
<name>job_name</name>
</key>
<schema>public</schema>
<table>etl_checkpoints</table>
<value>
<name>job_name</name>
<rename>job_name</rename>
<update>N</update>
</value>
<value>
<name>record_created</name>
<rename>record_created</rename>
<update>Y</update>
</value>
<value>
<name>status</name>
<rename>status</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>1152</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql> SELECT
'delta_recruits_info_flow1' as job_name,
'SUCCESS' as status,
current_timestamp as record_created
</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>784</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>

View file

@ -0,0 +1,103 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>change_status_delta_recruits_info_flow3_error</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<parameters>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/06/16 12:20:06.191</created_date>
<modified_user>-</modified_user>
<modified_date>2025/06/16 12:20:06.191</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>job_name</field>
<name>job_name</name>
</key>
<schema>public</schema>
<table>etl_checkpoints</table>
<value>
<name>job_name</name>
<rename>job_name</rename>
<update>N</update>
</value>
<value>
<name>record_created</name>
<rename>record_created</rename>
<update>Y</update>
</value>
<value>
<name>status</name>
<rename>status</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>1152</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql> SELECT
'delta_recruits_info_flow1' as job_name,
'ERROR' as status,
current_timestamp as record_created
</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>784</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>

View file

@ -0,0 +1,102 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>change_status_delta_recruits_info_flow3_success</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<parameters>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/06/16 12:20:06.191</created_date>
<modified_user>-</modified_user>
<modified_date>2025/06/16 12:20:06.191</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>job_name</field>
<name>job_name</name>
</key>
<schema>public</schema>
<table>etl_checkpoints</table>
<value>
<name>job_name</name>
<rename>job_name</rename>
<update>N</update>
</value>
<value>
<name>record_created</name>
<rename>record_created</rename>
<update>Y</update>
</value>
<value>
<name>status</name>
<rename>status</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>1152</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql> SELECT
'delta_recruits_info_flow1' as job_name,
'SUCCESS' as status,
current_timestamp as record_created
</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>784</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>

View file

@ -0,0 +1,103 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>change_status_delta_recruits_info_flow4_error</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<parameters>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/06/16 12:20:06.191</created_date>
<modified_user>-</modified_user>
<modified_date>2025/06/16 12:20:06.191</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>job_name</field>
<name>job_name</name>
</key>
<schema>public</schema>
<table>etl_checkpoints</table>
<value>
<name>job_name</name>
<rename>job_name</rename>
<update>N</update>
</value>
<value>
<name>record_created</name>
<rename>record_created</rename>
<update>Y</update>
</value>
<value>
<name>status</name>
<rename>status</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>1152</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql> SELECT
'delta_recruits_info_flow1' as job_name,
'ERROR' as status,
current_timestamp as record_created
</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>784</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>

View file

@ -0,0 +1,102 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>change_status_delta_recruits_info_flow4_success</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<parameters>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/06/16 12:20:06.191</created_date>
<modified_user>-</modified_user>
<modified_date>2025/06/16 12:20:06.191</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>job_name</field>
<name>job_name</name>
</key>
<schema>public</schema>
<table>etl_checkpoints</table>
<value>
<name>job_name</name>
<rename>job_name</rename>
<update>N</update>
</value>
<value>
<name>record_created</name>
<rename>record_created</rename>
<update>Y</update>
</value>
<value>
<name>status</name>
<rename>status</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>1152</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql> SELECT
'delta_recruits_info_flow1' as job_name,
'SUCCESS' as status,
current_timestamp as record_created
</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>784</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>

View file

@ -0,0 +1,103 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>change_status_delta_recruits_info_flow5_error</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<parameters>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/06/16 12:20:06.191</created_date>
<modified_user>-</modified_user>
<modified_date>2025/06/16 12:20:06.191</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>job_name</field>
<name>job_name</name>
</key>
<schema>public</schema>
<table>etl_checkpoints</table>
<value>
<name>job_name</name>
<rename>job_name</rename>
<update>N</update>
</value>
<value>
<name>record_created</name>
<rename>record_created</rename>
<update>Y</update>
</value>
<value>
<name>status</name>
<rename>status</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>1152</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql> SELECT
'delta_recruits_info_flow1' as job_name,
'ERROR' as status,
current_timestamp as record_created
</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>784</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>

View file

@ -0,0 +1,102 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>change_status_delta_recruits_info_flow5_success</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<parameters>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/06/16 12:20:06.191</created_date>
<modified_user>-</modified_user>
<modified_date>2025/06/16 12:20:06.191</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>job_name</field>
<name>job_name</name>
</key>
<schema>public</schema>
<table>etl_checkpoints</table>
<value>
<name>job_name</name>
<rename>job_name</rename>
<update>N</update>
</value>
<value>
<name>record_created</name>
<rename>record_created</rename>
<update>Y</update>
</value>
<value>
<name>status</name>
<rename>status</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>1152</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql> SELECT
'delta_recruits_info_flow1' as job_name,
'SUCCESS' as status,
current_timestamp as record_created
</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>
<xloc>784</xloc>
<yloc>304</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>

View file

@ -128,7 +128,8 @@ SELECT
'PROCESSING' AS status,
'recruits_info_flow5' AS job_name,
current_timestamp AS record_created
--FROM public.etl_checkpoints</sql>
--FROM public.etl_checkpoints
</sql>
<variables_active>N</variables_active>
<attributes/>
<GUI>

View file

@ -88,6 +88,46 @@
'PROCESSING' AS status,
'delta_recruits_info' AS job_name,
current_timestamp AS record_created
--FROM public.etl_checkpoints
UNION ALL
SELECT
'PROCESSING' AS status,
'recruits_info_delta_flow1' AS job_name,
current_timestamp AS record_created
--FROM public.etl_checkpoints
UNION ALL
SELECT
'PROCESSING' AS status,
'recruits_info_delta_flow2' AS job_name,
current_timestamp AS record_created
--FROM public.etl_checkpoints
UNION ALL
SELECT
'PROCESSING' AS status,
'recruits_info_delta_flow3' AS job_name,
current_timestamp AS record_created
--FROM public.etl_checkpoints
UNION ALL
SELECT
'PROCESSING' AS status,
'recruits_info_delta_flow4' AS job_name,
current_timestamp AS record_created
--FROM public.etl_checkpoints
UNION ALL
SELECT
'PROCESSING' AS status,
'recruits_info_delta_flow5' AS job_name,
current_timestamp AS record_created
--FROM public.etl_checkpoints</sql>
<variables_active>N</variables_active>
<attributes/>
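The hunk above extends change_status_to_processing so that, alongside delta_recruits_info, a PROCESSING checkpoint row is seeded for each of the five parallel delta flows via repeated UNION ALL branches. The same row set could be produced from a VALUES list; this is a sketch of an alternative, not the form committed here:

-- Sketch only: compact alternative to the UNION ALL chain above.
SELECT 'PROCESSING' AS status,
       j.job_name,
       current_timestamp AS record_created
FROM (VALUES ('delta_recruits_info'),
             ('recruits_info_delta_flow1'),
             ('recruits_info_delta_flow2'),
             ('recruits_info_delta_flow3'),
             ('recruits_info_delta_flow4'),
             ('recruits_info_delta_flow5')) AS j(job_name);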

View file

@ -366,8 +366,8 @@
<valuetype>variable</valuetype>
<variablename>STATUS</variablename>
<parallel>N</parallel>
<xloc>1744</xloc>
<yloc>2640</yloc>
<xloc>1392</xloc>
<yloc>2400</yloc>
<attributes_hac/>
</action>
<action>
@ -428,12 +428,12 @@
<valuetype>variable</valuetype>
<variablename>STATUS</variablename>
<parallel>N</parallel>
<xloc>1536</xloc>
<yloc>2832</yloc>
<xloc>1520</xloc>
<yloc>3360</yloc>
<attributes_hac/>
</action>
<action>
<name>delta_recruits_info.hpl</name>
<name>delta_recruits_info_flow1.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -442,44 +442,28 @@
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/delta_recruits_info.hpl</filename>
<exec_per_row>Y</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/parallel/delta_recruits_info_flow1.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<parameter>
<name>M_R_UP_DATE</name>
<value>${M_R_UP_DATE}</value>
</parameter>
<parameter>
<name>ID_F1</name>
<stream_name>IDM_FLOW1</stream_name>
</parameter>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<params_from_previous>Y</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2592</xloc>
<yloc>2640</yloc>
<attributes_hac/>
</action>
<action>
<name>Success delta_recruits_info</name>
<description/>
<type>SUCCESS</type>
<attributes/>
<parallel>N</parallel>
<xloc>3424</xloc>
<yloc>2640</yloc>
<attributes_hac/>
</action>
<action>
<name>error delta_recruits_info</name>
<description/>
<type>SUCCESS</type>
<attributes/>
<parallel>N</parallel>
<xloc>3424</xloc>
<yloc>2720</yloc>
<xloc>2544</xloc>
<yloc>2608</yloc>
<attributes_hac/>
</action>
<action>
@ -596,8 +580,8 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>1264</xloc>
<yloc>1232</yloc>
<xloc>2288</xloc>
<yloc>1488</yloc>
<attributes_hac/>
</action>
<action>
@ -1364,8 +1348,8 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>1728</xloc>
<yloc>1728</yloc>
<xloc>2288</xloc>
<yloc>2320</yloc>
<attributes_hac/>
</action>
<action>
@ -1390,12 +1374,12 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2048</xloc>
<yloc>2640</yloc>
<xloc>2256</xloc>
<yloc>3200</yloc>
<attributes_hac/>
</action>
<action>
<name>change_status_delta_recruits_info_success.hpl</name>
<name>change_status_delta_recruits_info_flow1_success.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -1405,7 +1389,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/change_status_delta_recruits_info_success.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/change_status_delta_recruits_info_flow1_success.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -1416,12 +1400,12 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2928</xloc>
<yloc>2640</yloc>
<xloc>2880</xloc>
<yloc>2608</yloc>
<attributes_hac/>
</action>
<action>
<name>change_status_delta_recruits_info_error.hpl</name>
<name>change_status_delta_recruits_info_flow1_error.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
@ -1431,7 +1415,7 @@
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/change_status_delta_recruits_info_error.hpl</filename>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/change_status_delta_recruits_info_flow1_error.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
@ -1442,8 +1426,489 @@
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2928</xloc>
<yloc>2720</yloc>
<xloc>2880</xloc>
<yloc>2688</yloc>
<attributes_hac/>
</action>
<action>
<name>recruitment_rows_five_flow.hpl 2</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/supportive_jobs/recruitment_rows_five_flow.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<parameter>
<name>CR_DATE</name>
<stream_name>CREATE_DATE</stream_name>
</parameter>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>Y</parallel>
<xloc>2128</xloc>
<yloc>2944</yloc>
<attributes_hac/>
</action>
<action>
<name>Abort workflow 2 2</name>
<description/>
<type>ABORT</type>
<attributes/>
<always_log_rows>N</always_log_rows>
<parallel>N</parallel>
<xloc>2096</xloc>
<yloc>3120</yloc>
<attributes_hac/>
</action>
<action>
<name>delta_recruits_info_flow2.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>Y</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/parallel/delta_recruits_info_flow2.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<parameter>
<name>M_R_UP_DATE</name>
<value>${M_R_UP_DATE}</value>
</parameter>
<parameter>
<name>ID_F2</name>
<stream_name>IDM_FLOW2</stream_name>
</parameter>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>Y</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2544</xloc>
<yloc>2768</yloc>
<attributes_hac/>
</action>
<action>
<name>change_status_delta_recruits_info_flow1_success.hpl 2</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/change_status_delta_recruits_info_flow1_success.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2880</xloc>
<yloc>2768</yloc>
<attributes_hac/>
</action>
<action>
<name>change_status_delta_recruits_info_flow1_error.hpl 2</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/change_status_delta_recruits_info_flow1_error.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2880</xloc>
<yloc>2848</yloc>
<attributes_hac/>
</action>
<action>
<name>delta_recruits_info_flow3.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>Y</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/parallel/delta_recruits_info_flow3.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<parameter>
<name>M_R_UP_DATE</name>
<value>${M_R_UP_DATE}</value>
</parameter>
<parameter>
<name>ID_F3</name>
<stream_name>IDM_FLOW3</stream_name>
</parameter>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>Y</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2544</xloc>
<yloc>2944</yloc>
<attributes_hac/>
</action>
<action>
<name>change_status_delta_recruits_info_flow1_success.hpl 3</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/change_status_delta_recruits_info_flow1_success.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2880</xloc>
<yloc>2944</yloc>
<attributes_hac/>
</action>
<action>
<name>change_status_delta_recruits_info_flow1_error.hpl 3</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/change_status_delta_recruits_info_flow1_error.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2880</xloc>
<yloc>3024</yloc>
<attributes_hac/>
</action>
<action>
<name>delta_recruits_info_flow4.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>Y</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/parallel/delta_recruits_info_flow4.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<parameter>
<name>M_R_UP_DATE</name>
<value>${M_R_UP_DATE}</value>
</parameter>
<parameter>
<name>ID_F4</name>
<stream_name>IDM_FLOW4</stream_name>
</parameter>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>Y</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2544</xloc>
<yloc>3104</yloc>
<attributes_hac/>
</action>
<action>
<name>change_status_delta_recruits_info_flow1_success.hpl 4</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/change_status_delta_recruits_info_flow1_success.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2880</xloc>
<yloc>3104</yloc>
<attributes_hac/>
</action>
<action>
<name>change_status_delta_recruits_info_flow1_error.hpl 4</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/change_status_delta_recruits_info_flow1_error.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2880</xloc>
<yloc>3184</yloc>
<attributes_hac/>
</action>
<action>
<name>delta_recruits_info_flow5.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>Y</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/parallel/delta_recruits_info_flow5.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<parameter>
<name>M_R_UP_DATE</name>
<value>${M_R_UP_DATE}</value>
</parameter>
<parameter>
<name>ID_F5</name>
<stream_name>IDM_FLOW5</stream_name>
</parameter>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>Y</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2544</xloc>
<yloc>3280</yloc>
<attributes_hac/>
</action>
<action>
<name>change_status_delta_recruits_info_flow1_success.hpl 5</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/change_status_delta_recruits_info_flow1_success.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2880</xloc>
<yloc>3280</yloc>
<attributes_hac/>
</action>
<action>
<name>change_status_delta_recruits_info_flow1_error.hpl 5</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/change_status_delta_recruits_info_flow1_error.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>2880</xloc>
<yloc>3360</yloc>
<attributes_hac/>
</action>
<action>
<name>checkpoint_job_recruits_info_delta.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/checkpoints/checkpoint_job_recruits_info_delta.hpl</filename>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>3312</xloc>
<yloc>2944</yloc>
<attributes_hac/>
</action>
<action>
<name>Success</name>
<description/>
<type>SUCCESS</type>
<attributes/>
<parallel>N</parallel>
<xloc>3552</xloc>
<yloc>2944</yloc>
<attributes_hac/>
</action>
<action>
<name>branching_recruits_info_delta.hpl</name>
<description/>
<type>PIPELINE</type>
<attributes/>
<add_date>N</add_date>
<add_time>N</add_time>
<clear_files>N</clear_files>
<clear_rows>N</clear_rows>
<create_parent_folder>N</create_parent_folder>
<exec_per_row>N</exec_per_row>
<filename>${PROJECT_HOME}/info_recruits/raw_data/info_recruits/branching_recruits_info_delta.hpl</filename>
<logext/>
<logfile/>
<loglevel>Basic</loglevel>
<parameters>
<pass_all_parameters>Y</pass_all_parameters>
</parameters>
<params_from_previous>N</params_from_previous>
<run_configuration>local</run_configuration>
<set_append_logfile>N</set_append_logfile>
<set_logfile>N</set_logfile>
<wait_until_finished>Y</wait_until_finished>
<parallel>N</parallel>
<xloc>1552</xloc>
<yloc>2944</yloc>
<attributes_hac/>
</action>
<action>
<name>Simple delta_recruits_info WHERE ERROR</name>
<description/>
<type>SIMPLE_EVAL</type>
<attributes/>
<comparevalue>ERROR</comparevalue>
<fieldname/>
<fieldtype>string</fieldtype>
<mask/>
<maxvalue/>
<minvalue/>
<successbooleancondition>true</successbooleancondition>
<successcondition>equal</successcondition>
<successnumbercondition>equal</successnumbercondition>
<successwhenvarset>N</successwhenvarset>
<valuetype>variable</valuetype>
<variablename>STATUS</variablename>
<parallel>N</parallel>
<xloc>1840</xloc>
<yloc>2944</yloc>
<attributes_hac/>
</action>
<action>
<name>delta_recruits_info success</name>
<description/>
<type>SUCCESS</type>
<attributes/>
<parallel>N</parallel>
<xloc>2016</xloc>
<yloc>2768</yloc>
<attributes_hac/>
</action>
</actions>
@ -1581,20 +2046,6 @@
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Simple job_recruits_info check NEW</from>
<to>change_status_to_processing.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>change_status_to_processing.hpl</from>
<to>recruitment_rows_five_flow.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>recruits_info_flow1.hpl</from>
<to>change_status_recruits_info_flow1_success.hpl</to>
@ -1938,62 +2389,258 @@
<evaluation>N</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>delta_recruits_info_flow1.hpl</from>
<to>change_status_delta_recruits_info_flow1_success.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>delta_recruits_info_flow1.hpl</from>
<to>change_status_delta_recruits_info_flow1_error.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Simple recruits_info WHERE ERROR</from>
<to>job_checkpoints_recruits_info.hwf</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>recruitment_rows_five_flow_if_error.hpl</from>
<to>change_status_to_processing.hpl 2</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>change_status_to_processing.hpl 2</from>
<to>job_checkpoints_recruits_info.hwf</to>
<from>Simple job_recruits_info check NEW</from>
<to>recruitment_rows_five_flow.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>recruitment_rows_five_flow.hpl</from>
<to>change_status_to_processing.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>recruitment_rows_five_flow.hpl 2</from>
<to>delta_recruits_info_flow1.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>recruitment_rows_five_flow.hpl 2</from>
<to>change_status_to_processing_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>Simple recruits_info WHERE SUCCESS</from>
<to>change_status_to_processing_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>delta_recruits_info.hpl</from>
<to>change_status_delta_recruits_info_success.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>change_status_delta_recruits_info_success.hpl</from>
<to>Success delta_recruits_info</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>delta_recruits_info.hpl</from>
<to>change_status_delta_recruits_info_error.hpl</to>
<from>recruitment_rows_five_flow.hpl 2</from>
<to>Abort workflow 2 2</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>change_status_delta_recruits_info_error.hpl</from>
<to>error delta_recruits_info</to>
<from>delta_recruits_info_flow2.hpl</from>
<to>change_status_delta_recruits_info_flow1_success.hpl 2</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>change_status_to_processing_delta.hpl</from>
<to>delta_recruits_info.hpl</to>
<from>delta_recruits_info_flow2.hpl</from>
<to>change_status_delta_recruits_info_flow1_error.hpl 2</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>delta_recruits_info_flow3.hpl</from>
<to>change_status_delta_recruits_info_flow1_success.hpl 3</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>delta_recruits_info_flow3.hpl</from>
<to>change_status_delta_recruits_info_flow1_error.hpl 3</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>delta_recruits_info_flow4.hpl</from>
<to>change_status_delta_recruits_info_flow1_success.hpl 4</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>delta_recruits_info_flow4.hpl</from>
<to>change_status_delta_recruits_info_flow1_error.hpl 4</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>delta_recruits_info_flow5.hpl</from>
<to>change_status_delta_recruits_info_flow1_success.hpl 5</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>delta_recruits_info_flow5.hpl</from>
<to>change_status_delta_recruits_info_flow1_error.hpl 5</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>recruitment_rows_five_flow.hpl 2</from>
<to>delta_recruits_info_flow2.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>recruitment_rows_five_flow.hpl 2</from>
<to>delta_recruits_info_flow3.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>recruitment_rows_five_flow.hpl 2</from>
<to>delta_recruits_info_flow4.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>recruitment_rows_five_flow.hpl 2</from>
<to>delta_recruits_info_flow5.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>change_status_delta_recruits_info_flow1_success.hpl</from>
<to>checkpoint_job_recruits_info_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>change_status_delta_recruits_info_flow1_error.hpl</from>
<to>checkpoint_job_recruits_info_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>change_status_delta_recruits_info_flow1_success.hpl 2</from>
<to>checkpoint_job_recruits_info_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>change_status_delta_recruits_info_flow1_error.hpl 2</from>
<to>checkpoint_job_recruits_info_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>change_status_delta_recruits_info_flow1_success.hpl 3</from>
<to>checkpoint_job_recruits_info_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>change_status_delta_recruits_info_flow1_error.hpl 3</from>
<to>checkpoint_job_recruits_info_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>change_status_delta_recruits_info_flow1_success.hpl 4</from>
<to>checkpoint_job_recruits_info_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>change_status_delta_recruits_info_flow1_error.hpl 4</from>
<to>checkpoint_job_recruits_info_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>change_status_delta_recruits_info_flow1_success.hpl 5</from>
<to>checkpoint_job_recruits_info_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>change_status_delta_recruits_info_flow1_error.hpl 5</from>
<to>checkpoint_job_recruits_info_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>checkpoint_job_recruits_info_delta.hpl</from>
<to>Success</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Simple recruits_info WHERE SUCCESS</from>
<to>branching_recruits_info_delta.hpl</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>branching_recruits_info_delta.hpl</from>
<to>Simple delta_recruits_info WHERE ERROR</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>Y</unconditional>
</hop>
<hop>
<from>Simple delta_recruits_info WHERE ERROR</from>
<to>recruitment_rows_five_flow.hpl 2</to>
<enabled>Y</enabled>
<evaluation>Y</evaluation>
<unconditional>N</unconditional>
</hop>
<hop>
<from>Simple delta_recruits_info WHERE ERROR</from>
<to>delta_recruits_info success</to>
<enabled>Y</enabled>
<evaluation>N</evaluation>
<unconditional>N</unconditional>
</hop>
</hops>
<notepads>
<notepad>

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>delta_recruits_info</name>
<name>delta_recruits_info_flow1</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
@ -55,7 +55,6 @@
<condition>=</condition>
<field>recruit_id</field>
<name>recruit_id</name>
<name2/>
</key>
<schema>ervu_dashboard</schema>
<table>recruits_info</table>
@ -134,8 +133,8 @@ FROM recruits_info ri
JOIN recruits r ON r.id = ri.recruit_id
WHERE
r.system_update_date >= '${M_R_UP_DATE}'::timestamp
--'${ID_F1}' != '' -- check for an empty string
--AND COALESCE(current_recruitment_id::text, target_recruitment_id::text) = '${ID_F1}'
AND '${ID_F1}' != '' -- check for an empty string
AND COALESCE(current_recruitment_id::text, target_recruitment_id::text) = '${ID_F1}'
AND '${M_R_CR_DATE}'::timestamp >= r.system_create_date
--AND '${CR_DATE}' &lt; ri.created_at
--ORDER BY ri.created_at ASC;</sql>
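With this change each parallel flow reads the same recruits_info extract but keeps only the rows whose recruitment id — COALESCE(current_recruitment_id, target_recruitment_id) — equals its own ${ID_Fn} parameter supplied by recruitment_rows_five_flow.hpl. As a rough way to inspect how that key distributes rows across the flows (schema assumed from the query above, illustration only):

-- Illustration only: row counts per recruitment key, i.e. per candidate flow slice.
SELECT COALESCE(r.current_recruitment_id::text,
                r.target_recruitment_id::text) AS recruitment_key,
       count(*) AS candidate_rows
FROM recruits_info ri
JOIN recruits r ON r.id = ri.recruit_id
GROUP BY 1
ORDER BY candidate_rows DESC;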

View file

@ -0,0 +1,151 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>delta_recruits_info_flow2</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<pipeline_status>0</pipeline_status>
<parameters>
<parameter>
<name>M_R_CR_DATE</name>
<default_value/>
<description/>
</parameter>
<parameter>
<name>M_R_UP_DATE</name>
<default_value/>
<description/>
</parameter>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/05/29 17:05:14.836</created_date>
<modified_user>-</modified_user>
<modified_date>2025/05/29 17:05:14.836</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>recruit_id</field>
<name>recruit_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>recruits_info</table>
<value>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>created_at</name>
<rename>created_at</rename>
<update>Y</update>
</value>
<value>
<name>updated_at</name>
<rename>updated_at</rename>
<update>Y</update>
</value>
<value>
<name>current_recruitment_id</name>
<rename>current_recruitment_id</rename>
<update>Y</update>
</value>
<value>
<name>target_recruitment_id</name>
<rename>target_recruitment_id</rename>
<update>Y</update>
</value>
<value>
<name>full_name</name>
<rename>full_name</rename>
<update>Y</update>
</value>
<value>
<name>gir_import_data_version_id</name>
<rename>gir_import_data_version_id</rename>
<update>Y</update>
</value>
<value>
<name>info</name>
<rename>info</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>848</xloc>
<yloc>368</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu_person_registry</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
r.system_create_date AS created_at,
r.system_update_date AS updated_at,
ri.recruit_id,
ri.info,
ri.gir_import_data_version_id,
r.current_recruitment_id,
r.target_recruitment_id,
r.full_name
FROM recruits_info ri
JOIN recruits r ON r.id = ri.recruit_id
WHERE
r.system_update_date >= '${M_R_UP_DATE}'::timestamp
AND '${ID_F2}' != '' -- check for an empty string
AND COALESCE(current_recruitment_id::text, target_recruitment_id::text) = '${ID_F2}'
AND '${M_R_CR_DATE}'::timestamp >= r.system_create_date
--AND '${CR_DATE}' &lt; ri.created_at
--ORDER BY ri.created_at ASC;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>432</xloc>
<yloc>368</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>
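The Insert / update transform in each flow looks rows up by recruit_id and updates the columns flagged update=Y, inserting a new row when the key is not found. A rough PostgreSQL analogue, shown only to illustrate those flags (this is not what Hop actually executes; it assumes recruit_id is unique in ervu_dashboard.recruits_info, and incoming_rows is a hypothetical stand-in for the incoming stream):

INSERT INTO ervu_dashboard.recruits_info AS t
  (recruit_id, created_at, updated_at, current_recruitment_id,
   target_recruitment_id, full_name, gir_import_data_version_id, info)
SELECT s.recruit_id, s.created_at, s.updated_at, s.current_recruitment_id,
       s.target_recruitment_id, s.full_name, s.gir_import_data_version_id, s.info
FROM incoming_rows s
ON CONFLICT (recruit_id) DO UPDATE SET
  created_at = EXCLUDED.created_at,
  updated_at = EXCLUDED.updated_at,
  current_recruitment_id = EXCLUDED.current_recruitment_id,
  target_recruitment_id = EXCLUDED.target_recruitment_id,
  full_name = EXCLUDED.full_name,
  gir_import_data_version_id = EXCLUDED.gir_import_data_version_id,
  info = EXCLUDED.info;
-- recruit_id is left out of the SET list because its update flag is N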

View file

@ -0,0 +1,151 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>delta_recruits_info_flow3</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<pipeline_status>0</pipeline_status>
<parameters>
<parameter>
<name>M_R_CR_DATE</name>
<default_value/>
<description/>
</parameter>
<parameter>
<name>M_R_UP_DATE</name>
<default_value/>
<description/>
</parameter>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/05/29 17:05:14.836</created_date>
<modified_user>-</modified_user>
<modified_date>2025/05/29 17:05:14.836</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>recruit_id</field>
<name>recruit_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>recruits_info</table>
<value>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>created_at</name>
<rename>created_at</rename>
<update>Y</update>
</value>
<value>
<name>updated_at</name>
<rename>updated_at</rename>
<update>Y</update>
</value>
<value>
<name>current_recruitment_id</name>
<rename>current_recruitment_id</rename>
<update>Y</update>
</value>
<value>
<name>target_recruitment_id</name>
<rename>target_recruitment_id</rename>
<update>Y</update>
</value>
<value>
<name>full_name</name>
<rename>full_name</rename>
<update>Y</update>
</value>
<value>
<name>gir_import_data_version_id</name>
<rename>gir_import_data_version_id</rename>
<update>Y</update>
</value>
<value>
<name>info</name>
<rename>info</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>848</xloc>
<yloc>368</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu_person_registry</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
r.system_create_date AS created_at,
r.system_update_date AS updated_at,
ri.recruit_id,
ri.info,
ri.gir_import_data_version_id,
r.current_recruitment_id,
r.target_recruitment_id,
r.full_name
FROM recruits_info ri
JOIN recruits r ON r.id = ri.recruit_id
WHERE
r.system_update_date >= '${M_R_UP_DATE}'::timestamp
AND '${ID_F3}' != '' -- check for an empty string
AND COALESCE(current_recruitment_id::text, target_recruitment_id::text) = '${ID_F3}'
AND '${M_R_CR_DATE}'::timestamp >= r.system_create_date
--AND '${CR_DATE}' &lt; ri.created_at
--ORDER BY ri.created_at ASC;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>432</xloc>
<yloc>368</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>

View file

@ -0,0 +1,151 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>delta_recruits_info_flow4</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<pipeline_status>0</pipeline_status>
<parameters>
<parameter>
<name>M_R_CR_DATE</name>
<default_value/>
<description/>
</parameter>
<parameter>
<name>M_R_UP_DATE</name>
<default_value/>
<description/>
</parameter>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/05/29 17:05:14.836</created_date>
<modified_user>-</modified_user>
<modified_date>2025/05/29 17:05:14.836</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>recruit_id</field>
<name>recruit_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>recruits_info</table>
<value>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>created_at</name>
<rename>created_at</rename>
<update>Y</update>
</value>
<value>
<name>updated_at</name>
<rename>updated_at</rename>
<update>Y</update>
</value>
<value>
<name>current_recruitment_id</name>
<rename>current_recruitment_id</rename>
<update>Y</update>
</value>
<value>
<name>target_recruitment_id</name>
<rename>target_recruitment_id</rename>
<update>Y</update>
</value>
<value>
<name>full_name</name>
<rename>full_name</rename>
<update>Y</update>
</value>
<value>
<name>gir_import_data_version_id</name>
<rename>gir_import_data_version_id</rename>
<update>Y</update>
</value>
<value>
<name>info</name>
<rename>info</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>848</xloc>
<yloc>368</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu_person_registry</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
r.system_create_date AS created_at,
r.system_update_date AS updated_at,
ri.recruit_id,
ri.info,
ri.gir_import_data_version_id,
r.current_recruitment_id,
r.target_recruitment_id,
r.full_name
FROM recruits_info ri
JOIN recruits r ON r.id = ri.recruit_id
WHERE
r.system_update_date >= '${M_R_UP_DATE}'::timestamp
AND '${ID_F4}' != '' -- check for an empty string
AND COALESCE(current_recruitment_id::text, target_recruitment_id::text) = '${ID_F4}'
AND '${M_R_CR_DATE}'::timestamp >= r.system_create_date
--AND '${CR_DATE}' &lt; ri.created_at
--ORDER BY ri.created_at ASC;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>432</xloc>
<yloc>368</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>

View file

@ -0,0 +1,151 @@
<?xml version="1.0" encoding="UTF-8"?>
<pipeline>
<info>
<name>delta_recruits_info_flow5</name>
<name_sync_with_filename>Y</name_sync_with_filename>
<description/>
<extended_description/>
<pipeline_version/>
<pipeline_type>Normal</pipeline_type>
<pipeline_status>0</pipeline_status>
<parameters>
<parameter>
<name>M_R_CR_DATE</name>
<default_value/>
<description/>
</parameter>
<parameter>
<name>M_R_UP_DATE</name>
<default_value/>
<description/>
</parameter>
</parameters>
<capture_transform_performance>N</capture_transform_performance>
<transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
<transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
<created_user>-</created_user>
<created_date>2025/05/29 17:05:14.836</created_date>
<modified_user>-</modified_user>
<modified_date>2025/05/29 17:05:14.836</modified_date>
</info>
<notepads>
</notepads>
<order>
<hop>
<from>Table input</from>
<to>Insert / update</to>
<enabled>Y</enabled>
</hop>
</order>
<transform>
<name>Insert / update</name>
<type>InsertUpdate</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<commit>100</commit>
<connection>ervu-dashboard</connection>
<lookup>
<key>
<condition>=</condition>
<field>recruit_id</field>
<name>recruit_id</name>
</key>
<schema>ervu_dashboard</schema>
<table>recruits_info</table>
<value>
<name>recruit_id</name>
<rename>recruit_id</rename>
<update>N</update>
</value>
<value>
<name>created_at</name>
<rename>created_at</rename>
<update>Y</update>
</value>
<value>
<name>updated_at</name>
<rename>updated_at</rename>
<update>Y</update>
</value>
<value>
<name>current_recruitment_id</name>
<rename>current_recruitment_id</rename>
<update>Y</update>
</value>
<value>
<name>target_recruitment_id</name>
<rename>target_recruitment_id</rename>
<update>Y</update>
</value>
<value>
<name>full_name</name>
<rename>full_name</rename>
<update>Y</update>
</value>
<value>
<name>gir_import_data_version_id</name>
<rename>gir_import_data_version_id</rename>
<update>Y</update>
</value>
<value>
<name>info</name>
<rename>info</rename>
<update>Y</update>
</value>
</lookup>
<update_bypassed>N</update_bypassed>
<attributes/>
<GUI>
<xloc>848</xloc>
<yloc>368</yloc>
</GUI>
</transform>
<transform>
<name>Table input</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ervu_person_registry</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>SELECT
r.system_create_date AS created_at,
r.system_update_date AS updated_at,
ri.recruit_id,
ri.info,
ri.gir_import_data_version_id,
r.current_recruitment_id,
r.target_recruitment_id,
r.full_name
FROM recruits_info ri
JOIN recruits r ON r.id = ri.recruit_id
WHERE
r.system_update_date >= '${M_R_UP_DATE}'::timestamp
AND '${ID_F5}' != '' -- check for an empty string
AND COALESCE(current_recruitment_id::text, target_recruitment_id::text) = '${ID_F5}'
AND '${M_R_CR_DATE}'::timestamp >= r.system_create_date
--AND '${CR_DATE}' &lt; ri.created_at
--ORDER BY ri.created_at ASC;</sql>
<variables_active>Y</variables_active>
<attributes/>
<GUI>
<xloc>432</xloc>
<yloc>368</yloc>
</GUI>
</transform>
<transform_error_handling>
</transform_error_handling>
<attributes/>
</pipeline>
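Flows 2 through 5 above are copies of flow 1 that differ only in the ${ID_Fx} variable they filter on. The following diff reworks the query that assigns recruitments to those flows; it now reads restart points for failed flows from public.etl_checkpoints, along these lines (a sketch using only the table and columns referenced in that query):

SELECT last_recruitment_id, recruitment_created_date
FROM public.etl_checkpoints
WHERE status = 'ERROR'
  AND job_name = 'recruits_info_flow1';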

View file

@ -57,7 +57,17 @@
<connection>ervu-dashboard</connection>
<execute_each_row>N</execute_each_row>
<limit>0</limit>
<sql>WITH ordered AS (
<sql>WITH
fr_check AS (
SELECT
last_recruitment_id AS recruitment_id,
recruitment_created_date,
status,
job_name
FROM public.etl_checkpoints
WHERE status = 'ERROR' AND job_name IN ('recruits_info_flow1', 'recruits_info_flow2', 'recruits_info_flow3', 'recruits_info_flow4', 'recruits_info_flow5')
),
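-- Illustrative note (not part of the original query): fr_check holds the restart point of any
-- flow whose last run ended in ERROR; the CASE expressions further down only hand a recruitment
-- to flow N when its created_at is at or after that flow's recruitment_created_date checkpoint.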
ordered AS (
SELECT
idm_id,
created_at,
@ -68,130 +78,46 @@
splitted AS (
SELECT *,
CEIL(total_rows / 5.0) AS part_size,
FLOOR((rn - 1) / CEIL(total_rows / 5.0)) + 1 AS flow_num
FLOOR((rn - 1) / CEIL(total_rows / 5.0)) + 1 AS flow_num,
((rn - 1) % CEIL(total_rows / 5.0)) + 1 AS position
FROM ordered
),
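-- Worked example (illustrative only): with total_rows = 23, part_size = CEIL(23 / 5.0) = 5,
-- so rn 1..5 get flow_num 1 and position 1..5, rn 6..10 get flow_num 2, and so on up to
-- rn 21..23, which get flow_num 5 and position 1..3.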
flow_ranges AS (
pivoted AS (
SELECT
flow_num,
MIN(created_at) AS min_date,
MAX(created_at) AS max_date
position,
MAX(CASE WHEN flow_num = 1 AND created_at >= (SELECT recruitment_created_date FROM fr_check WHERE job_name = 'recruits_info_flow1') THEN created_at END) AS cr_flow1,
MAX(CASE WHEN flow_num = 1 AND created_at >= (SELECT recruitment_created_date FROM fr_check WHERE job_name = 'recruits_info_flow1') THEN idm_id::text END) AS idm_flow1,
MAX(CASE WHEN flow_num = 2 AND created_at >= (SELECT recruitment_created_date FROM fr_check WHERE job_name = 'recruits_info_flow2') THEN created_at END) AS cr_flow2,
MAX(CASE WHEN flow_num = 2 AND created_at >= (SELECT recruitment_created_date FROM fr_check WHERE job_name = 'recruits_info_flow2') THEN idm_id::text END) AS idm_flow2,
MAX(CASE WHEN flow_num = 3 AND created_at >= (SELECT recruitment_created_date FROM fr_check WHERE job_name = 'recruits_info_flow3') THEN created_at END) AS cr_flow3,
MAX(CASE WHEN flow_num = 3 AND created_at >= (SELECT recruitment_created_date FROM fr_check WHERE job_name = 'recruits_info_flow3') THEN idm_id::text END) AS idm_flow3,
MAX(CASE WHEN flow_num = 4 AND created_at >= (SELECT recruitment_created_date FROM fr_check WHERE job_name = 'recruits_info_flow4') THEN created_at END) AS cr_flow4,
MAX(CASE WHEN flow_num = 4 AND created_at >= (SELECT recruitment_created_date FROM fr_check WHERE job_name = 'recruits_info_flow4') THEN idm_id::text END) AS idm_flow4,
MAX(CASE WHEN flow_num = 5 AND created_at >= (SELECT recruitment_created_date FROM fr_check WHERE job_name = 'recruits_info_flow5') THEN created_at END) AS cr_flow5,
MAX(CASE WHEN flow_num = 5 AND created_at >= (SELECT recruitment_created_date FROM fr_check WHERE job_name = 'recruits_info_flow5') THEN idm_id::text END) AS idm_flow5
FROM splitted
GROUP BY flow_num
),
recr_inf AS (
SELECT DISTINCT COALESCE(current_recruitment_id, target_recruitment_id) AS idm_id
FROM ervu_dashboard.recruits_info
),
flow_data AS (
SELECT
fr.flow_num,
ri.idm_id,
(SELECT r.created_at FROM ervu_dashboard.recruitment r
WHERE r.idm_id = ri.idm_id
AND r.created_at BETWEEN fr.min_date AND fr.max_date
LIMIT 1) AS created_at
FROM recr_inf ri
CROSS JOIN flow_ranges fr
WHERE EXISTS (
SELECT 1 FROM ervu_dashboard.recruitment r
WHERE r.idm_id = ri.idm_id
)
),
result_ma as (
SELECT
MAX(created_at) FILTER(WHERE flow_num = 1) AS cr_flow1,
(SELECT idm_id FROM flow_data WHERE flow_num = 1 ORDER BY created_at DESC NULLS LAST LIMIT 1) AS idm_flow1,
MAX(created_at) FILTER(WHERE flow_num = 2) AS cr_flow2,
(SELECT idm_id FROM flow_data WHERE flow_num = 2 ORDER BY created_at DESC NULLS LAST LIMIT 1) AS idm_flow2,
MAX(created_at) FILTER(WHERE flow_num = 3) AS cr_flow3,
(SELECT idm_id FROM flow_data WHERE flow_num = 3 ORDER BY created_at DESC NULLS LAST LIMIT 1) AS idm_flow3,
MAX(created_at) FILTER(WHERE flow_num = 4) AS cr_flow4,
(SELECT idm_id FROM flow_data WHERE flow_num = 4 ORDER BY created_at DESC NULLS LAST LIMIT 1) AS idm_flow4,
MAX(created_at) FILTER(WHERE flow_num = 5) AS cr_flow5,
(SELECT idm_id FROM flow_data WHERE flow_num = 5 ORDER BY created_at DESC NULLS LAST LIMIT 1) AS idm_flow5
FROM flow_data
),
result_max as ( -- workaround
SELECT
CASE
WHEN cr_flow1 is null THEN '0001-01-01 00:00:00'::timestamp
ELSE cr_flow1
END AS cr_flow1,
idm_flow1,
CASE
WHEN cr_flow2 is null THEN '0001-01-01 00:00:00'::timestamp
ELSE cr_flow2
END AS cr_flow2,
idm_flow2,
CASE
WHEN cr_flow3 is null THEN '0001-01-01 00:00:00'::timestamp
ELSE cr_flow3
END AS cr_flow3,
idm_flow3,
CASE
WHEN cr_flow4 is null THEN '0001-01-01 00:00:00'::timestamp
ELSE cr_flow4
END AS cr_flow4,
idm_flow4,
CASE
WHEN cr_flow5 is null THEN '0001-01-01 00:00:00'::timestamp
ELSE cr_flow5
END AS cr_flow5,
idm_flow5
FROM result_ma
GROUP BY position
),
remains as (
select
CASE
WHEN created_at >= (select cr_flow1 from result_max) and flow_num = 1 THEN created_at
ELSE null
END as cr_flow1,
CASE
WHEN created_at >= (select cr_flow1 from result_max) and flow_num = 1 THEN idm_id
ELSE null
END as idm_flow1,
CASE
WHEN created_at >= (select cr_flow2 from result_max) and flow_num = 2 THEN created_at
ELSE null
END as cr_flow2,
CASE
WHEN created_at >= (select cr_flow2 from result_max) and flow_num = 2 THEN idm_id
ELSE null
END as idm_flow2,
CASE
WHEN created_at >= (select cr_flow3 from result_max) and flow_num = 3 THEN created_at
ELSE null
END as cr_flow3,
CASE
WHEN created_at >= (select cr_flow3 from result_max) and flow_num = 3 THEN idm_id
ELSE null
END as idm_flow3,
CASE
WHEN created_at >= (select cr_flow4 from result_max) and flow_num = 4 THEN created_at
ELSE null
END as cr_flow4,
CASE
WHEN created_at >= (select cr_flow4 from result_max) and flow_num = 4 THEN idm_id
ELSE null
END as idm_flow4,
CASE
WHEN created_at >= (select cr_flow5 from result_max) and flow_num = 5 THEN created_at
ELSE null
END as cr_flow5,
CASE
WHEN created_at >= (select cr_flow5 from result_max) and flow_num = 5 THEN idm_id
ELSE null
END as idm_flow5
from splitted
SELECT
position,
cr_flow1,
idm_flow1::uuid,
cr_flow2,
idm_flow2::uuid,
cr_flow3,
idm_flow3::uuid,
cr_flow4,
idm_flow4::uuid,
cr_flow5,
idm_flow5::uuid
FROM pivoted
WHERE (cr_flow1 IS NOT NULL AND idm_flow1 IS NOT NULL)
OR (cr_flow2 IS NOT NULL AND idm_flow2 IS NOT NULL)
OR (cr_flow3 IS NOT NULL AND idm_flow3 IS NOT NULL)
OR (cr_flow4 IS NOT NULL AND idm_flow4 IS NOT NULL)
OR (cr_flow5 IS NOT NULL AND idm_flow5 IS NOT NULL)
order by position
),
idm1 as (
select