#!/bin/ksh
# (extraction residue: "Академический Документы" / "Профессиональный Документы"
#  / "Культура Документы" — stray document-title text that is not shell code;
#  preserved here as a comment so the interpreter ignores it, and the shebang
#  restored to line 1 where the kernel requires it)
#****** shellscript/gw_start_seq_tasks.sh
# name
# gw_start_seq_tasks.sh
# synopsis
# ./gw_start_seq_tasks.sh
# function
# start process sequence from defined execution order list
# author
# bs
# creation date
# 01.10.2006
# history
# 01.10.2006 - bs - initial implementation
# 24.04.2007 - bs - review because of hardcoded paths and credentials
# 29.04.2007 - bs - added location and /nolog option by sqlplus connections
# 04.05.2007 - bs - checked status of first start of wf.
# 05.05.2007 - bs - renamed log files, to contain prcprocessid
# inputs
# param1 - parameter file defined by absolute path
# output
# 0 - success / 1 - error
# notes
# there is only one input parameter : parameter file with absolute path
# all server and database parameters are taken from infa.txt file
# dynamic parameter file name and task list file name are taken from the parameter file
# source
#
# --- runtime setup ---------------------------------------------------
# param1: start parameter file, given as an absolute path
param_file=${1}
file_name=$(basename -- "$param_file")
# the working directory is the directory that contains the parameter file;
# dirname replaces the old  sed 's/<name>//g'  trick, which deleted EVERY
# occurrence of the file name from the path, not just the last component
dir=$(dirname -- "$param_file")
# NOTE(review): inf_file is assigned but never read in this chunk; the header
# says server/database parameters come from infa.txt, so the code that
# sources/parses it was probably lost in extraction — confirm against the
# original script.
inf_file=${dir}/infa.txt
# ifs=";"
echo "pc server : $pc_server (port: $port)"
echo "database : $db/$schema"
echo "archive directory: $arch_dir "
echo "working dir: $dir"
echo "start parameter file: $param_file"
echo "dynamic parameter file: $dynamic_pf"
echo "executionorderlist: $execution_list"
# timestamp used in all log/archive file names; the lowercase
# %y.%m.%d.%h:%m:%s of the mangled source was an extraction artifact
# (%h is the abbreviated month name, %s the epoch) — restore the intended
# year/hour/minute/second codes
creation_date=$(date +%Y.%m.%d.%H:%M:%S)
# --- start the workflow for the current execution-order entry --------
# The spool file (written by an sqlplus call that is not visible in this
# chunk) either flags the workflow as skippable or carries the task line
# to execute.  All wrapped lines below were re-joined and the missing
# closing 'fi' restored — the extraction had split them mid-command.
xx=`grep "skip_wf" $spool_file`
if [ x"$xx" != x ]
then
    # workflow flagged as skip_wf: only log the skip, do not call pmcmd
    echo " $creation_date skiping wf. processnr: ${process_id} exec.order: ${execution_order} " >>$log_file
    echo " $creation_date skiping wf. processnr: ${process_id} exec.order: ${execution_order} " >>$task_list
else
    # pick task id, workflow and folder name (columns 4-6) out of the spool
    # line matching this process id / execution order; note that 'set'
    # overwrites the positional parameters ($1 was saved in param_file above)
    set `grep "task_exec_order ${process_id} ${execution_order}" $spool_file | awk '{print $4, $5, $6}'`
    task_id=$1
    pc_workflow=$2
    pc_folder=$3
    echo $creation_date : processid=$process_id executionorder=$execution_order taskid=$task_id folder=$pc_folder workflow=$pc_workflow >> $task_list
    # start workflow
    # echo "starting: pmcmd startworkflow -s ${pc_server}:${port} -u ${pc_appl_user} -paramfile ${dynamic_pf} -p ${pc_appl_password} -f ${pc_folder} ${pc_workflow} "
    echo " $creation_date starting: pmcmd startworkflow -s ${pc_server}:${port} -u administrator -paramfile ${dynamic_pf} -pv pm_pass -f ${pc_folder} ${pc_workflow} " >>$log_file
    # pmcmd startworkflow -s ${pc_server}:${port} -u ${pc_appl_user} -paramfile ${dynamic_pf} -p ${pc_appl_password} -f ${pc_folder} ${pc_workflow}
    # -pv pm_pass reads the password from the pm_pass environment variable,
    # so no credential appears on the command line (see 24.04.2007 history)
    pmcmd startworkflow -s ${pc_server}:${port} -u administrator -paramfile ${dynamic_pf} -pv pm_pass -f ${pc_folder} ${pc_workflow}
    status=$?
    if [ $status != 0 ]; then echo "error by start of wf ${pc_workflow}" >> $task_list; echo "error by start of wf ${pc_workflow}" >> $log_file; run_status=error; else run_status=running; fi
    wf_status_spool=${dir}/${pc_folder}_${pc_workflow}_${creation_date}.tmp
    # check workflow status every 2 sec.
    # run_status=running
    while [ "${run_status}" = "running" ]; do
        sleep 2
        echo "pmcmd getworkflowdetails -s ${pc_server}:${port} -u administrator -pv pm_pass -f ${pc_folder} ${pc_workflow} > ${wf_status_spool}" >>$log_file
        pmcmd getworkflowdetails -s ${pc_server}:${port} -u administrator -pv pm_pass -f ${pc_folder} ${pc_workflow} > ${wf_status_spool}
        # extract the word between '[' and ']' of the run-status line.
        # NOTE(review): the pattern is all lower case — if live pmcmd prints
        # "Workflow run status:" (the whole file was lowercased by the
        # extraction) this grep never matches, run_status becomes empty and
        # the loop ends immediately; confirm against real pmcmd output.
        run_status=`grep 'workflow run status:' ${wf_status_spool} | cut -f2 -d '[' | cut -f1 -d ']'`
        echo " run status: ${run_status}" >> $log_file
    done
    echo "final status: ${run_status}" >> $log_file
    echo "final status: ${run_status}" >> $task_list
    rm ${wf_status_spool}
fi
# NOTE(review): the next three lines are the tail of an sqlplus here-document
# ("spool off" / "exit" / the closing "eom" delimiter) whose opening command
# was lost in extraction.  Left disabled: executed as shell they fail, the
# bare "exit" skipped the archiving below, and the stray backquote made the
# rest of the file a syntax error.  Restore them from the original script.
# spool off
# exit;
# eom`
# --- archive the run artefacts, prefixed with the process id and timestamp
# NOTE(review): prc_process_id is used here while the logging above uses
# process_id — if prc_process_id is never set, these archive names lose their
# prefix; the 05.05.2007 history entry ("renamed log files, to contain
# prcprocessid") suggests it is intentional, but confirm which variable the
# (missing) infa.txt parsing actually defines.
mv $log_file $arch_dir/${prc_process_id}_log_seq_${creation_date}
mv $task_list $arch_dir/${prc_process_id}_tasklist_$creation_date
mv $spool_file $arch_dir/${prc_process_id}_spool_$creation_date.lst
# keep a copy (not a move) of the start parameter file alongside the logs
cp $param_file $arch_dir/${prc_process_id}_${file_name}_${creation_date}
# propagate the pmcmd start status: 0 = success, non-zero = error (see header)
exit $status