diff --git a/boinc_software/cronjobs/acrontab_jobs_sixtadm.txt b/boinc_software/cronjobs/acrontab_jobs_sixtadm.txt index 3c2aa6a..a73d007 100644 --- a/boinc_software/cronjobs/acrontab_jobs_sixtadm.txt +++ b/boinc_software/cronjobs/acrontab_jobs_sixtadm.txt @@ -20,10 +20,14 @@ 20 3 * * * lxplus.cern.ch cd /afs/cern.ch/work/b/boinc/boinc ; /afs/cern.ch/user/s/sixtadm/boinc_soft/deleteStudies.sh >> /afs/cern.ch/user/s/sixtadm/boinc_soft/deleteStudies.log 2>&1 40 3 * * * lxplus.cern.ch cd /afs/cern.ch/work/b/boinc/boinctest ; /afs/cern.ch/user/s/sixtadm/boinc_soft/deleteStudies.sh >> /afs/cern.ch/user/s/sixtadm/boinc_soft/deleteStudies.log 2>&1 # -# list studies in spooldirs that could be deleted and notify users (based on dir itself): +# list studies in spooldirs that could be deleted and notify users (based on _ dir itself): 0 4 1 * * lxplus.cern.ch cd /afs/cern.ch/work/b/boinc/boinc ; /afs/cern.ch/user/s/sixtadm/boinc_soft/listDeleteStudies.sh >> /afs/cern.ch/user/s/sixtadm/boinc_soft/deleteStudies.log 2>&1 0 5 1 * * lxplus.cern.ch cd /afs/cern.ch/work/b/boinc/boinctest ; /afs/cern.ch/user/s/sixtadm/boinc_soft/listDeleteStudies.sh >> /afs/cern.ch/user/s/sixtadm/boinc_soft/deleteStudies.log 2>&1 # # list studies in spooldirs that could be deleted and notify users (based on _/work dir): -0 4 15 * * lxplus.cern.ch cd /afs/cern.ch/work/b/boinc/boinc ; /afs/cern.ch/user/s/sixtadm/boinc_soft/listDeleteStudies.sh >> /afs/cern.ch/user/s/sixtadm/boinc_soft/deleteStudies.log 2>&1 -0 5 15 * * lxplus.cern.ch cd /afs/cern.ch/work/b/boinc/boinctest ; /afs/cern.ch/user/s/sixtadm/boinc_soft/listDeleteStudies.sh >> /afs/cern.ch/user/s/sixtadm/boinc_soft/deleteStudies.log 2>&1 +0 4 11 * * lxplus.cern.ch cd /afs/cern.ch/work/b/boinc/boinc ; /afs/cern.ch/user/s/sixtadm/boinc_soft/listDeleteStudies.sh work >> /afs/cern.ch/user/s/sixtadm/boinc_soft/deleteStudies.log 2>&1 +0 5 11 * * lxplus.cern.ch cd /afs/cern.ch/work/b/boinc/boinctest ; /afs/cern.ch/user/s/sixtadm/boinc_soft/listDeleteStudies.sh work >> /afs/cern.ch/user/s/sixtadm/boinc_soft/deleteStudies.log 2>&1 +# +# list studies in spooldirs that could be deleted and notify users (based on _/results dir): +0 4 21 * * lxplus.cern.ch cd /afs/cern.ch/work/b/boinc/boinc ; /afs/cern.ch/user/s/sixtadm/boinc_soft/listDeleteStudies.sh results >> /afs/cern.ch/user/s/sixtadm/boinc_soft/deleteStudies.log 2>&1 +0 5 21 * * lxplus.cern.ch cd /afs/cern.ch/work/b/boinc/boinctest ; /afs/cern.ch/user/s/sixtadm/boinc_soft/listDeleteStudies.sh results >> /afs/cern.ch/user/s/sixtadm/boinc_soft/deleteStudies.log 2>&1 diff --git a/boinc_software/cronjobs/listDeleteStudies.sh b/boinc_software/cronjobs/listDeleteStudies.sh index 0a74aa9..2906607 100755 --- a/boinc_software/cronjobs/listDeleteStudies.sh +++ b/boinc_software/cronjobs/listDeleteStudies.sh @@ -5,7 +5,7 @@ # to be run in: # /afs/cern.ch/work/b/boinc/boinc oldN=30 # days -threshOccupancy=0.5 # [GB] +threshOccupancy=1.0 # [GB] BOINCspoolDir=$PWD SCRIPTDIR=`dirname $0` SCRIPTDIR="`cd ${SCRIPTDIR} ; pwd`" @@ -28,11 +28,8 @@ function treatSingleDir(){ } trap "echo \" ...ending at \`date\` .\" " exit +echo "" echo " starting `basename $0` at `date` ..." -lUseWork=true -if [ $# == 1 ] ; then - lUseWork=false -fi # prepare delete dir [ -d delete ] || mkdir delete @@ -46,12 +43,20 @@ echo " checking BOINC spooldir ${BOINCspoolDir} ..." # ask confirmation to user now=`date +"%F_%H-%M-%S"` # find old directories (based on _/work/) -if ${lUseWork} ; then - echo " find old directories based on _/work/ ..." - allStudies=`find . 
-maxdepth 2 -type d -name work -ctime +${oldN}` - for currCase in ${allStudies} ; do - treatSingleDir ${currCase%/work} - done +if [ -n "$1" ] ; then + case "$1" in + work | results ) + echo " find old directories based on _/$1/ ..." + allStudies=`find . -maxdepth 2 -type d -name $1 -ctime +${oldN}` + for currCase in ${allStudies} ; do + treatSingleDir ${currCase%/$1} + done + ;; + * ) + echo " wrong input: $1 [work|results]" + exit 1 + ;; + esac else echo " find old directories based on _ only ..." allStudies=`find . -maxdepth 1 -type d -ctime +${oldN} | grep -v -e upload -e delete` diff --git a/boinc_software/maintain_common_space/structureMe.sh b/boinc_software/maintain_common_space/structureMe.sh index 21ffc53..6bbca78 100755 --- a/boinc_software/maintain_common_space/structureMe.sh +++ b/boinc_software/maintain_common_space/structureMe.sh @@ -82,11 +82,17 @@ EOF fi if ${lScripts} ; then # make fortran exes for checking fort.10 - cd ${__Dir} - cd utilities/fortran + cd ${__Dir}/utilities/fortran make ls -ltrh cd ${__origDir} + # make documentation + if [ -d ${__Dir}/utilities/doc ] ; then + cd ${__Dir}/utilities/doc + make + ls -ltrh + fi + cd ${__origDir} # echo commits/logs echo "" echo " --> present branch:" @@ -119,6 +125,10 @@ function checkDir(){ cd ${__Dir}/utilities/fortran ls -ltrh cd ${__origDir} + # documentation + cd ${__Dir}/utilities/doc + ls -ltrh + cd ${__origDir} # checkout of SixDB cd ${__Dir}/utilities/externals/SixDeskDB/ git log --max-count=1 diff --git a/utilities/templates/input/fort.3.local b/sixjobs/fort.3.local similarity index 100% rename from utilities/templates/input/fort.3.local rename to sixjobs/fort.3.local diff --git a/sixjobs/scan_definitions b/sixjobs/scan_definitions new file mode 100644 index 0000000..90d80af --- /dev/null +++ b/sixjobs/scan_definitions @@ -0,0 +1,35 @@ +#!/bin/bash + +############################## SCAN OVER A CARTESIAN GRID ###################### + +# specify the variable names to be used in naming the studies, e.g.: +# scan_variables="QP IOCT" +scan_variables="QP IOCT" + +# specify the placeholders in the mask file, e.g.: +# scan_placeholders="%QPV %OCV" +scan_placeholders="%QPV %OCV" + +# specify the prefix of the study name +# e.g. 
if the base mask file is called hl13B1.mask, we use: +# scan_prefix="hl10BaseB1" +scan_prefix="hl10BaseB1" + +# specify the values to be taken by the different variables +# if we want to use 14 for QP and values between 0 and 40 in steps of 40 for IOCT: +# scan_vals_QP="3" +# scan_vals_IOCT=$(seq -500 200 500) +scan_vals_QP="3" +scan_vals_IOCT=$(seq -500 200 500) + + +############################## SCAN OVER SPECIFIC MASKS ####################### + +# THIS OPTION REQUIRES ALL THE MASK FILES TO BE EXISTING + +# (de)activate the scan over specific masks +scan_masks=false +scan_studies=" +ats2017_QP_0_IOCT_12 +ats2017_QP_0_IOCT_16 +" diff --git a/utilities/templates/input/sixdeskenv b/sixjobs/sixdeskenv similarity index 100% rename from utilities/templates/input/sixdeskenv rename to sixjobs/sixdeskenv diff --git a/utilities/templates/input/sysenv b/sixjobs/sysenv similarity index 100% rename from utilities/templates/input/sysenv rename to sixjobs/sysenv diff --git a/utilities/bash/dot_profile b/utilities/bash/dot_profile index 8af8f7f..540399d 100755 --- a/utilities/bash/dot_profile +++ b/utilities/bash/dot_profile @@ -625,17 +625,17 @@ sixdeskamps() sixdeskAllFloats(){ # A.Mereghetti, 2017-04-08 # new function, to generate an array of floating point values - local __precision=1.0E-15 local __xMin=$1 local __xMax=$2 local __xDelta=$3 + local __lSkipExtremes=$4 # - preliminary checks - to be moved to check_env.sh if [ -z "${__xMax}" ] || [ -z "${__xDelta}" ] ; then __xMax=${__xMin} __xDelta=${__xMin} fi # - get all values - python ${SCRIPTDIR}/python/generate_floats.py ${__xMin} ${__xMax} ${__xDelta} ${__precision} + python ${SCRIPTDIR}/python/generate_floats.py ${__xMin} ${__xMax} ${__xDelta} ${__lSkipExtremes} } sixdeskFixDotFloat(){ # A.Mereghetti, 2017-04-08 @@ -661,10 +661,11 @@ sixdeskAllTunes(){ # - tunesYY: returned array of ver tune values # - inttunesXX: returned array of integer part of hor tunes # - inttunesYY: returned array of integer part of ver tunes + local __lForceIntegers=False # hor: tunesXX="" inttunesXX="" - local __tunesXX=$(sixdeskAllFloats $tunex $tunex1 $deltax) + local __tunesXX=$(sixdeskAllFloats $tunex $tunex1 $deltax ) for tmptn in ${__tunesXX} ; do tunesXX="${tunesXX} $(sixdeskFixDotFloat ${tmptn})" inttunesXX="${inttunesXX} $(sixdeskinttune ${tmptn})" @@ -674,7 +675,7 @@ sixdeskAllTunes(){ # ver: tunesYY="" inttunesYY="" - local __tunesYY=$(sixdeskAllFloats $tuney $tuney1 $deltay) + local __tunesYY=$(sixdeskAllFloats $tuney $tuney1 $deltay ) for tmptn in ${__tunesYY} ; do tunesYY="${tunesYY} $(sixdeskFixDotFloat ${tmptn})" inttunesYY="${inttunesYY} $(sixdeskinttune ${tmptn})" @@ -691,9 +692,10 @@ sixdeskAllAmplitudes(){ ampstart="" ampfinish="" local __iLast=0 + local __lForceIntegers=False # get all amplitude points - local __allAmplitudes=$(sixdeskAllFloats $ns1l $ns2l $nsincl) + local __allAmplitudes=$(sixdeskAllFloats $ns1l $ns2l $nsincl ) __allAmplitudes=( ${__allAmplitudes} ) # get all amplitude intervals @@ -728,7 +730,6 @@ sixdeskAllAngles(){ local __lbackcomp=$7 local __lReduceAngsWithAmplitude=$8 local __totAngle=$9 - local __factor=${10} # generated arrays: KKs="" @@ -740,47 +741,55 @@ sixdeskAllAngles(){ if ${__lReduceAngsWithAmplitude}; then for (( iAmple=0; iAmple<${#fAmpEnds[@]}; iAmple++ )) ; do if (( $(echo "${reduce_angs_with_amplitude} ${fAmpEnds[${iAmple}]}" | awk '{print ($1 < $2)}') )); then + # varying number of angles local __angles_nb=$(echo ${fAmpEnds[${iAmple}]} ${ampfinish} ${__kmax} | awk '{printf ("%.f", ($1/$2*$3))}') else + # 
constant number of angles local __angles_nb=$(echo ${reduce_angs_with_amplitude} ${ampfinish} ${__kmax} | awk '{printf ("%.f", ($1/$2*$3))}') fi if [ ${__angles_nb} -eq 0 ];then __angles_nb=1 fi - - for (( kk=1; kk<=${__angles_nb}; kk++ )) ; do - KKs="${KKs} ${kk}" - local __tmpAngle=`echo ${totAngle} ${kk} ${__angles_nb} | awk '{print ($1*$2/($3+1))}'` - Angles="${Angles} ${__tmpAngle}" - if ${__lbackcomp} ; then + if ${__lbackcomp} ; then + for (( kk=1; kk<=${__angles_nb}; kk++ )) ; do + KKs="${KKs} ${kk}" + local __tmpAngle=`echo ${totAngle} ${kk} ${__angles_nb} | awk '{print ($1*$2/($3+1))}'` + Angles="${Angles} ${__tmpAngle}" local __kang=`echo ${kk} ${__angles_nb} | awk '{print ($1/($2+1))}'` - else - local __kang=`echo ${kk} ${__angles_nb} | awk '{printf ("%.16E",$1/($2+1))}'` - fi - kAngs="${kAngs} ${__kang}" - done + kAngs="${kAngs} ${__kang}" + done + else + KKs="${KKs} $(seq 1 ${__angles_nb})" + local __angleStep=`echo ${__totAngle} ${__angles_nb} | awk '{print ($1/($2+1))}'` + Angles="${Angles} $(sixdeskAllFloats 0 ${__totAngle} ${__angleStep} True)" + local __kAngleStep=`echo ${__angles_nb} | awk '{printf ("%.18f",1.0/($1+1))}'` + kAngs="${kAngs} $(sixdeskAllFloats 0.0 1.0 ${__kAngleStep} True)" + fi let nTotAngles+=${__angles_nb} KKs_ampl+=( ${nTotAngles} ) done else if ${__lbackcomp} ; then local __angleStep=`echo ${__totAngle} ${__kmax} | awk '{print ($1/($2+1))}'` + for (( kk=${__kmin}; kk<=${__kend}; kk+=${__kstep} )) ; do + KKs="${KKs} ${kk}" + local __tmpAngle=`echo ${kk} ${__angleStep} | awk '{print ($1*$2)}'` + Angles="${Angles} ${__tmpAngle}" + if ${__lbackcomp} ; then + local __kang=`echo ${kk} ${__kmax} | awk '{print ($1/($2+1))}'` + else + local __kang=`echo ${kk} ${__kmax} | awk '{printf ("%.16E",$1/($2+1))}'` + fi + kAngs="${kAngs} ${__kang}" + done else - local __angleStep=`echo ${__totAngle} ${__kmax} | awk '{printf ("%.16E",$1/($2+1))}'` + KKs="${KKs} $(seq 1 ${__kmax})" + local __angleStep=`echo ${__totAngle} ${__kmax} | awk '{print ($1/($2+1))}'` + Angles="${Angles} $(sixdeskAllFloats 0 ${__totAngle} ${__angleStep} True)" + local __kAngleStep=`echo ${__kmax} | awk '{printf ("%.18f",1.0/($1+1))}'` + kAngs="${kAngs} $(sixdeskAllFloats 0.0 1.0 ${__kAngleStep} True)" fi - - for (( kk=${__kmin}; kk<=${__kend}; kk+=${__kstep} )) ; do - KKs="${KKs} ${kk}" - local __tmpAngle=`echo ${kk} ${__angleStep} | awk '{print ($1*$2)}'` - Angles="${Angles} ${__tmpAngle}" - if ${__lbackcomp} ; then - local __kang=`echo ${kk} ${__kmax} | awk '{print ($1/($2+1))}'` - else - local __kang=`echo ${kk} ${__kmax} | awk '{printf ("%.16E",$1/($2+1))}'` - fi - kAngs="${kAngs} ${__kang}" - done KKs_ampl=( ${KKs_ampl} ${__kend} ) fi @@ -803,35 +812,64 @@ sixdeskDefinePythonPath(){ export PATH=${1}:${PATH} sixdeskmess 2 "Setting python to ${1} in PATH env var" fi - sixdeskmess -1 "--> python set to: `which python` - version: `python --version`" + sixdeskmess -1 "Python set to: `which python` - version: `python --version 2>&1`" +} +sixdeskGetCurretPaths(){ + tmpDir=`readlink -f $PWD` + tmpDir=`dirname ${tmpDir}` + tmpWorkspace=`basename ${tmpDir}` + if [[ "${tmpDir}" == *"scratch"* ]] ; then + # scratchdir IS in the path + tmpDir=`dirname ${tmpDir}` + tmpScratchName=`basename ${tmpDir}` + tmpDir=`dirname ${tmpDir}` + tmpBaseDir=${tmpDir} + tmpScratchDir=\$basedir/${tmpScratchName} + tmpTrackDir=\$scratchdir/\$workspace + tmpSixtrackInput=\$scratchdir/sixtrack_input/\$workspace/\$LHCDescrip + tmpSixdeskWork=\$scratchdir/work/\$workspace/\$LHCDescrip + 
tmpCronLogs=\$scratchdir/cronlogs/\$workspace + tmpSixdeskLogs=\$scratchdir/sixdesklogs/\$workspace + else + # scratchdir is NOT in the path + tmpDir=`dirname ${tmpDir}` + tmpBaseDir=${tmpDir} + tmpScratchDir=\$basedir/\$workspace + tmpTrackDir=\$basedir/\$workspace + tmpSixtrackInput=\$scratchdir/sixtrack_input/\$LHCDescrip + tmpSixdeskWork=\$scratchdir/work/\$LHCDescrip + tmpCronLogs=\$scratchdir/cronlogs + tmpSixdeskLogs=\$scratchdir/sixdesklogs + fi } sixdeskDefineUserTree(){ # define the tree in the userspace - - # input variables are the starting points: - # $1: root dir (ie $basedir in sixdeskenv) - # $2: scratch dir (ie $scratchdir in sixdeskenv) - # $3: workspace name (ie $workspace in sixdeskenv) + # get current paths + sixdeskGetCurretPaths + # basic paths (back-compatibility) - [ -n "${basedir}" ] || export basedir=$1 # eg /afs/cern.ch/user/$initial/$LOGNAME - [ -n "${scratchdir}" ] || export scratchdir=$2 # eg $basedir/scratch0 - [ -n "${trackdir}" ] || export trackdir=$scratchdir/$3 # eg $scratchdir/w2 - [ -n "${sixdeskhome}" ] || export sixdeskhome=$basedir/$3/sixjobs # eg $trackdir/sixjobs - [ -n "${sixdeskhomeStudies}" ] || export sixdeskhomeStudies=$sixdeskhome/studies - - # - [ -n "${sixtrack_input}" ] || export sixtrack_input=$scratchdir/sixtrack_input/$workspace/$LHCDescrip - [ -n "${sixdesklogs}" ] || export sixdesklogs=$scratchdir/sixdesklogs/$workspace - [ -n "${sixdesklogdir}" ] || export sixdesklogdir=$sixdesklogs/$LHCDescrip - [ -n "${cronlogs}" ] || export cronlogs=$scratchdir/cronlogs/$workspace - [ -n "${cronlogdir}" ] || export cronlogdir=$cronlogs/$LHCDescrip + [ -n "${workspace}" ] || export workspace=${tmpWorkspace} + [ -n "${basedir}" ] || export basedir=${tmpBaseDir} + [ -n "${scratchdir}" ] || export scratchdir=${tmpScratchDir} + [ -n "${trackdir}" ] || export trackdir=${tmpTrackDir} + [ -n "${sixtrack_input}" ] || export sixtrack_input=${tmpSixtrackInput} + [ -n "${sixdeskwork}" ] || export sixdeskwork=${tmpSixdeskWork} + + # dependent paths + [ -n "${sixdeskhome}" ] || export sixdeskhome=${basedir}/${workspace}/sixjobs + [ -n "${sixdeskhomeStudies}" ] || export sixdeskhomeStudies=${sixdeskhome}/studies + [ -n "${sixdeskstudy}" ] || export sixdeskstudy=$sixdeskhomeStudies/$LHCDescrip [ -n "${sixdesktrack}" ] || export sixdesktrack=$trackdir/track [ -n "${sixdesktrackStudy}" ] || export sixdesktrackStudy=$sixdesktrack/$LHCDescrip - [ -n "${sixdeskwork}" ] || export sixdeskwork=$scratchdir/work/$workspace/$LHCDescrip + + # logs/jobs paths + [ -n "${sixdesklogs}" ] || export sixdesklogs=${tmpSixdeskLogs} + [ -n "${sixdesklogdir}" ] || export sixdesklogdir=$sixdesklogs/$LHCDescrip + [ -n "${cronlogs}" ] || export cronlogs=${tmpCronLogs} + [ -n "${cronlogdir}" ] || export cronlogdir=$cronlogs/$LHCDescrip [ -n "${sixdeskjobs}" ] || export sixdeskjobs=$sixdeskwork/$sixdeskplatform"jobs" [ -n "${sixdeskjobs_logs}" ] || export sixdeskjobs_logs=$sixdeskjobs/jobs_logs - [ -n "${sixdeskstudy}" ] || export sixdeskstudy=$sixdeskhomeStudies/$LHCDescrip } sixDeskDefineMADXTree(){ local __SCRIPTDIR=$1 @@ -1544,3 +1582,23 @@ function sixdeskCompareVersions(){ # 0: true ; 1: false echo $1 $2 | awk '{print ($1>=$2)}' } +function sixdeskConfirmExit(){ + local __iExit=-1 + while [ ${__iExit} -lt 0 ] ; do + sixdeskmess -1 "Continue? [y/n]" + read answer + case ${answer} in + [yY] | [yY][Ee][Ss] ) + sixdeskmess -1 "Continuing..." + __iExit=0 + ;; + [nN] | [n|N][O|o] ) + sixdeskmess -1 "Interrupted" + __iExit=1 + ;; + *) sixdeskmess -1 "Invalid input!" 
+ ;; + esac + done + return ${__iExit} +} diff --git a/utilities/bash/dot_scan b/utilities/bash/dot_scan new file mode 100755 index 0000000..08e2eb7 --- /dev/null +++ b/utilities/bash/dot_scan @@ -0,0 +1,198 @@ +#!/bin/bash + +############################ build study/mask names ######################### + +function make_array_with_delimiter(){ + # reads an array and returns a string with the array entries separated by the delimiter "|" + # example: + # In: make_array_with_delimiter "1 2 3" + # Out: |1|2|3| + # this function is used as a helper function, because bash can't handle arrays of arrays + + local _array=${1} + local _string="" + local _element + + for _element in ${_array[@]}; do + _string="${_string}|${_element}" + done + _string="${_string}|" + # output result + echo ${_string} +} + +function make_mask_names_values() { + # create two arrays containing the names and the values of the mask files, + # depending on the input given in the file defining the scans + + scan_vars=(${scan_variables}) + + local _result_names="${scan_prefix}" + local _result_values="%" + local _arg + local _x + local _xx + local _r + local _new + + local __iArg=0 + for _arg in "${@:1}" ; do # skip the first arg ( function name ) + IFS='|' read -ra _arg <<< "$_arg" # split at delimiter "|" + _arg="${_arg[@]}" + if [[ ! -z "${_arg// }" ]] ; then # test if the argument is not whites only + # - names + _new="" + for _r in ${_result_names} ; do # take all the old strings... + for _x in $_arg ; do + _xx="${scan_vars[__iArg]}_${_x}" + _new="$_new ${_r}_${_xx}" # ...and append the new parameter + done + done + _result_names=${_new:1} # strip the leading space + # - values + _new="" + for _r in ${_result_values} ; do # take all the old strings... + for _x in $_arg ; do + _new="$_new ${_r}%${_x}" # ...and append the new parameter + done + done + _result_values=${_new:1} # strip the leading space + fi + ((__iArg++)) + done + + scan_studies=${_result_names} + scan_studies=(${scan_studies}) + mask_values=${_result_values} + mask_values=(${mask_values}) + +} + +function get_study_names(){ + if ! 
${scan_masks}; then + get_arguments_make_mask + make_mask_names_values ${arguments} + else + sixdeskmess -1 "scan_masks set to TRUE, using defined list of mask names" + scan_studies=(${scan_studies}) + fi +} + +function get_arguments_make_mask(){ + # creates the argument required for make_mask_value and make_mask_names + local _v + local _V + local _Z + local _Y + local _arg + + arguments="" + for _v in ${scan_variables}; do + _V=_v + eval "_Z=scan_vals_\$$_V" # create the variable scan_vals_B + eval "_Y=\$$_Z" # read the value of the variable scan_vals_B + _arg=$(make_array_with_delimiter "${_Y}") + arguments="${arguments} ${_arg}" + done +} + +############################ generate study ############################ + +function set_study(){ + local _study=$1 + sed -i -e "s/export LHCDescrip=.*/export LHCDescrip=${_study}/" sixdeskenv + if [ "$2" == "-l" ] ; then + ${SCRIPTDIR}/bash/set_env.sh -s -l + else + ${SCRIPTDIR}/bash/set_env.sh -s + fi +} + +############################ generate mask files ############################ + +function generate_mask_file(){ + # generate mask files from a template in mask dir + # function to be called in sixjobs + # global vars: iStudy + + local _val + local _j + local _placeholders=(${scan_placeholders}) + local _placeholder + local _study=$1 + local _orimask=mask/${scan_prefix}.mask + local _tmpmask=mask/${_study}.mask + + # read the mask variable values for the particular studies + mask_vals=${mask_values[$iStudy]:2} + # split the string + IFS='%' read -a values <<< "${mask_vals}" + + # replace the individual placeholders in the tmp mask file + local __sedCommand="" + for (( _j=0; _j<${#values[@]}; _j++ )) ; do + # the placeholder to be substituted in the mask file + _placeholder=${_placeholders[${_j}]} + # the value this placeholder shall be replaced with + _val=${values[${_j}]} + # concatenate actual sed commands + __sedCommand="${__sedCommand} -e \"s?${_placeholder}?${_val}?g\"" + done + eval "sed ${__sedCommand} ${_orimask} > ${_tmpmask}" + sixdeskmess -1 "Generated mask file: ${_tmpmask}" +} + +function check_mask_for_placeholders(){ + # check presence of placeholder and ask what to do in case + # it is not found + local _orimask=mask/${scan_prefix}.mask + local _placeholder + for _placeholder in ${scan_placeholders} ; do + if ! grep -q "${_placeholder}" "${_orimask}"; then + sixdeskmess -1 "WARNING: Placeholder ${_placeholder} not found in original mask file ${_orimask}!" + sixdeskConfirmExit + if [ $? 
-ne 0 ] ; then + sixdeskmess -1 "Please modify mask file or check ${scanDefinitionsFileName}"; + exit 1 + fi + fi + done +} + +############################ actual loop ############################ + +function scan_loop(){ + local __tmpCommand=$1 + local __lSetEnv=$2 + local __llocalfort3=$3 + + iStudy=0 + for study in ${scan_studies[@]} ; do + echo "" + printf "#%.0s" {1..80} + echo "" + # set_env.sh beforehand + if ${__lSetEnv} ; then + if ${__llocalfort3} ; then + echo "--> running: ${SCRIPTDIR}/bash/set_env.sh -l -d ${study}" + ${SCRIPTDIR}/bash/set_env.sh -l -d ${study} + else + echo "--> running: ${SCRIPTDIR}/bash/set_env.sh -d ${study}" + ${SCRIPTDIR}/bash/set_env.sh -d ${study} + fi + fi + # command (-d ${study} not used in case of functions) + if ${__llocalfort3} ; then + echo "--> running command: ${__tmpCommand} ${study} -l" + ${__tmpCommand} ${study} -l + else + echo "--> running command: ${__tmpCommand} ${study}" + ${__tmpCommand} ${study} + fi + # get ready for new iteration + ((iStudy++)) + done + echo "" + printf "#%.0s" {1..80} + echo "" +} diff --git a/utilities/bash/mad6t.sh b/utilities/bash/mad6t.sh index 3b0173b..6a3150e 100755 --- a/utilities/bash/mad6t.sh +++ b/utilities/bash/mad6t.sh @@ -147,8 +147,12 @@ function submit(){ rm -f jobs.list fi + [ -e ${sixtrack_input}/mad6t1.sh ] || cp -p $lsfFilesPath/mad6t1.sh ${sixtrack_input} + [ -e ${sixtrack_input}/mad6t.sh ] || cp -p $lsfFilesPath/mad6t.sh ${sixtrack_input} + [ -e ${sixtrack_input}/mad6t.sub ] || cp -p ${SCRIPTDIR}/templates/htcondor/mad6t.sub ${sixtrack_input} + # Loop over seeds - mad6tjob=$lsfFilesPath/mad6t1.sh + mad6tjob=${sixtrack_input}/mad6t1.sh for (( iMad=$istamad ; iMad<=$iendmad ; iMad++ )) ; do # clean away any existing results for this seed @@ -186,14 +190,13 @@ function submit(){ echo mad6t_${iMad}.sh >> jobs.list fi fi - mad6tjob=$lsfFilesPath/mad6t.sh + mad6tjob=${sixtrack_input}/mad6t.sh done fi if [ "$sixdeskplatform" == "htcondor" ] && ! ${linter} ; then - cp ${SCRIPTDIR}/templates/htcondor/mad6t.sub . - sed -i "s#^+JobFlavour =.*#+JobFlavour = \"${madHTCq}\"#" mad6t.sub - condor_submit -batch-name "mad/$workspace/$LHCDescrip" mad6t.sub + sed -i "s#^+JobFlavour =.*#+JobFlavour = \"${madHTCq}\"#" ${sixtrack_input}/mad6t.sub + condor_submit -batch-name "mad/$workspace/$LHCDescrip" ${sixtrack_input}/mad6t.sub if [ $? -eq 0 ] ; then rm -f jobs.list fi @@ -560,8 +563,6 @@ fi if ${loutform} ; then sixdesklevel=${sixdesklevel_option} fi -# build paths -sixDeskDefineMADXTree ${SCRIPTDIR} # - define locking dirs lockingDirs=( "$sixdeskstudy" "$sixtrack_input" ) diff --git a/utilities/bash/run_six.sh b/utilities/bash/run_six.sh index 26f2338..3a29ef0 100755 --- a/utilities/bash/run_six.sh +++ b/utilities/bash/run_six.sh @@ -2167,12 +2167,6 @@ lrestartTune=false lrestartAmpli=false lrestartAngle=false -# - define user tree -sixdeskDefineUserTree $basedir $scratchdir $workspace - -# - boinc variables -sixDeskSetBOINCVars - # - preliminary checks preliminaryChecksRS if [ $? 
-gt 0 ] ; then @@ -2454,7 +2448,7 @@ else iTotalAmplitudeSteps=${#allAmplitudeSteps[@]} sixdeskmess -1 "- Amplitudes: from $ns1l to $ns2l by $nsincl - total: ${iTotalAmplitudeSteps} amplitude steps;" # generate array of angles (it returns KKs, Angles and kAngs, and KKs_ampl) - sixdeskAllAngles $kinil $kendl $kmaxl $kstep $ampstart $ampfinish $lbackcomp ${lReduceAngsWithAmplitude} ${totAngle} ${ampFactor} + sixdeskAllAngles $kinil $kendl $kmaxl $kstep $ampstart $ampfinish $lbackcomp ${lReduceAngsWithAmplitude} ${totAngle} iTotalAngles=${#KKs[@]} sixdeskmess -1 "- Angles: $kinil, $kendl, $kmaxl by $kstep - total: ${iTotalAngles} angles" if ${lReduceAngsWithAmplitude} ; then @@ -2467,7 +2461,7 @@ else iTotalAmplitudeSteps=1 sixdeskmess -1 "- Amplitudes: from $ns1s to $ns2s by $nss - total: ${iTotalAmplitudeSteps} amplitude steps;" # generate array of angles (it returns KKs, Angles and kAngs, and KKs_ampl) - sixdeskAllAngles $kini $kend $kmax $kstep $ampstart $ampfinish $lbackcomp ${lReduceAngsWithAmplitude} ${totAngle} ${ampFactor} + sixdeskAllAngles $kini $kend $kmax $kstep $ampstart $ampfinish $lbackcomp ${lReduceAngsWithAmplitude} ${totAngle} iTotalAngles=${#KKs[@]} sixdeskmess -1 "- Angles: $kini, $kend, $kmax by $kstep - total: ${iTotalAngles} angles" let iTotal=${iTotalMad}*${iTotalTunes}*${iTotalAmplitudeSteps}*${iTotalAngles} diff --git a/utilities/bash/run_status b/utilities/bash/run_status index d11ac76..2bdc40b 100755 --- a/utilities/bash/run_status +++ b/utilities/bash/run_status @@ -11,6 +11,55 @@ # See if we have any results or report on a specific task # +function how_to_use() { + cat <] [] + to retrieve boinc results + + options (optional) + -d study name (when running many jobs in parallel) + NB: this option has been implemented to keep the same + interface as for other main scripts. In case the + first optional argument is given, that one will + overwrite the value of this option; + -U unlock dirs necessary to the script to run + PAY ATTENTION when using this option, as no check whether the lock + belongs to this script or not is performed, and you may screw up + processing of another script + -B break backward-compatibility + for the moment, this sticks only to job names + -n renew kerberos token every n jobs (default: ${NrenewKerberosDef}) + -o define output (preferred over the definition of sixdesklevel in sixdeskenv) + 0: only error messages and basic output + 1: full output + 2: extended output for debugging + +EOF +} + +function check_output_option(){ + local __selected_output_valid + __selected_output_valid=false + + case ${OPTARG} in + ''|*[!0-2]*) __selected_output_valid=false ;; + *) __selected_output_valid=true ;; + esac + + if ! 
${__selected_output_valid}; then + echo "ERROR: Option -o requires the following arguments:" + echo " 0: only error messages and basic output [default]" + echo " 1: full output" + echo " 2: extended output for debugging" + exit + else + loutform=true + sixdesklevel_option=${OPTARG} + fi + +} + # ------------------------------------------------------------------------------ # preliminary to any action # ------------------------------------------------------------------------------ @@ -22,7 +71,110 @@ if [ -z "${SCRIPTDIR}" ] ; then fi # ------------------------------------------------------------------------------ -source ${SCRIPTDIR}/bash/dot_env +lbackcomp=true +lunlockRunStatus=false +NrenewKerberosDef=2000 +NrenewKerberos=${NrenewKerberosDef} +optArgCurrStudy="-s" +doNotOverwrite="-e" +optArgCurrPlatForm="" +unlockSetEnv="" +loutform=false + +# get options (heading ':' to disable the verbose error handling) +while getopts ":hBo:n:d:U" opt ; do + case $opt in + h) + how_to_use + exit 1 + ;; + B) + # use whatever breaks backward compatibility + lbackcomp=false + ;; + n) + # renew kerberos token every N jobs + NrenewKerberos=${OPTARG} + # check it is actually a number + let NrenewKerberos+=0 + if [ $? -ne 0 ] 2>/dev/null; then + how_to_use + echo "-n argument option is not a number!" + exit 1 + fi + ;; + o) + # output option + check_output_option + ;; + d) + # the user is requesting a specific study + optArgCurrStudy="-d ${OPTARG}" + ;; + U) + # unlock currently locked folder + lunlockRunStatus=true + unlockSetEnv="-U" + ;; + :) + how_to_use + echo "Option -$OPTARG requires an argument." + exit 1 + ;; + \?) + how_to_use + echo "Invalid option: -$OPTARG" + exit 1 + ;; + esac +done +shift "$(($OPTIND - 1))" + +# - load environment +# NB: workaround to get getopts working properly in sourced script +OPTIND=1 + +# optional arguments +if [ -n "$1" ] ; then + optArgCurrStudy="-d $1" +fi +if [ -n "$2" ] ; then + optArgCurrPlatForm="-p $2" +fi + + +# Even if called with a Study ($1) and a Platform ($2) +# we can now use dot_env +echo "" +printf "=%.0s" {1..80} +echo "" +echo "--> sourcing set_env.sh" +printf '.%.0s' {1..80} +echo "" +source ${SCRIPTDIR}/bash/set_env.sh ${optArgCurrStudy} ${optArgCurrPlatForm} ${unlockSetEnv} ${doNotOverwrite} +printf "=%.0s" {1..80} +echo "" +echo "" + +# locking dirs +lockingDirs=( $sixdeskstudy ) + +if ${loutform} ; then + sixdesklevel=${sixdesklevel_option} +fi + +# unlock +if ${lunlockRunStatus} ; then + sixdeskunlockAll +fi + +# setting traps +trap "sixdeskexit 199" EXIT + +# . break backward compatibility +if ! ${lbackcomp} ; then + sixdeskmess -1 " --> flag for backward compatibility de-activated, as requested by user!" 
+fi #MACRO mymess 0 Checking Study $LHCDescrip sixdeskmesslevel=0 diff --git a/utilities/bash/scans.sh b/utilities/bash/scans.sh new file mode 100755 index 0000000..f1da48c --- /dev/null +++ b/utilities/bash/scans.sh @@ -0,0 +1,175 @@ +#!/bin/bash + +function how_to_use() { + cat < create and initialise a new workspace in the current dir - with the workspace, please specify also the scratch name, eg + ${templateInputFilesPath} + -N create and initialise a new workspace in the current dir; + you can also specify the scratch name with the workspace, eg: -N scratch0/wTest - the workspace will be populated with template files as checked-out - with git from repo: - ${origRepoForSetup} + the scratch can be omitted - it will be simply ignored; + the workspace will be populated with template files as from +EOF + if ${lGitIsThere} ; then + cat </dev/null | wc -l` -eq 1 ] ; then + lGitIsThere=true + cd ${REPOPATH} + # origRepoForSetup='https://github.com/amereghe/SixDesk.git' + origRepoForSetup=`git remote show origin | grep Fetch | awk '{print ($NF)}'` + # origBranchForSetup='newWorkspace' + origBranchForSetup=`git branch | grep '^*' | awk '{print ($2)}'` + cd - 2>&1 > /dev/null +else + lGitIsThere=false +fi + # - necessary input files necessaryInputFiles=( sixdeskenv sysenv ) @@ -196,19 +224,17 @@ lcrwSpace=false loverwrite=true lverbose=false llocalfort3=false +lScanDefs=false lunlock=false +lgit=false currPlatform="" currStudy="" tmpPythonPath="" -origRepoForSetup='https://github.com/amereghe/SixDesk.git' -origBranchForSetup=`git --git-dir=${SCRIPTDIR}/../.git branch | grep '*' | awk '{print ($NF)}'` # variables set based on parsing fort.3.local -nActions=0 - # get options (heading ':' to disable the verbose error handling) -while getopts ":hsvld:ep:P:nN:U" opt ; do +while getopts ":hsvlcd:ep:P:nN:Ug" opt ; do case $opt in h) how_to_use @@ -217,23 +243,26 @@ while getopts ":hsvld:ep:P:nN:U" opt ; do s) # set study (new/update/switch) lset=true - let nActions+=1 ;; d) # load existing study lload=true currStudy="${OPTARG}" - let nActions+=1 ;; n) # copy input files from template dir lcptemplate=true - let nActions+=1 ;; N) # create workspace lcrwSpace=true wSpaceName="${OPTARG}" + # use fort.3.local + llocalfort3=true + # use scan_definitions + lScanDefs=true + # copy input files from template dir + lcptemplate=true ;; e) # do not overwrite @@ -247,6 +276,10 @@ while getopts ":hsvld:ep:P:nN:U" opt ; do # use fort.3.local llocalfort3=true ;; + c) + # use scan_definitions + lScanDefs=true + ;; P) # the user is requesting a specific path to python tmpPythonPath="${OPTARG}" @@ -259,6 +292,14 @@ while getopts ":hsvld:ep:P:nN:U" opt ; do # verbose lverbose=true ;; + g) + # use git sparse checkout to set-up workspace + if ${lGitIsThere} ; then + lgit=true + else + echo " --> git is NOT there: ignoring -g option" + fi + ;; :) how_to_use echo "Option -$OPTARG requires an argument." @@ -278,18 +319,33 @@ if ! ${lset} && ! ${lload} && ! ${lcptemplate} && ! ${lunlock} && ! ${lcrwSpace} how_to_use echo "No action specified!!! aborting..." exit -elif [ ${nActions} -gt 1 ] ; then +elif ${lset} && ${lload} ; then + how_to_use + echo "Cannot set and load study at the same time!!! aborting..." + exit +elif ${lcptemplate} && ${lset} ; then + how_to_use + echo "Cannot copy templates and set study at the same time!!! aborting..." + exit +elif ${lcptemplate} && ${lload} ; then how_to_use - echo "Please choose only one action!!! aborting..." + echo "Cannot copy templates and load study at the same time!!! aborting..." 
exit fi -# - clean options in case of brand new study -if ${lcptemplate} ; then +# - de-activate currPlatform in case it is not used +if ${lcptemplate} || ${lcrwSpace} ; then if [ -n "${currPlatform}" ] ; then - echo "--> brand new study: -p option with argument ${currPlatform} is switched off." + echo "--> copy templates / creation of workspace: -p option with argument ${currPlatform} is switched off." currPlatform="" fi fi +# - check copy templates: +if ${lcptemplate} ; then + if ${lgit} ; then + lScanDefs=true + llocalfort3=true + fi +fi # - options if [ -n "${currStudy}" ] ; then echo "--> User required a specific study: ${currStudy}" @@ -301,7 +357,21 @@ if ${llocalfort3} ; then echo "" echo "--> User requested inclusion of fort.3.local" echo "" - necessaryInputFiles=( sixdeskenv sysenv fort.3.local ) + necessaryInputFiles=( "${necessaryInputFiles[@]} fort.3.local" ) +fi +if ${lScanDefs} ; then + if ${lcptemplate} ; then + echo "" + echo "--> User requested inclusion of scan_definitions" + echo "" + necessaryInputFiles=( "${necessaryInputFiles[@]} scan_definitions" ) + else + echo "" + echo "--> Inclusion of scan_definitions avaiable only in case of copy" + echo "--> of template files. De-activating it." + echo "" + lScanDefs=false + fi fi # ------------------------------------------------------------------------------ @@ -324,14 +394,9 @@ sixdeskSetLocalNodeStuff # - set up new workspace if ${lcrwSpace} ; then - if [ `echo "${wSpaceName}" | awk 'BEGIN{FS="/"}{print (NF)}'` -ne 2 ] ; then - how_to_use - echo "invalid workspace specification!" - exit 1 - fi sixdeskmess -1 "requested generation of new workspace:" sixdeskmess -1 "- current path: $PWD" - sixdeskmess -1 "- /: ${wSpaceName}" + sixdeskmess -1 "- workspace path: ${wSpaceName}" if [ -d ${wSpaceName} ] ; then how_to_use sixdeskmess -1 "workspace ${wSpaceName} already exists!" @@ -339,8 +404,8 @@ if ${lcrwSpace} ; then else mkdir -p ${wSpaceName} cd ${wSpaceName} - if [ `which git 2>/dev/null | wc -l` -eq 1 ] ; then - sixdeskmess -1 "--> using git to initialise sixjobs" + if ${lgit} ; then + sixdeskmess -1 "Using git to initialise sixjobs dir - repo: ${origRepoForSetup} - branch: ${origBranchForSetup}" git init git config core.sparseCheckout true cat > .git/info/sparse-checkout < initialising sixjobs from ${origDir}" + origDir=${REPOPATH}/sixjobs + sixdeskmess -1 "Initialising sixjobs from ${origDir}" cp -r ${origDir} . fi cd - 2>&1 > /dev/null @@ -362,9 +427,10 @@ EOF touch studies/sixdesklock cd - 2>&1 > /dev/null fi - [ -e `basename ${wSpaceName}` ] || ln -s ${wSpaceName} - if [ ${nActions} -eq 0 ] ; then - sixdeskmess -1 "requested only initilising workspace. Exiting..." + # do we really need this link? + [[ "${wSpaceName}" != *"scratch"* ]] || ln -s ${wSpaceName} + if ! ${lset} && ! ${lload} && ! ${lcptemplate} && ! ${lunlock} ; then + sixdeskmess -1 "requested only initialising workspace. Exiting..." exit 0 fi cd ${wSpaceName}/sixjobs @@ -402,9 +468,6 @@ fi # - basic checks (i.e. dir structure) basicChecks -if [ $? -gt 0 ] ; then - sixdeskexit 4 -fi # ------------------------------------------------------------------------------ # actual operations @@ -415,34 +478,60 @@ sixdesklockAll if ${lcptemplate} ; then - sixdeskmess -1 "copying here template files for brand new study" - sixdeskmess -1 "template input files from ${SCRIPTDIR}/templates/input" - - for tmpFile in ${necessaryInputFiles[@]} ; do - # preserve original time stamps - cp -p ${SCRIPTDIR}/templates/input/${tmpFile} . 
- sixdeskmess 2 "${tmpFile}" - done - tmpDir=`readlink -f $PWD` - tmpDir=`dirname ${tmpDir}` - workspace=`basename ${tmpDir}` - tmpDir=`dirname ${tmpDir}` - scratchDir=${tmpDir} - tmpDir=`dirname ${tmpDir}` - baseDir=${tmpDir} - sed -i -e "s#^export workspace=.*#export workspace=${workspace}#" \ - -e "s#^export basedir=.*#export basedir=${baseDir}#" \ - -e "s#^export scratchdir=.*#export scratchdir=${scratchDir}#" sixdeskenv + if ${lgit} ; then + sixdeskmess -1 "Using git to get input files in sixjobs - repo: ${origRepoForSetup} - branch: ${origBranchForSetup}" + presDir=$PWD + cd ../ + git branch > /dev/null 2>&1 + if [ $? -ne 0 ] ; then + # set up git repo + git init + git remote add -f origin ${origRepoForSetup} + fi + git config core.sparseCheckout true + if [ `grep 'sixjobs/\*' .git/info/sparse-checkout 2> /dev/null | wc -l` -eq 0 ] ; then + echo 'sixjobs/*' >> .git/info/sparse-checkout + git fetch origin ${origBranchForSetup} + git checkout ${origBranchForSetup} + else + rm -f ${necessaryInputFiles[@]} + git fetch origin ${origBranchForSetup} + git reset --hard + fi + cd ${presDir} + else + sixdeskmess -1 "copying here template files for brand new study" + sixdeskmess -1 "template input files from ${templateInputFilesPath}" + + for tmpFile in ${necessaryInputFiles[@]} ; do + # preserve original time stamps + cp -p ${templateInputFilesPath}/${tmpFile} . + sixdeskmess 2 "${tmpFile}" + done + fi + + # get current paths: + sixdeskGetCurretPaths + sed -i -e "s#^export workspace=.*#export workspace=${tmpWorkspace}#" \ + -e "s#^export basedir=.*#export basedir=${tmpBaseDir}#" \ + -e "s#^export scratchdir=.*#export scratchdir=${tmpScratchDir}#" \ + -e "s#^export trackdir=.*#export trackdir=${tmpTrackDir}#" \ + -e "s#^export sixtrack_input=.*#export sixtrack_input=${tmpSixtrackInput}#" \ + sixdeskenv + sed -i -e "s#^export sixdeskwork=.*#export sixdeskwork=${tmpSixdeskWork}#" \ + -e "s#^export cronlogs=.*#export cronlogs=${tmpCronLogs}#" \ + -e "s#^export sixdesklogs=.*#export sixdesklogs=${tmpSixdeskLogs}#" \ + sysenv else # - make sure we have sixdeskenv/sysenv/fort.3.local files sixdeskInspectPrerequisites ${lverbose} $envFilesPath -s ${necessaryInputFiles[@]} if [ $? -gt 0 ] ; then - sixdeskmess -1 "not all necessary files are in $envFilesPath dir:" - sixdeskmess -1 "missing files: ${necessaryInputFiles[@]}" - sixdeskmess -1 "status of dir:" - \ls -ltrh $envFilesPath + sixdeskmess -1 "not all necessary input files are in $envFilesPath dir:" + for necInpFile in ${necessaryInputFiles[@]} ; do + sixdeskInspectPrerequisites true $envFilesPath -s ${necInpFile} + done sixdeskexit 4 fi @@ -459,13 +548,22 @@ else # - set further envs setFurtherEnvs + # - define user tree + sixdeskDefineUserTree + + # - boinc variables + sixDeskSetBOINCVars + + # - MADX variables + sixDeskDefineMADXTree ${SCRIPTDIR} + # - save input files if ${loverwrite} ; then __lnew=false if ${lset} ; then - if ! [ -d studies/${LHCDescrip} ] ; then + if ! 
[ -d ${sixdeskstudy} ] ; then __lnew=true - mkdir studies/${LHCDescrip} + mkdir ${sixdeskstudy} fi fi @@ -490,6 +588,15 @@ else # updating an existing study sixdeskmess -1 "Updated sixdeskenv/sysenv(/fort.3.local) for $LHCDescrip" fi + # copy necessary .sub/.sh files + sixdeskmess -1 "if absent, copying necessary .sub/.sh files for MADX run in ${sixtrack_input}" + sixdeskmess -1 " and necessary .sub/.sh files for 6T runs in ${sixdeskwork}" + for tmpFile in htcondor/mad6t.sub lsf/mad6t.sh lsf/mad6t1.sh ; do + [ -e ${sixtrack_input}/`basename ${tmpFile}` ] || cp -p ${SCRIPTDIR}/templates/${tmpFile} ${sixtrack_input} + done + for tmpFile in htcondor/htcondor_run_six.sub htcondor/htcondor_job.sh ; do + [ -e ${sixdeskwork}/`basename ${tmpFile}` ] || cp -p ${SCRIPTDIR}/templates/${tmpFile} ${sixdeskwork} + done elif ${lload} ; then cp ${envFilesPath}/sixdeskenv . cp ${envFilesPath}/sysenv . @@ -559,7 +666,7 @@ if ! ${lcptemplate} ; then # - fs listquota echo "" if [[ "${sixdesktrack}" == "/afs"* ]] ; then - sixdeskmess -1 " --> fs listquota ${sixdesktrack}:" + sixdeskmess -1 "fs listquota ${sixdesktrack}:" tmpLines=`fs listquota ${sixdesktrack}` echo "${tmpLines}" # check, and in case raise a warning @@ -568,7 +675,7 @@ if ! ${lcptemplate} ; then sixdeskmess -1 "WARNING: your quota is above 90%!! pay attention to occupancy of the current study, in case of submission..." fi else - sixdeskmess -1 " --> df -Th ${sixdesktrack}:" + sixdeskmess -1 "df -Th ${sixdesktrack}:" \df -Th ${sixdesktrack} sixdeskmess -1 " the above output is at your convenience, for you to check disk space" fi diff --git a/utilities/bash/sixdb.sh b/utilities/bash/sixdb.sh index 600b688..8a0a5d5 100755 --- a/utilities/bash/sixdb.sh +++ b/utilities/bash/sixdb.sh @@ -14,6 +14,8 @@ function how_to_use() { options (optional): -P python path + -d study name + -a action NB: in case you want yo use an option, please leave the actual arguments to sixdb to the end of the terminal-line command; @@ -29,12 +31,20 @@ fi # initialisation of local vars pythonPath="" +action="" +studyName="" source ${SCRIPTDIR}/bash/dot_profile # get options (heading ':' to disable the verbose error handling) -while getopts ":hP:" opt ; do +while getopts ":hP:d:a:" opt ; do case $opt in + a) + action="${OPTARG}" + ;; + d) + studyName="${OPTARG}" + ;; h) how_to_use exit 1 @@ -73,4 +83,24 @@ if [ `sixdeskCompareVersions ${pyVer} ${requiredPyVersion}` -eq 0 ] ; then fi # actually call sixdb -python ${SCRIPTDIR}/externals/SixDeskDB/sixdb $* +if [ -z "${action}" ] && [ -z "${studyName}" ] ; then + python ${SCRIPTDIR}/externals/SixDeskDB/sixdb $* +elif [ -n "${action}" ] && [ -n "${studyName}" ] ; then + case ${action} in + load_dir ) + python ${SCRIPTDIR}/externals/SixDeskDB/sixdb studies/${studyName} ${action} + ;; + da | mad ) + python ${SCRIPTDIR}/externals/SixDeskDB/sixdb ${studyName}.db ${action} + ;; + *) + echo "Please specify a recognised action [load_dir|da|mad]" + exit 1 + ;; + esac +else + echo "Please specify both -d and -a options at the same time or nothing" + exit 1 +fi + +exit 0 diff --git a/utilities/doc/chGuidelines.tex b/utilities/doc/chGuidelines.tex new file mode 100644 index 0000000..d091060 --- /dev/null +++ b/utilities/doc/chGuidelines.tex @@ -0,0 +1,20 @@ +\chapter{Giudelines and Common Pitfalls} \label{Guidelines} + +\section{Choice of Platform} +HTCondor is convenient when: +\begin{enumerate} +\item results should be collected quickly. 
This can be the case when + the user has short time to collect data or the simulation set-up + is being defined. In the second case, indeed, one does not want to wait + too long for proceeding; +\item short or few jobs per study. This can be the case when re-submission + of selected cases is necessary, e.g.~to complete a study when few points + in the scan are missing; +\end{enumerate} + +The BOINC platform for volunteer computing is convenient in case of +large simulation campaigns, i.e.~when simulations are long or they +are in high number (e.g.~hundreds of thousands of jobs). + +Not more than 5 scripts per user running at the same time, for ease +of functionality of afs. diff --git a/utilities/doc/chIntroduction.tex b/utilities/doc/chIntroduction.tex new file mode 100644 index 0000000..4a5da37 --- /dev/null +++ b/utilities/doc/chIntroduction.tex @@ -0,0 +1,92 @@ +\chapter{Introduction} \label{Intro} +\SIXTRACK{}~\cite{SixTrack_user_manual,SixPub,sixtrackWeb} is a tracking +code for simulating transverse and longitudinal single particle beam dynamics. +Tracking is treated in a full six--dimensional way, including synchrotron +motion, in a symplectic manner. \SIXTRACK{} is widely used at CERN for +predicting dynamic aperture in large storage +rings~\cite{DynApeStudiesGiovannozzi2015} like the Large Hadron Collider +(LHC)~\cite{NomLHCdesignRepoV1} or its upgrade as foreseen by the +High Luminosity LHC Project (HL-LHC)~\cite{HLLHC_book,HLLHCtechDesRepo}. + +The code was extended~\cite{SixTrackForCollimation} to predict the +performance of a collimation system in terms of loss pattern and cleaning +inefficiency. Hence, \SIXTRACK{} is routinely used nowadays also +for addressing the performance of existing cleaning systems, +like those of the LHC~\cite{LHCCollSys} or of the Relativistic +Heavy Ion Collider (RHIC) at BNL~\cite{RHICcollSys}, or new ones. + +The code is in continuous development~\cite{HLLHCTrackWS,Amereghe6TColl}, +not only to improve the accuracy of the tracking models, but also including +the dynamics introduced by novel accelerator technologies, like electron +lenses or powered wires for the compensation of beam--beam long range effects +or christal collimation. + +The accelerator dynamic aperture is studied scanning +the beam phase space in presence of non-linear forces, like the kicks +introduced by long range beam--beam interactions or multipolar components +of magnetic fields. Moreover, the scan could be also performed varying +the machine configurations. The +\SIXDESK{}~\cite{SixDesk_original,SixDesk_updated} environment gives the +users of \SIXTRACK{} a mean to handle the large amount of files to be treated. + +\section{Overview} \label{Overview} +\begin{enumerate} +\item prepare the \emph{input files}, i.e.~\texttt{sixdeskenv}, +\texttt{sysenv} and \texttt{fort.3.local} +\item \SIXTRACK{} generate file describing the \emph{accelerator geometry} + with \MADX{} (\texttt{fort.2},\texttt{fort.8}, + \texttt{fort.16}); then, run \SIXTRACK{}; then, collect results + (\texttt{fort.10}) and analyse them via \SIXDB{}; +\item inner loops (i.e.~controlled by \texttt{sixdeskenv}) and outer loops + (i.e.~controlled by \texttt{scan\_definitions}); +\end{enumerate} + +\section{Work Flow} +Show workflow of production of results, both for BOINC (including ``processed'' +folder) and HTCondor. 
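+
+As an indicative sketch only (with \texttt{SixDeskTools} pointing to the
+SixDesk scripts, cf.~Chap.~\ref{NewFeatures}; the flags of \texttt{mad6t.sh}
+and \texttt{run\_six.sh} shown below are assumptions and may differ in the
+installed version, whereas the \texttt{set\_env.sh} and \texttt{sixdb.sh}
+options are those described in this document), a typical production cycle
+could look like:
+\begin{lstlisting}
+> cd <workspace>/sixjobs
+# edit sixdeskenv / sysenv, then initialise or update the study:
+> $SixDeskTools/utilities/bash/set_env.sh -s
+# generate the geometry files with MAD-X (flag assumed):
+> $SixDeskTools/utilities/bash/mad6t.sh -s
+# generate and submit the SixTrack jobs (flag assumed):
+> $SixDeskTools/utilities/bash/run_six.sh -a
+# retrieve results: run_results (BOINC) or run_status (HTCondor):
+> $SixDeskTools/utilities/bash/run_status
+# load the fort.10 files into SixDB and compute the dynamic aperture:
+> $SixDeskTools/utilities/bash/sixdb.sh -d <study_name> -a load_dir
+> $SixDeskTools/utilities/bash/sixdb.sh -d <study_name> -a da
+\end{lstlisting}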
+ +Retrieval of results depends on the submission platform: +\begin{itemize} +\item \texttt{run\_results}: BOINC +\item \texttt{run\_status}: HTCondor, HTBoinc +\end{itemize} + +\section{Input Files} +\begin{description} +\item[\texttt{sixdeskenv}] +\item[\texttt{sysenv}] +\item[\texttt{fort.3.local}] +\end{description} +Geometry files: \texttt{fort.2}, \texttt{fort.8}, \texttt{fort.16}. + + +\section{The BOINC Platform for Volunteering Computing} +BOINC vs local batch system (e.g.~HTCondor) + +\section{Pre-requisites} +\SIXDESK{} is native to \texttt{lxplus.cern.ch}. Hence, for running in such +an environment, the user does not need to set up anything. On the contrary, +in case of a local machine or other distributed resources, + +\begin{table}[h] +\begin{center} + \caption{Pre-Requisites} + \label{tab:Pre-Requisites} + \begin{tabular}{|l|l|} + \hline + \rowcolor{blue!30} + \textbf{Component} & \textbf{reason} \\ + \hline + kerberos & to renew/check credentials via \texttt{klist} and \texttt{kinit} \\ + \hline + AFS (local mount) & retrieval of optics files \\ + & submission to BOINC via spooldir\\ + \hline + HTCondor (local installation) & submission of jobs to local batch system \\ + \hline + \texttt{python2.7} & \texttt{SixDB} \\ + & computation of floating point scan parameters \\ + \hline + \end{tabular} +\end{center} +\end{table} diff --git a/utilities/doc/chNewFeatures.tex b/utilities/doc/chNewFeatures.tex new file mode 100644 index 0000000..da5b20f --- /dev/null +++ b/utilities/doc/chNewFeatures.tex @@ -0,0 +1,517 @@ +\chapter{New Features} \label{NewFeatures} +This chapter illustrates the new features implemented in \SIXDESK{} from +the user point of view. In general, all the new features have an introduction, +where the rationale and the working principles of the new feature are briefly +presented; afterwards, an essential look at user input and implementation +is given; each section is then closed by a step by step guide, with +examples. In the following, the environment variable +\texttt{SixDeskTools} is assumed and defined as +\begin{lstlisting} + SixDeskTools=/afs/cern.ch/project/sixtrack/SixDesk_utilities/dev +\end{lstlisting} + +\section{Initialisation of Workspace and Study} \label{Initialisation} +\begin{flushright} +\emph{Original work by: A.~Mereghetti} +\end{flushright} +It is useful to have a standard way of setting up workspace and study +from within the \SIXDESK{} script, so that the user does not have to +worry about proper template files and their synchronisation with a given +version of the scripts. + +\subsection{Step-by-Step Guide} +The main steps to properly set up the workspace and a study are: +\begin{enumerate} +\item set up the workspace, e.g. +\begin{lstlisting} +> $SixDeskTools/utilities/bash/set_env.sh -N scratch2/wMySpace +\end{lstlisting} +This action will set up the workspace, taking care of generating +the correct hierarchy between the \texttt{sixjobs} and the \texttt{scratch*} +directories. The action will create also the following tree structure: +\begin{lstlisting} +> cd wMySpace/sixjobs +> tree -h +. 
+|__ [4.0K] control_files +| |__ [1013] fort.3.mother1_col +| |__ [ 942] fort.3.mother1_inj +| |__ [2.0K] fort.3.mother2_col +| |__ [2.0K] fort.3.mother2_col_b2 +| |__ [2.0K] fort.3.mother2_inj +| |__ [2.0K] fort.3.mother2_inj_b2 +|__ [ 475] fort.3.local +|__ [4.0K] mask +| |__ [ 39K] hl10BaseB1.mask +| |__ [ 35K] hl13B1.mask +|__ [ 996] scan_definitions +|__ [8.2K] sixdeskenv +|__ [ 115] sixdesklock +|__ [4.0K] sixdeskTaskIds +|__ [4.0K] studies +| |__ [ 0] sixdesklock +|__ [4.3K] sysenv + +4 directories, 14 files +\end{lstlisting} +As it can be noted, this action takes care also of making available +to the user \emph{all} template input files (see following item +for details); +\item (optional) go into the \texttt{sixjobs} dir and download templates, + e.g. +\begin{lstlisting} +> cd wMySpace/sixjobs +> $SixDeskTools/utilities/bash/set_env.sh -n -l -c +\end{lstlisting} +This action will make available to the user the template +input files, i.e.~the \texttt{sixdeskenv}, \texttt{sysenv}, +\texttt{fort.3.local} (see Sec.~\ref{fort3local}) and +\texttt{scan\_definitions} (see Sec.~\ref{ExternalScans}) files. +This action will also update +the \texttt{workspace}, \texttt{basedir} and \texttt{scratchdir} +variables in the \texttt{sixdeskenv} file +with the correct values for the workspace just set up. +Please be aware that this operation will overwrite any +pre-existing file in the \texttt{sixjobs} dir. The templates +will be downloaded from +\begin{lstlisting} +${SixDeskTools}/utilities/templates/input +\end{lstlisting} +in this way, templates and scripts are synchronised. +The \texttt{-l} option triggers the download of the +\texttt{fort.3.local} (see Sec.~\ref{fort3local}) file, whereas +the \texttt{-c} option triggers the download of the +\texttt{scan\_definitions} (see Sec.~\ref{ExternalScans}) file. +This action is optional, as it is already performed by the +\texttt{-N} action; nevertheless, it can be performed on its +own and its usage has been shown. +\end{enumerate} + +When using either of the \texttt{-n} or \texttt{-N} actions, +if the user requests the \texttt{-g} option, then all the files +and directories will be downloaded with \texttt{git}. Hence, the +user can profit from the diffing tools available with \texttt{git}; +on the other hand, the disk usage grows (currently $\sim$50~MB +globally after either actions). + +\section{\texttt{fort.3.local}} \label{fort3local} +\begin{flushright} +\emph{Original work by: A.~Mereghetti} +\end{flushright} + +\section{Enforcing the Crossing Angle} \label{EnforceXingAngle} +\begin{flushright} +\emph{Original work by: D.~Pellegrini} \\ +\emph{Updated by: A.~Mereghetti} +\end{flushright} + +\section{Variable Number of Angles with Amplitude} \label{varAnglesWithAmpli} +\begin{flushright} +\emph{Original work by: D.~Pellegrini} \\ +\emph{Updated by: S.~Kostoglou, A.~Mereghetti} +\end{flushright} + +\section{External Scans} \label{ExternalScans} +\begin{flushright} +\emph{Original work by: P.~D.~Hermes, D.~Pellegrini} \\ +\emph{Updated by: A.~Mereghetti} +\end{flushright} +``Internal scans'' are the fundamental scans used to estimate the dynamic +aperture for a given machine configuration, mainly probing the beam phase +space via a linear scan in particle amplitude parametric in angle. +The internal scan also cover different error +configurations of the magnetic fields; optionally, the user can also request +to replicate the study varying the machine tune. 
+The internal scans are handled by \SIXDESK{} with the input coded +in the \texttt{sixdeskenv} file. +Table~\ref{tab:InternalScanParamters} summarises essential technical +characteristics of the internal scans. +\begin{table}[t] +\begin{center} + \caption{Essential technical + characteristics of the internal scans.} + \label{tab:InternalScanParamters} + \begin{tabular}{|c|l|l|} + \hline + \rowcolor{blue!30} + \textbf{Category} & \textbf{Variable} & \textbf{Comment} \\ + \hline + \multirowcell{2}{beam \\ phase space} + & amplitude & main loop in \SIXDESK{}, sub-loop in \SIXTRACK{} \\ + \cline{2-3} + & angle & loop in \SIXDESK{}, set point in \SIXTRACK{} \\ + \hline + \multirowcell{2}{machine \\ phase space} + & magnetic errors (seed) & loop in \SIXDESK{}, a \MADX{} job each\\ + \cline{2-3} + & tune & loop in \SIXDESK{}, each \SIXTRACK{} job matches the tune \\ + \hline + \end{tabular} +\end{center} +\end{table} + +A \SIXDESK{} study is exactly made of a complete internal scan, with all the +\SIXTRACK{} input files describing the machine (see Sec.~\ref{Overview}) +generated by a single \texttt{*.mask} file. The beam phase space is scanned +based on the settings in \texttt{sixdeskenv} file, and machine parameters like +the multipolar errors and the tune are treated as ``close'' variations of the +original study case. + +``External'' scans identify a set of additional scan parameters, not aimed at +exploring further the beam phase space but machine configurations of possible +interest -- something that could be loosely called machine ``phase space''. +Any point in +an external scan is an independent \SIXDESK{} study, and it can be handled +with the standard tools, since it has its own folders and files. +On the other hand, all the studies have something in common; hence, +it can be suitable to have a set of tools for treating all the +studies in an external scan the same way. + +External scans can be useful to explore the dependence of the dynamic +aperture on parameters like chromaticity, octupole current, and crossing +angles, for the same optics. Therefore, +these scans are based on a 1:1 relation between \MADX{} and +\SIXTRACK{}, i.e.~the knobs defined in \MADX{} are exported as they +are in \SIXTRACK{} by means of the geometry files (see Sec.~\ref{Overview}). +Hence, the user is responsible for assuring that the desired parameters can be +represented by \MADX{} and all the necessary settings are propagated +to \SIXTRACK{} via the geometry input files, +including magnet kicks as computed by the \MADX{} matching. +It should be noted that no parameter defining the internal scan +coded in the \texttt{sixdeskenv} input file is modified. + +Two types of external scans are available to the user: +\begin{enumerate} +\item a scan over a \emph{Cartesian grid} of an arbitrary number + of variables with given steps for each variable. All the studies + will be created and named after a reference machine configuration; + each study will inherit a unique set of values of the scanned variables, + which will appear explicitly in the study name + together with the values actually used; +\item a scan over a \emph{preset list of studies} which must exist. + This option is extremely useful when punctual operations are + required on a sub-set of studies composing the original scan. +\end{enumerate} + +\subsection{Input Files} +\begin{table}[t] +\begin{center} + \caption{Parameters controlling external scans, to be defined by + the user in the \texttt{scan\_definitions} file. 
The central + block of variables is used for scans on a \emph{Cartesian grid}, + whereas the last block is used for scans on a \emph{preset list} + of studies.} + \label{tab:ExternalScanParameters} + \begin{tabular}{|l|l|l|} + \hline + \rowcolor{blue!30} + \textbf{Parameter Name} & \textbf{Comment} & \textbf{Example} \\ + \hline + \texttt{scan\_masks} & trigger to use preset list of studies & + \texttt{scan\_masks=false} \\ + \hline + \texttt{scan\_variables} & variable names (used in study name) & + \texttt{scan\_variables="B QP"} \\ + \texttt{scan\_vals\_} & values to be explored for variable \texttt{} & + \texttt{scan\_vals\_B="1 4"} \\ + & & \texttt{scan\_vals\_QP="0 2 4"} \\ + \texttt{scan\_placeholders} & placeholders in \texttt{*.mask} file & + \texttt{scan\_placeholders="\%BV \%QPV"} \\ + \texttt{scan\_prefix} & common part of study name & + \texttt{scan\_prefix="HLLHC\_inj"} \\ + \hline + \texttt{scan\_studies} & explicit list of studies in the scan & + \texttt{scan\_studies="HLLHC\_inj\_B\_1\_QP\_4 } \\ + & & \texttt{HLLHC\_inj\_B\_4\_QP\_0"} \\ + \hline + \end{tabular} +\end{center} +\end{table} +The file describing the external scan is the \texttt{scan\_definitions}. +It is a new file to \SIXDESK{}, where the user fully describes the Cartesian +grid of interest or the pre-set list of studies. As for the +\texttt{sixdeskenv} and \texttt{sysenv} files, it must be coded +following the syntax of \texttt{bash}. +Table~\ref{tab:ExternalScanParameters} +lists the variables that the file should contain. +With the \texttt{scan\_masks} logical variable, the user instructs +\SIXDESK{} about the type of external scan to be performed: +\begin{description} +\item[\texttt{scan\_masks=false}] the scan is performed on the + \emph{Cartesian grid}; in this type of scan, the central + block of variables shown in Tab.~\ref{tab:ExternalScanParameters} + are used; +\item[\texttt{scan\_masks=true}] the scan is performed on the + \emph{pre-set list} of studies; in this type of scan, the last + block of variables shown in Tab.~\ref{tab:ExternalScanParameters} + are used. +\end{description} + +It should be kept in mind that, in the case of the \emph{Cartesian grid}, +the user must set up a \texttt{*.mask} file, to be used as template for +the studies in the scan. All the other regular input files +(see Sec.~\ref{Overview}) determine the +internal scan performed in each study, and are essentially cloned, +so that the dynamic aperture is probed in the same way +in all points of the external scan. On the contrary, +in the case of the preset list of studies, all the concerned +studies must be already existing, and no other input file is +required. + +\subsubsection{Scan on a Cartesian Grid} +In the scan on a \emph{Cartesian grid}, all the concerned studies +are generated out of a set of template files, based on a +\texttt{sixdeskenv}, \texttt{sysenv}, \texttt{*.mask} and +\texttt{scan\_definitions} files (and \texttt{fort.3.local}, optionally). +All the optics configurations are variations of the same one coded +in the template \texttt{*.mask} file. + +The user defines the parameter space in the \texttt{scan\_definitions} +file at their will, with no restrictions due to interfaces. The user must +make sure that the desired parameters can be represented by \MADX{} +and all the necessary settings are propagated to \SIXTRACK{} via +the geometry input files (see Sec.~\ref{Overview}). 
+Contrary to what is normally done in
+\SIXDESK{}, the user defines suitable \emph{placeholders} that will be
+used by \SIXDESK{} for query/replace in the \texttt{*.mask} file and
+for disentangling the various studies. Hence, it is the responsibility of the
+user not only to define the variables and the concerned ranges of values,
+but also to set up the necessary \emph{placeholders}
+in the template \texttt{*.mask} file.
+
+To start an external scan, the user should prepare:
+\begin{itemize}
+\item a regular \texttt{sixdeskenv}, to be used as a template.
+  The file is automatically replicated as is by \SIXDESK{} in all the studies
+  involved in the scan, with the exception of the actual study name
+  (i.e.~the \texttt{LHCDescrip} field), which is automatically updated
+  at the generation of the study.
+  Hence, it is in the user's interest to freeze the parameters of the internal
+  scan before starting the external one, such that all the studies immediately
+  inherit the correct parameters and ranges of values;
+\item a regular \texttt{sysenv}, to be cloned as is, with
+  no further modifications by \SIXDESK{}. As for the \texttt{sixdeskenv} file,
+  it is in the user's interest to set this file up correctly and completely
+  before starting the external scan;
+\item an optional file \texttt{fort.3.local}, to be cloned
+  as is, with no further modifications by \SIXDESK{}. As for the
+  \texttt{sixdeskenv} and \texttt{sysenv} files, it is in the user's interest
+  to set this file up correctly and completely before starting the external scan;
+\item a template \texttt{*.mask} file, to be used to generate all the studies
+  in the scan. \SIXDESK{} will take care of cloning it to the studies involved,
+  automatically performing the query/replace of the placeholders
+  necessary to correctly set up each study. The query/replace patterns
+  (and hence the placeholders) are uniquely defined by the user,
+  and no specific syntax is hard-coded in \SIXDESK{};
+\item the \texttt{scan\_definitions} file, which contains
+  the full description of the scan. More than one parameter can be scanned
+  at the same time, and the studies actually handled will follow the Cartesian
+  product of all the parameter values.
+\end{itemize}
+Table~\ref{tab:ExternalScanInputFile} summarises the key facts about the
+input files.
+\begin{table}[t]
+\begin{center}
+  \caption{Input files for external scans.}
+  \label{tab:ExternalScanInputFile}
+  \begin{tabular}{|l|l|l|}
+    \hline
+    \rowcolor{blue!30}
+    \textbf{File} & \textbf{Comments} & \textbf{Location} \\
+    \hline
+    \texttt{sixdeskenv} & -- a template file for automatic query/replace
+    & \texttt{sixjobs} \\
+    & -- it must define correct settings for the internal scan & \\
+    \texttt{sysenv} & cloned as is & \texttt{sixjobs} \\
+    \texttt{*.mask} & -- a template for automatic query/replace & \texttt{mask} \\
+    & -- it must contain placeholders of the scanned parameters & \\
+    \texttt{scan\_definitions} & -- unique file for the whole scan & \texttt{sixjobs} \\
+    & -- it describes the scan (\texttt{bash} syntax) & \\
+    \hline
+  \end{tabular}
+\end{center}
+\end{table}
+
+The user requests \SIXDESK{} to perform a scan on the \emph{Cartesian grid}
+by setting the \texttt{scan\_masks} flag in the \texttt{scan\_definitions} file
+to \texttt{false}.
+The same file (see Tab.~\ref{tab:ExternalScanParameters})
+contains all the information necessary to define the scan:
+\begin{itemize}
+\item the variable names to be looped on are specified by the user via
+  the \texttt{scan\_variables} variable;
+\item the respective placeholders in the \texttt{*.mask} file are
+  specified via the \texttt{scan\_placeholders} variable;
+\item the ranges of values to be scanned are specified via variables
+  like \texttt{scan\_vals\_<var>}, one per scanned parameter
+  \texttt{<var>}.
+\end{itemize}
+
+When generating the \texttt{*.mask} file specific to each study,
+\SIXDESK{} will automatically copy the template \texttt{*.mask}
+file and query/replace the placeholders with the actual values to be used.
+Hence, the parameter names must match actual \emph{placeholders} in the
+template \texttt{*.mask} file, and it is the responsibility of the
+user to match the \emph{placeholders} listed in the \texttt{scan\_definitions}
+with those in the template \texttt{*.mask} file.
+
+The naming convention of the study (and hence of the \texttt{*.mask} file)
+combines a common name (which can identify e.g.~the specific optics explored in
+the scan) and the name of each scanned variable
+with the explicit value used in each study.
+
+Table~\ref{tab:ExternalScanParameters} reports an example of
+variables in the \texttt{scan\_definitions}, coding an external scan for
+studying the dynamic aperture of the HL-LHC machine at injection; the
+scan is performed on both beams (variable \texttt{B}, \texttt{\%BV}
+as placeholder in the \texttt{*.mask} file, and values 1 and 4) with three
+values of chromaticity (0, 2 and 4, variable \texttt{QP} and \texttt{\%QPV} as
+placeholder in the \texttt{*.mask} file). As can be seen, the names of
+variables and placeholders are fully decided by the user, with no rules
+enforced by \SIXDESK{}. In any case, at set-up time, \SIXDESK{} will check
+that the placeholders exist in the template \texttt{*.mask} file.
+
+The template \texttt{*.mask} file must exist in the \texttt{mask}
+directory, and it must have the name specified in the \texttt{scan\_prefix}
+field of the \texttt{scan\_definitions} file. In the example,
+the template \texttt{*.mask} file would be named \texttt{HLLHC\_inj.mask}.
+The actual scan consists of 6 studies, named:
+\begin{lstlisting}
+HLLHC_inj_B_1_QP_0
+HLLHC_inj_B_1_QP_2
+HLLHC_inj_B_1_QP_4
+HLLHC_inj_B_4_QP_0
+HLLHC_inj_B_4_QP_2
+HLLHC_inj_B_4_QP_4
+\end{lstlisting}
+
+\subsubsection{Scan on a Preset List of Studies}
+If the user has already produced the required \texttt{*.mask} files
+and wants to scan over a specific (sub)set of studies, they can
+specify the study names explicitly. This can be useful if they want
+to run a command for only a subset of a larger set of studies of the
+Cartesian scan. To use this option, the variables used to set up
+the \emph{Cartesian product}, listed in the middle block of
+Tab.~\ref{tab:ExternalScanParameters}, are not suitable;
+those described in the last block of the same table should be used instead.
+
+The user requests \SIXDESK{} to perform a scan on the \emph{preset list}
+of studies by setting the \texttt{scan\_masks} flag in the
+\texttt{scan\_definitions} file to \texttt{true}. The same file
+(see Tab.~\ref{tab:ExternalScanParameters}) also specifies the list
+of studies to be treated, via their full names.
+As already mentioned, the studies concerned, with all their input files
+and folders, must already exist.
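+As a minimal sketch, the corresponding lines of the \texttt{scan\_definitions}
+file for a preset-list scan, reusing the example of
+Tab.~\ref{tab:ExternalScanParameters}, could read:
+\begin{lstlisting}
+# excerpt of a possible scan_definitions (preset list of studies)
+scan_masks=true
+scan_studies="HLLHC_inj_B_1_QP_4 HLLHC_inj_B_4_QP_0"
+\end{lstlisting}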
+
+In the above example, once the \texttt{scan\_masks} flag is set to
+\texttt{true} by the user, the only studies considered in the scan are:
+\begin{lstlisting}
+HLLHC_inj_B_1_QP_4
+HLLHC_inj_B_4_QP_0
+\end{lstlisting}
+
+\subsection{Implementation}
+The scans are handled via the \texttt{scans.sh} user script;
+it is simply a \texttt{bash} wrapper which loops the action requested by the
+user over the desired studies. The actual
+functions are coded in the \texttt{dot\_scan} (\texttt{bash}) library.
+Hence, the user has to deal only with the \texttt{scans.sh}
+script.
+
+To perform a desired action on all the studies in the scan, the user
+just needs to issue the \texttt{scans.sh} script using the \texttt{-x}
+\emph{action}, with the detailed command to be performed enclosed within
+double quotes. There is no
+need to specify the \texttt{-d} \emph{option} for the called script:
+\texttt{scans.sh} will take care of looping over all the studies in the scan
+and of issuing the requested command on each study separately.
+The only exceptions are the actions that have
+dedicated command line arguments: the generation
+of the actual \texttt{*.mask} files, achieved via the \texttt{-m}
+\emph{action}, and the set-up of the directories of each study,
+achieved via the \texttt{-s} \emph{action}.
+
+When generating the \texttt{*.mask} files,
+the script checks beforehand that all the placeholders that the
+user is going to use are found in the \texttt{*.mask} template
+file. To disable this check, please use the \texttt{-m} \emph{option}.
+
+The use of the \texttt{fort.3.local} file can be triggered via
+the \texttt{-l} \emph{option}, with no need to replicate it also in
+the string passed through the \texttt{-x} \emph{action}.
+
+A very basic parallelisation of the scan is available: the user can
+split the full scan into smaller ones, each with
+its own, uniquely named \texttt{scan\_definitions} file. Then,
+the respective number of instances of \texttt{scans.sh} can be issued,
+each with the \texttt{-d} \emph{option} specifying the name
+of the \texttt{scan\_definitions} file to be used.
+
+\subsection{Step-by-Step Guide}
+This guide is given for an external scan on a \emph{Cartesian grid}
+started from scratch:
+\begin{enumerate}
+\item set up your workspace and download template files
+  (see Sec.~\ref{Initialisation});
+\item edit all the necessary files, e.g.
+  \begin{enumerate}
+  \item \texttt{sixdeskenv} and \texttt{sysenv}, properly setting up
+    the internal scans, versions of codes, etc. Please make sure
+    that the \texttt{xing} variable in \texttt{sixdeskenv} is not
+    active (see Sec.~\ref{EnforceXingAngle});
+  \item the template \texttt{*.mask} file in the \texttt{mask} directory,
+    and \texttt{scan\_definitions}. Please make sure that:
+    \begin{itemize}
+    \item \texttt{scan\_prefix} matches the name of the template
+      \texttt{*.mask} file;
+    \item the lists contained in \texttt{scan\_variables} and
+      \texttt{scan\_placeholders} match;
+    \item for every variable scanned (e.g.~\texttt{QP}), you have the
+      corresponding list of values defined in the \texttt{scan\_vals\_*}
+      variable (e.g.~\texttt{scan\_vals\_QP});
+    \item all the placeholders defined in \texttt{scan\_placeholders}
+      are actually in the \texttt{*.mask} template file, and in the
+      correct positions. Please keep in mind that the query/replace
+      will be performed via a \texttt{sed} command;
+    \end{itemize}
+  \end{enumerate}
+\item generate all the necessary \texttt{*.mask} files and the
+  studies, e.g.
+\begin{lstlisting}
+> $SixDeskTools/utilities/bash/scans.sh -m -s -l
+\end{lstlisting}
+The \texttt{-l} \emph{option} is shown in the example to illustrate
+the command in case the \texttt{fort.3.local} file is required.
+The \texttt{-m} \emph{action} (i.e.~generation of \texttt{*.mask} files)
+and the \texttt{-s} \emph{action} (i.e.~set-up of studies) can also be
+performed separately;
+\item run \MADX{} and generate the geometry files for the \SIXTRACK{}
+  jobs, e.g.
+\begin{lstlisting}
+> $SixDeskTools/utilities/bash/scans.sh -x "mad6t.sh -s"
+\end{lstlisting}
+Once the jobs are over, it is good practice to check them before
+running \SIXTRACK{}, to avoid mis-submissions in case something
+went wrong with the \MADX{} jobs. Checking can be performed with e.g.
+\begin{lstlisting}
+> $SixDeskTools/utilities/bash/scans.sh -x "mad6t.sh -c"
+\end{lstlisting}
+\item submit the actual \SIXTRACK{} jobs, e.g.
+\begin{lstlisting}
+> $SixDeskTools/utilities/bash/scans.sh -x "run_six.sh -a -p BOINC"
+\end{lstlisting}
+Submission is explicitly done to the \texttt{BOINC} platform
+for all the studies. The usual list of platforms supported by
+\texttt{run\_six.sh} is available;
+\item download results and update the job database, e.g.
+\begin{lstlisting}
+> $SixDeskTools/utilities/bash/scans.sh -x "run_results"
+\end{lstlisting}
+The same command can be issued with \texttt{run\_status};
+\item \texttt{scans.sh} can be used for calling any script
+  in \SIXDESK{}, e.g.
+\begin{lstlisting}
+> $SixDeskTools/utilities/bash/scans.sh -x "correct_cases"
+\end{lstlisting}
+\end{enumerate}
diff --git a/utilities/doc/incAcknowledgement.tex b/utilities/doc/incAcknowledgement.tex
new file mode 100644
index 0000000..5c0bcb6
--- /dev/null
+++ b/utilities/doc/incAcknowledgement.tex
@@ -0,0 +1,2 @@
+\chapter*{Acknowledgement}
+Some acknowledgements.
diff --git a/utilities/doc/incTitlePage.tex b/utilities/doc/incTitlePage.tex
new file mode 100644
index 0000000..9e393b0
--- /dev/null
+++ b/utilities/doc/incTitlePage.tex
@@ -0,0 +1,54 @@
+\begin{titlepage}
+\begin{center}\normalsize\scshape
+  European Organization for Nuclear Research \\
+  CERN BE/ABP
+\end{center}
+\vspace*{2mm}
+\begin{flushright}
+  CERN/xx/xx \\
+  Updated April 2018
+\end{flushright}
+\begin{center}\Huge
+  \textbf{SixDesk} \\
+  \LARGE Version 1.0 \\
+  \vspace*{8mm} the Simulation Environment for SixTrack\\
+  \vspace*{8mm}\textbf{User's Reference Manual}
+\end{center}
+\begin{center}
+  R.~De Maria, M.~Giovannozzi, E.~McIntosh, A.~Mereghetti, F.~Schmidt,
+  I.~Zacharov \\
+  \vspace*{4mm}Updated by:
+  P.~D.~Hermes, D.~Pellegrini, S.~Kostoglou
+\end{center}
+\begin{center}\large
+  \vspace*{10mm}\textbf{Abstract} \\
+\end{center}
+\SIXTRACK{}~\cite{SixTrack_user_manual,SixPub,sixtrackWeb}
+is a single particle tracking code widely used at CERN. One of its
+most important applications is the estimation of the dynamic aperture available
+in large storage rings like the Large Hadron Collider (LHC) or the Future
+Circular Collider (FCC). These studies require massive computing resources,
+since they consist of scans over large parameter spaces probing non-linear beam
+dynamics over long times.
+The \SIXDESK{}~\cite{SixDesk_original,SixDesk_updated} environment is the +simulation framework used to manage and control the large amount of +information necessary for and produced by the studies. \\ +This document updates the previous documentation, and describes how massive +tracking campaigns can be performed with \SIXTRACK{} +starting from a \MADX{} ``mask'' file. +The \SIXDESK{} environment is an ensemble of shell scripts and configuration +files, aimed at easing the everyday life of the user interested in performing +large parameter scans with \SIXTRACK{}. +% It describes a new set +% of UNIX BASH or Korn shell scripts which allow the use of the Berkeley Open +% Infrastructure for Network Computing, BOINC~\cite{Boinc}) as an alternative to +% the Linux LSF batch system. This note is also published and regularly updated +% on the web page +% \myhref{http://cern.ch/sixtrack-ng/doc/sixdesk/sixdesk_env.html}{cern.ch/sixtrack-ng/doc/sixdesk/sixdesk\_env.html}. +\vfill +\begin{center} + Geneva, Switzerland \\ + \today +\end{center} + +\end{titlepage} diff --git a/utilities/doc/makefile b/utilities/doc/makefile new file mode 100644 index 0000000..8ac9799 --- /dev/null +++ b/utilities/doc/makefile @@ -0,0 +1,15 @@ + +manName=sixdesk + +all: $(manName).pdf + +clean: + rm -f $(manName).blg $(manName).bbl $(manName).toc $(manName).out $(manName).aux $(manName).log $(manName).lot *~ + +cleanall: + make clean + rm -f $(manName).pdf + +$(manName).pdf: *.tex + pdflatex $(manName).tex + pdflatex $(manName).tex diff --git a/utilities/doc/sixdesk.pdf b/utilities/doc/sixdesk.pdf new file mode 100644 index 0000000..7f572df Binary files /dev/null and b/utilities/doc/sixdesk.pdf differ diff --git a/utilities/doc/sixdesk.tex b/utilities/doc/sixdesk.tex new file mode 100644 index 0000000..6c73bd6 --- /dev/null +++ b/utilities/doc/sixdesk.tex @@ -0,0 +1,174 @@ +\documentclass[twoside,a4paper,11pt]{report} +\usepackage[twoside,a4paper,top=25mm,bottom=25mm,left=20mm,right=20mm]{geometry} + +\usepackage{float} +%\usepackage{graphics} +\usepackage[dvips]{epsfig,rotating} + +\usepackage{verbatim} +\usepackage{amsmath} +\usepackage{amssymb} + +%\usepackage{html} + +% Table Settings +\usepackage{longtable} +\usepackage[table]{xcolor} +\usepackage{tabu} +\usepackage{multirow,makecell} +\usepackage{tabularx} +% \usepackage{makecell} +%\tabulinesep=^1mm_1mm +\renewcommand{\arraystretch}{1.1} + +\usepackage{caption} +\captionsetup[table]{skip=2pt} + +\usepackage{todonotes} +%\usepackage{draftwatermark} + +\usepackage{fancyvrb} + +% Colours +\definecolor{cverbbg}{gray}{0.95} +\definecolor{cverbbd}{gray}{0.35} +\definecolor{notered}{rgb}{0.70,0.0,0.0} +\definecolor{linkred}{rgb}{0.70,0.1,0.0} + +% Links +\usepackage{hyperref} +\usepackage{url} +\hypersetup{colorlinks=true, citecolor=blue, urlcolor=blue, linkcolor=linkred} +\urlstyle{same} + +% Verbatim Box +\newenvironment{cverbatim} + {\SaveVerbatim{cverb}} + {\endSaveVerbatim + \scriptsize\flushleft\addtolength{\leftskip}{5mm} + \fboxrule=0.4pt\fboxsep=0.6em + \fcolorbox{cverbbd}{cverbbg}{\BUseVerbatim{cverb}}% + \endflushleft\normalsize +} + +\newenvironment{ctverbatim} + {\SaveVerbatim{ctverb}} + {\endSaveVerbatim + \tiny\flushleft\addtolength{\leftskip}{5mm} + \fboxrule=0.4pt\fboxsep=0.6em + \fcolorbox{cverbbd}{cverbbg}{\BUseVerbatim{ctverb}}% + \endflushleft\normalsize +} + +\newenvironment{description_alligned}[1] +{\begin{list}{}% + {\renewcommand\makelabel[1]{##1:\hfill}% + \settowidth\labelwidth{\makelabel{#1}}% + \setlength\leftmargin{\labelwidth} + 
\addtolength\leftmargin{\labelsep}}} + {\end{list}} + +\setcounter{secnumdepth}{3} +\setcounter{tocdepth}{3} +\pagestyle{headings} +\raggedbottom + +% Page Layout +%************* + +\usepackage{fancyhdr} + +% Plain Page Numbering +\fancypagestyle{plain}{ + \fancyhf{} + \fancyfoot[LE,RO]{\thepage} +} + +% Header style for numbered chapters +\newcommand{\defaulthead}{ + \fancyhead[LE]{\nouppercase{\scshape\leftmark}} + \fancyhead[RO]{\nouppercase{\scshape\rightmark}} +} + +% Custom heading for unnumbered chapters +\newcommand{\simplehead}[1]{ + \fancyhead[LE]{\nouppercase{\itshape #1}} + \fancyhead[RO]{\nouppercase{\itshape #1}} +} + +\pagestyle{fancy} + +% Page Header +\renewcommand{\chaptermark}[1]{\markboth{\chaptername\ \thechapter: #1}{}} +\renewcommand{\sectionmark}[1]{\markright{\thesection\ #1}{}} +\renewcommand{\headrulewidth}{0pt} +\renewcommand{\footrulewidth}{0pt} + +% new commands +%************* +\newcommand{\SIXDESK}{{\sc \textsc{SixDesk}}} +\newcommand{\SIXTRACK}{{\sc \textsc{SixTrack}}} +\newcommand{\MADX}{{\sc \textsc{MadX}}} +\newcommand{\SIXDB}{{\sc \textsc{SixDB}}} + +% fancy box with terminal line commands +%************************************** +\usepackage{listings} + +\usepackage{color} + +\definecolor{dkgreen}{rgb}{0,0.6,0} +\definecolor{gray}{rgb}{0.5,0.5,0.5} +\definecolor{mauve}{rgb}{0.58,0,0.82} + +% https://en.wikibooks.org/wiki/LaTeX/Source_Code_Listings +\lstset{ + language=sh, + basicstyle=\small\ttfamily, + numbers=left, + numberstyle=\tiny, + backgroundcolor=\color{lightgray}, + breaklines=true, + columns=fullflexible, + showstringspaces=false} + +\fancyhf{} +\defaulthead +\headheight 15pt +\fancyfoot[LE,RO]{\thepage} + +% Main Document +%************* +\begin{document} + +\pagenumbering{roman} + +\input{incTitlePage} +%\cleardoublepage +\pdfbookmark{Acknowledgements}{Acknowledgements} +\input{incAcknowledgement} +%\cleardoublepage + +\tableofcontents +%\cleardoublepage +\pagenumbering{arabic} + +% Chapters +\input{chIntroduction} +%\cleardoublepage +\input{chNewFeatures} +%\cleardoublepage +\input{chGuidelines} +%\cleardoublepage + +% \appendix +% \renewcommand{\chaptername}{Appendix} +% \input{apxKeywords} +% \cleardoublepage + +\input{theBibliography} +%\cleardoublepage +\listoftables +%\cleardoublepage + +\end{document} diff --git a/utilities/doc/theBibliography.tex b/utilities/doc/theBibliography.tex new file mode 100644 index 0000000..b05fc5d --- /dev/null +++ b/utilities/doc/theBibliography.tex @@ -0,0 +1,85 @@ +\begin{thebibliography}{99} + +\bibitem{SixTrack_user_manual} + F.~Schmidt, ``SixTrack: Version 4.7.16, Single Particle + Tracking Code Treating Transverse Motion with Synchrotron + Oscillations in a Symplectic Manner, User's Reference Manua'', + CERN/SL/94--56 (AP), CERN, Geneva, Switzerland (2017), + \url{http://sixtrack.web.cern.ch/SixTrack/docs/user_full/manual.php} + +\bibitem{SixPub} + G.~Ripken and F.~Schmidt, + ``A symplectic six-dimensional thin-lens formalism for tracking'', + CERN, Geneva, Switzerland, Rep.~CERN/SL/95--12(AP), 1995. 
+
+\bibitem{sixtrackWeb} SixTrack, \url{http://sixtrack.web.cern.ch/SixTrack}
+
+\bibitem{SixDesk_original}
+  M.~Hayes and F.~Schmidt, ``Run Environment for SixTrack'',
+  LHC Project Note 300, CERN, Geneva, Switzerland (2002),
+  \url{https://cds.cern.ch/record/691785/files/project-note-300.pdf}
+
+\bibitem{SixDesk_updated}
+  E.~McIntosh and R.~De Maria, ``The SixDesk Run Environment for SixTrack'',
+  CERN-ATS-TE-2012-089 TECH, CERN, Geneva, Switzerland (2012),
+  \url{https://github.com/SixTrack/SixDesk/blob/master/sixjobs/doc/sixdesk_env.pdf}
+
+\bibitem{DynApeStudiesGiovannozzi2015}
+  M.~Giovannozzi \emph{et al.},
+  ``Dynamic Aperture Studies for the LHC High Luminosity Lattice'',
+  in \emph{Proc.~6$^{\textrm{th}}$ Int.~Particle Accelerator Conf.~(IPAC'15)},
+  Richmond, VA, USA, May 2015, paper MOPMN003,
+  pp.~705--709.
+
+\bibitem{NomLHCdesignRepoV1}
+  O.~Br\"{u}ning \emph{et al.} (eds.), ``LHC design report'', Vol.~I,
+  CERN, Geneva, Switzerland, Rep.~CERN-2004-003-V-1, 2004.
+
+\bibitem{HLLHC_book}
+  O.~Br\"{u}ning, L.~Rossi (eds.),
+  ``The High Luminosity Large Hadron Collider'',
+  World Scientific Press, 2015, ISBN 978-981-4675-46-8.
+
+\bibitem{HLLHCtechDesRepo}
+  G.~Apollinari, I.~Bejar Alonso, O.~Br\"{u}ning, M.~Lamont, L.~Rossi (eds.),
+  ``High Luminosity Large Hadron Collider (HL-LHC) Technical Design Report
+  V.01'', CERN, Geneva, Switzerland, EDMS n.~1723851 v.0.71,
+  \url{https://edms.cern.ch/ui/file/1723851/0.71/HL_TDR_V07.0.2016.10.05.Version15.2h.pdf}
+
+\bibitem{SixTrackForCollimation}
+  G.~Robert-Demolaize, R.~Assmann, S.~Redaelli, F.~Schmidt,
+  ``A New Version of SixTrack with Collimation and Aperture Interface'',
+  in \emph{Proc.~Particle Accelerator Conf.~(PAC'05)},
+  Knoxville, TN, USA, 2005, paper FPAT081,
+  pp.~4084--4087.
+
+\bibitem{LHCCollSys}
+  R.W.~Assmann \emph{et al.},
+  ``The Final Collimation System for the LHC'',
+  in \emph{Proc.~10$^{\textrm{th}}$ European Particle Accelerator Conf.~(EPAC'06)},
+  Edinburgh, Scotland, UK, Jun 2006, paper TUODFI01,
+  pp.~986--988.
+
+\bibitem{RHICcollSys}
+  G.~Robert-Demolaize and A.~Drees, ``Simulations of collimation
+  losses at RHIC'', in \emph{Proc.~Tracking for Collimation Workshop},
+  CERN, Geneva, Switzerland, Oct 2015, unpublished.
+
+\bibitem{HLLHCTrackWS}
+  S.~Redaelli (ed.), ``Proceedings of the tracking for collimation
+  workshop'', CERN, Geneva, Switzerland, Oct 2015, unpublished.
+
+\bibitem{Amereghe6TColl}
+  A.~Mereghetti \emph{et al.},
+  ``SixTrack for Cleaning Studies: 2017 Updates'',
+  in \emph{Proc.~8$^{\textrm{th}}$ Int.~Particle Accelerator Conf.~(IPAC'17)},
+  Copenhagen, Denmark, May 2017, paper THPAB046,
+  pp.~3811--3814.
+ +\bibitem{Boinc} ``Berkeley Open Infrastructure for Network Computing'', + \url{http://boinc.berkeley.edu} + + \addcontentsline{toc}{chapter}{Bibliography} +\end{thebibliography} + diff --git a/utilities/python/generate_floats.py b/utilities/python/generate_floats.py index b92c91a..c793a3c 100644 --- a/utilities/python/generate_floats.py +++ b/utilities/python/generate_floats.py @@ -1,34 +1,273 @@ import sys -xmin=float(sys.argv[1]) -xmax=float(sys.argv[2]) -xdelta=float(sys.argv[3]) -prec=float(sys.argv[4]) - -# checks -if ( xmax0.0 and xstopxstart ): + if (lInvertExtremes): + tmpX=xstop + xstop=xstart + xstart=tmpX + else: + xdelta=-xdelta + if (lDebug): + print '1b:',xstart,xstop,xdelta,prec + + # - grant single points + if ( xdelta==0.0 ): + if ( xstop==0.0 ): + if ( xstart==0.0 ): + xfin=1 + else: + xfin=xstart + else: + if ( xstart==0.0 ): + xfin=xstop + else: + xfin=xstart + xdelta=xfin + xstop=xstart+xfin*0.5 + if (lDebug): + print '2:',xstart,xstop,xdelta,prec + else: + if ( abs(xstop-xstart)0.0): + sign=1 + else: + sign=-1 + while ( x*sign only integer part + x_i=data[0] + x_f='' + else: + x_i=data[0] + x_f=data[1] + return x_i,x_f + +def extremesInt( xstart, xstop, xdelta ): + # get strings of integer and fractional parts + xstart_i, xstart_f = split( xstart ) + xstop_i , xstop_f = split( xstop ) + xdelta_i, xdelta_f = split( xdelta ) + + # analyse fractional parts + # - get longest one: + ll=max([len(xstart_f),len(xstop_f),len(xdelta_f)]) + # - pad with zeros: + xstart_f=xstart_f.ljust(ll,'0') + xstop_f =xstop_f.ljust(ll,'0') + xdelta_f=xdelta_f.ljust(ll,'0') + # - remove useless zeros: + while( len(xstart_f)>0 and len(xstop_f)>0 and len(xdelta_f)>0 and xstart_f[-1]=='0' and xstop_f[-1]=='0' and xdelta_f[-1]=='0' ): + xstart_f=xstart_f[:-1] + xstop_f =xstop_f[:-1] + xdelta_f=xdelta_f[:-1] + ll=ll-1 + + # build actual integer-made numbers: + istart=int(xstart_i+xstart_f) + istop =int(xstop_i+xstop_f) + idelta=int(xdelta_i+xdelta_f) + + return istart, istop, idelta, ll + +def checkInts( istart, istop, idelta ): + # - inverted extremes + if ( idelta>0 and istopistart ): + if (lInvertExtremes): + tmpI=istop + istop=istart + istart=tmpI + else: + idelta=-idelta + if (lDebug): + print '1b:',istart,istop,idelta + + # - grant single points + if ( idelta==0 ): + idelta=1 + if ( istart!=istop ): + istop=istart + if (lDebug): + print '2:',istart,istop,idelta + + return istart,istop,idelta + +def genIntValues( istart, istop, idelta, ll, sym='.' 
): + values=[] + if (idelta<0): + jdelta=-1 + else: + jdelta=1 + for x in range(istart,istop+jdelta,idelta): + tmp=str(x) + if ( ll>0 ): + output='' + if (tmp[0]=='-'): + output='-' + tmp=tmp[1:] + lt=len(tmp) + if (lt<=ll): + output+='0'+sym+''.ljust(ll-lt,'0')+tmp + else: + output+=tmp[:-ll]+sym+tmp[-ll:] + if ( lForceIntegers and output[-ll:]==''.ljust(ll,'0') ): + output=output[:len(output)-(ll+1)] # skip also the sym + if ( lRemoveTrailingZeros and sym in output ): + while( output[-1]=='0' ): + output=output[:-1] + else: + output=tmp + if (lDebug): + print 'gen:',x,output,ll,lForceIntegers + values.append(output) + if lSkipExtremes: + values=values[1:-1] + return values + +# ============================================================================== +# main +# ============================================================================== + +if ( __name__ == "__main__" ): + # some flags + lDebug=False + lInvertExtremes=False + + # terminal-line input parameters + xstart=sys.argv[1] + xstop=sys.argv[2] + xdelta=sys.argv[3] + + # skip extremes? + if ( len(sys.argv)>4 ): + lSkipExtremes=sys.argv[4].lower()=="true" + else: + lSkipExtremes=False + + # float-based or int-based loop? + if ( len(sys.argv)>5 ): + lIntegerBased=sys.argv[5].lower()=="true" + else: + lIntegerBased=True + + # dump an integer as .0 or as int? + if ( len(sys.argv)>6 ): + lForceIntegers=sys.argv[6].lower()=="true" + else: + lForceIntegers=False + + if ( lIntegerBased ): + + # remove trailing zeros + if ( len(sys.argv)>7 ): + lRemoveTrailingZeros=sys.argv[7].lower()=="true" + else: + lRemoveTrailingZeros=True + + # acquire values + istart, istop, idelta, ll = extremesInt( xstart, xstop, xdelta ) + if (lDebug): + print 'ints:', istart, istop, idelta, ll + # sanity checks + istart, istop, idelta = checkInts( istart, istop, idelta ) + if (lDebug): + print 'after sanity checks:', istart, istop, idelta, ll + # loop + values = genIntValues( istart, istop, idelta, ll ) + for value in values: + print value + if (lDebug): + print 'end loop:', istart, istop, idelta, ll + else: + if ( len(sys.argv)>7 ): + prec=float(sys.argv[7]) + else: + prec=1.0E-15 + # make them float, in case + xstart=float(xstart) + xstop=float(xstop) + xdelta=float(xdelta) + prec=float(prec) + if (lDebug): + print 'floats:',xstart,xstop,xdelta,prec + # sanity checks + xstart, xstop, xdelta = checkValues( xstart, xstop, xdelta, prec ) + if (lDebug): + print 'after sanity checks:',xstart,xstop,xdelta,prec + # loop + values = genValues( xstart, xstop, xdelta, prec ) + for value in values: + print value + if (lDebug): + print 'end loop:',xstart,xstop,xdelta,prec
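+
+# Example usage (illustrative only; arguments are positional, as parsed above):
+#   python generate_floats.py <xstart> <xstop> <xdelta> [skipExtremes [integerBased [forceIntegers [removeTrailingZeros|prec]]]]
+# e.g. with the default integer-based loop:
+#   python generate_floats.py 0 4 2
+# prints the values 0, 2 and 4, one per line.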