#!/bin/bash
#Script to run lstchain_create_dl3_file.py over a set of real DL2 data.
#Change paths according to your system.
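#Usage: bash run_dl2_to_dl3.sh
#Note: jobs are submitted with sbatch, so a SLURM-based cluster (the IT cluster) is assumed.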

#Some variables for retrieving the input files
DATES=( 20201120 ) #Dates to analyze
SOURCE="Crab" #name of source
SRCRA="83.63308333deg" #source ra
SRCDEC="22.0145deg" #source dec
VERSION="v0.9.2" #lstchain version
CLEANING="tailcut84" #tailcut
INPUT_DIR="/fefs/aswg/workspace/alice.donini/Analysis/data/" #Base directory where to find the dl2 data
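#Expected DL2 layout: ${INPUT_DIR}/DL2/<source>/<date>/<version>/<cleaning>/dl2_LST-1.Run*.h5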

#Variables for the output directory
#SUFFIX="tuned_dynamic_cleaning_source_independent" #Optional suffix to add to a folder
CUT="standard" #Name of the cut applied to data ( nocuts, softcut, hardcut, hardestcut )

#Configuration file to be used if not standard cut
#CONFIG="/fefs/aswg/workspace/alice.donini/Analysis/config/config_dl3_tool${CUT}.json"

#IRF file
IRF="/fefs/aswg/workspace/alice.donini/Analysis/data/IRF/irf_40deg.fits.gz"

#Script (in cta-lstchain/lstchain/tools/)
SCRIPT="/fefs/aswg/workspace/alice.donini/Software/cta-lstchain/lstchain/tools/lstchain_create_dl3_file.py"

for date in "${DATES[@]}" #Run over dates
do
    #Define and create an output path for each date
    OUTPUT_PATH="/fefs/aswg/workspace/alice.donini/Analysis/data/DL3/$SOURCE/$date/$VERSION/$CLEANING"
    mkdir -p "$OUTPUT_PATH"

    FILES=$(ls ${INPUT_DIR}/DL2/$SOURCE/${date}/${VERSION}/${CLEANING}/*.h5)
    # Run over the merged run files
    mkdir -p jobs/dl3/${date}
    for f in $FILES
    do
        b=$(basename $f)
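        # Extract the run number from file names like dl2_LST-1.RunXXXXX.h5:
        # strip the 13-character "dl2_LST-1.Run" prefix and the 3-character ".h5" suffix.
        # Run numbers containing "." correspond to subrun files and are skipped, so only merged runs are processed.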
        run=${b:13:-3}
        if [[ $run != *"."* ]]; then
            FILE=${INPUT_DIR}/DL2/${SOURCE}/${date}/${VERSION}/${CLEANING}/dl2_LST-1.Run$run.h5
            # Alternative: execute the script directly instead of submitting a SLURM job.
            # You can remove the "srun --mem=20g -o out.txt" part if you don't want to use the IT cluster.
            #srun --mem=20g -o out.txt python $SCRIPT -d $FILE -o $OUTPUT_PATH --input-irf $IRF --source-name $SOURCE --source-ra $SRCRA --source-dec $SRCDEC --config $CONFIG --overwrite &
            echo "File $b"
            rm -f jobs/dl3/${date}/run_dl3_${run}${CUT}.sh

            echo "#!/bin/bash
#SBATCH -N 3
#SBATCH --mem 100000
ulimit -l unlimited
ulimit -s unlimited
ulimit -a

            python $SCRIPT -d $FILE -o $OUTPUT_PATH --input-irf $IRF --source-name $SOURCE --source-ra $SRCRA --source-dec $SRCDEC --overwrite " >> jobs/dl3/${date}/run_dl3_${run}${CUT}.sh
            # python $SCRIPT -d $FILE -o $OUTPUT_PATH --input-irf $IRF --source-name $SOURCE --source-ra $SRCRA --source-dec $SRCDEC --config $CONFIG --overwrite " >> jobs/dl3/${date}/run_dl3_${run}${CUT}.sh
            chmod gu+x jobs/dl3/${date}/run_dl3_${run}${CUT}.sh
            sbatch jobs/dl3/${date}/run_dl3_${run}${CUT}.sh
        fi
    done
done