#!/bin/bash
# Script to convert a set of DL1 real data files to DL2.
# Change the paths below according to your system.

# Some variables used to retrieve the input files
DATES=( 20201120 )
#DATES=( 20201118 20201119 20201120 ) # possible to analyze more dates together
SOURCE="Crab"
#CONFIG=/fefs/aswg/workspace/alice.donini/Software/cta-lstchain/lstchain/data/lstchain_standard_config.json
VERSION="v0.9.2"
CLEANING="tailcut84"
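# Cleaning label, used only to build the input/output directory names below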

# Trained RF model files (source-independent analysis)
PATH_MODELS="/fefs/aswg/workspace/alice.donini/Analysis/data/models/source_independent"
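# PATH_MODELS is expected to contain the trained RF models produced in the
# MC training step (energy regressor, disp regressor, gamma/hadron classifier).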

# lstchain script that performs the DL1 -> DL2 conversion (in ./cta-lstchain/lstchain/scripts/)
SCRIPT="/fefs/aswg/workspace/alice.donini/Software/cta-lstchain/lstchain/scripts/lstchain_dl1_to_dl2.py"

for date in "${DATES[@]}"
do
    INPUT_DIR=/fefs/aswg/workspace/alice.donini/Analysis/data/DL1/${SOURCE}/${date}/${VERSION}/${CLEANING}/
    OUTPUT_DIR=/fefs/aswg/workspace/alice.donini/Analysis/data/DL2/${SOURCE}/${date}/${VERSION}/${CLEANING}/
    mkdir -p ${OUTPUT_DIR}
    mkdir -p jobs/dl2/${date}/
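    # Write one SLURM job script per DL1 file and submit it with sbatch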
    for file in $(ls ${INPUT_DIR}/ | grep dl1_LST); do
        echo "File $file"
        rm -f jobs/dl2/${date}/run_${file}.sh
        echo "#!/bin/bash
#SBATCH -N 1
#SBATCH --mem 100000
ulimit -l unlimited
ulimit -s unlimited
ulimit -a
        python $SCRIPT -f $INPUT_DIR$file -p $PATH_MODELS -o $OUTPUT_DIR" >> jobs/dl2/${date}/run_${file}.sh
        # python $SCRIPT -f $INPUT_DIR$file -p $PATH_MODELS -o $OUTPUT_DIR -c $CONFIG" >> jobs/dl2/${date}/run_${file}.sh
        chmod gu+x jobs/dl2/${date}/run_${file}.sh
        sbatch jobs/dl2/${date}/run_${file}.sh
    done
done
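
# Example usage (assuming the paths above exist on your system):
#   ./run_dl1_to_dl2.sh
# The submitted jobs can be monitored with, e.g.:
#   squeue -u $USER
# Each job writes its DL2 output into the corresponding OUTPUT_DIR.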