Commit cf3fba00 authored by Stefano Alberto Russo's avatar Stefano Alberto Russo
Browse files

Added Prefactor3 container.

parent bc6782c5
# containers
# LOFAR-IT Containers
Containers source by the Italian LOFAR group.
## Prefactor3
This container is based on the "lofaruser/imaging-pipeline:v3.10" base image, and it is available from Docker Hub as "lofarit/prefactor3:pipeline_v3.10".
How to run the container with Docker:
docker run --rm -v $INPUT_DATA_FOLDER:/input_data,$OUTPUT_DATA_FOLDER:/output_data -it lofarit/prefactor3:pipeline_v3.10
How to run the container wth Singularity:
singularity run --pid --writable-tmpfs --containall --cleanenv -B$INPUT_DATA_FOLDER:/input_data,$OUTPUT_DATA_FOLDER:/output_data docker://lofarit/prefactor3:pipeline_v3.10
In both cases you have to set the $INPUT_DATA_FOLDER and $OUTPUT_DATA_FOLDER to the input and output data folders respectively, on the host system (i.e. the machine on which you are running the container). Note that the output data folder must exists and have write permissions (if using Singularty, by the user running the container).
How to start prefactor3 once in the container:
$ ./run_pipelines.sh
This command will run the calibrator and target pipelines in sequence. Feel free to have a look and change the files as pipeline.cfg, Pre-Facet-Calibrator.parset, Pre-Facet-Target.parset and run_pipelines.sh itself in the /home/lofar directory inside the container to suit your needs.
Remember that the contents of the container (excluding data on your external volumes as the input/output data directories) will be wiped when you exit the container.
\ No newline at end of file
FROM lofaruser/imaging-pipeline:v3.10
# Set non-interactive
ENV DEBIAN_FRONTEND noninteractive
# Always update when extending base images
RUN apt update
#------------------------
# Install deps
#------------------------
# Git, Curl, sudo and Nano
RUN apt-get install git curl sudo nano -y
#------------------------
# Lofar user
#------------------------
# Add group. We chose GID 65527 to try avoiding conflicts.
RUN groupadd -g 65527 lofar
# Add user. We chose UID 65527 to try avoiding conflicts.
RUN useradd lofar -d /home/lofar -u 65527 -g 65527 -m -s /bin/bash
# Add metuaser user to sudoers
RUN adduser lofar sudo
# No pass sudo (for everyone, actually)
COPY sudoers /etc/sudoers
#------------------------
# Get prefactor
#------------------------
RUN cd /opt && git clone https://github.com/lofar-astron/prefactor
RUN cd /opt/prefactor && git pull && git checkout d4f18ff # V3.0 tag hash
# Add conf and run scripts
COPY pipeline.cfg /home/lofar/pipeline.cfg
COPY Pre-Facet-Calibrator.parset /home/lofar/Pre-Facet-Calibrator.parset
COPY Pre-Facet-Target.parset /home/lofar/Pre-Facet-Target.parset
COPY run_pipelines.sh /home/lofar/run_pipelines.sh
COPY data/input_data /input_data
COPY data/output_data /output_data
RUN chmod 755 -R /home/lofar/run_pipelines.sh && chown lofar:lofar /home/lofar && chown -R lofar:lofar /input_data && chown -R lofar:lofar /output_data
# This is required mainly for Singularity
RUN mv /home/lofar /home/vanilla_lofar
RUN ln -s /tmp/lofarhome /home/lofar
RUN rm -rf /tmp/lofarhome
#----------------------
# Entrypoint
#----------------------
# Copy entrypoint
COPY entrypoint.sh /
# Give right permissions
RUN chmod 755 /entrypoint.sh
# Set entrypoint
ENTRYPOINT ["/entrypoint.sh"]
# Set user lofar
USER lofar
This diff is collapsed.
This diff is collapsed.
#!/bin/bash
docker build . -t lofarit/prefactor3:pipeline_v3.10
Put here "calib" and "target" MS data (or link agains a folder containing them).
Directory for output data.
#!/bin/bash
# Exit on any error. More complex thing could be done in future
# (see https://stackoverflow.com/questions/4381618/exit-a-script-on-error)
set -e
echo ""
echo "[INFO] Executing entrypoint..."
echo "[INFO] Sourcing env in /opt/lofarsoft/lofarinit.sh..."
source /opt/lofarsoft/lofarinit.sh
echo "[INFO] Creating /tmp/lofarhome to be used as lofar home"
mkdir /tmp/lofarhome
echo "[INFO] Initialising /tmp/lofarhome with configuration files"
cp -a /home/vanilla_lofar/* /tmp/lofarhome
echo "[INFO] Moving to /home/lofar and setting as home"
cd /home/lofar
export HOME=/home/lofar
echo "[INFO] Setting new prompt @prefactor3 container"
echo 'export PS1="${debian_chroot:+($debian_chroot)}\u@prefactor3@\h:\w\$ "' > /tmp/lofarhome/.bashrc
# Set entrypoint command
if [ "x$@" == "x" ]; then
COMMAND="/bin/bash"
else
COMMAND="$@"
fi
# Start!
echo -n "[INFO] Will execute entrypoint command: "
echo $COMMAND
echo ""
echo "=============================================================="
echo "| Welcome to the Prefactor3 container! |"
echo "=============================================================="
echo ""
echo "You are now in /home/lofar with write access as user \"$(whoami)\"."
echo ""
echo "Remember that contents inside this container, unless stored"
echo "on a persistent volume mounted from you host machine, will"
echo "be wiped out when exiting the container."
echo ""
exec $COMMAND
[DEFAULT]
lofarroot = /opt/lofarsoft
casaroot = %(lofarroot)s
pyraproot =
hdf5root =
wcsroot =
# /opt/lofar/lib/python2.7/site-packages
pythonpath = %(lofarroot)s/lib/python2.7/site-packages
# /opt/lofar/lib/python2.7/site-packages/lofarpipe/recipes
recipe_directories = [%(pythonpath)s/lofarpipe/recipes]
runtime_directory = /output_data
working_directory = %(runtime_directory)s
# [/opt/lofar/share/pipeline/tasks.cfg]
task_files = [%(lofarroot)s/share/pipeline/tasks.cfg]
[layout]
job_directory = %(runtime_directory)s/%(job_name)s
[cluster]
clusterdesc = %(working_directory)s/pipeline.clusterdesc
[deploy]
engine_ppath = %(pythonpath)s:%(pyraproot)s/lib:/opt/cep/pythonlibs/lib/python/site-packages
engine_lpath = %(lofarroot)s/lib:%(casaroot)s/lib:%(pyraproot)s/lib:%(hdf5root)s/lib:%(wcsroot)s/lib
[logging]
log_file = %(runtime_directory)s/%(job_name)s/logs/%(start_time)s/pipeline.log
xml_stat_file = %(runtime_directory)s/%(job_name)s/logs/%(start_time)s/statistics.xml
[feedback]
# Method of providing feedback to LOFAR.
# Valid options:
# messagebus Send feedback and status using LCS/MessageBus
# none Do NOT send feedback and status
method = none
[remote]
method = local
max_per_node = 1
#!/bin/bash
docker run --rm -v $PWD/data:/data -it lofarit/prefactor3:pipeline_v3.10 /bin/bash
#!/bin/bash
set -e
genericpipeline.py /home/lofar/Pre-Facet-Calibrator.parset -v -d -c /home/lofar/pipeline.cfg
genericpipeline.py /home/lofar/Pre-Facet-Calibrator.parset -v -d -c /home/lofar/pipeline.cfg
\ No newline at end of file
#!/bin/bash
singularity run --pid --writable-tmpfs --containall --cleanenv -Bdata/lofar/input_data:/data/lofar/input_data,data/lofar/output_data:/data/lofar/output_data docker://lofarit/prefactor3
\ No newline at end of file
#
# This file MUST be edited with the 'visudo' command as root.
#
# Please consider adding local content in /etc/sudoers.d/ instead of
# directly modifying this file.
#
# See the man page for details on how to write a sudoers file.
#
Defaults env_reset
Defaults mail_badpass
Defaults secure_path="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
# Host alias specification
# User alias specification
# Cmnd alias specification
# User privilege specification
root ALL=(ALL:ALL) ALL
# Members of the admin group may gain root privileges
%admin ALL=(ALL) ALL
# Allow members of group sudo to execute any command
%sudo ALL=(ALL:ALL) NOPASSWD:ALL
# See sudoers(5) for more information on "#include" directives:
#includedir /etc/sudoers.d
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment