#!/bin/bash

# dag-wf-run — run a dag-wf snakemake workflow in a configured directory.
# SGE directives below take effect when this script is itself submitted
# with qsub (they must keep the "#$ " prefix at start of line):
#$ -V
#$ -cwd
#$ -j y
#$ -o $JOB_NAME.$JOB_ID

# Abort on any command failure, including failures inside pipelines.
# (The original set both `set -e` and `set -o errexit`, which are the same
# option; the duplicate has been dropped.)
set -o errexit
set -o pipefail

# ANSI colour codes.  Stored as literal backslash sequences (single quotes)
# and interpreted later when interpolated into printf format strings.
RED='\033[0;31m'
DEF='\033[0m'
BOLD='\033[1m'


function usage {
	# Print the help text and exit successfully.  Called both for -h and
	# (after an error line on stderr) for unrecognised options.
	# Typos fixed in the help text: "generate by" -> "generated by",
	# "workflows conda environment" -> "the workflow's conda environment".
	usage=$(cat <<'EOUSAGE'
Usage: dag-wf-run [--cleanup-outputs] [--cleanup-conda] [-h]

  Options:
	* --cleanup-outputs: Remove all outputs generated by running workflow
	* --cleanup-conda: Remove the workflow's conda environment
	* -h: Display help
EOUSAGE
)

	echo "$usage"
	echo
	exit 0
}

function setup_error {
	# Report that this directory lacks the layout dag-wf-setup produces,
	# then abort with status 1.
	local -r banner="\n${RED}Error: This doesn't look like a configured workflow directory...${DEF}\n\n"
	printf "$banner"
	echo -e "Please ensure this directory has been created using dag-wf-setup\n"
	exit 1
}

function conda_error {
	# A required tool/file from the conda environment is missing; point the
	# user at environment activation and abort with status 2.
	local -r hint="Please check you have activated the correct conda environment\n"
	echo -e "$hint"
	exit 2
}

function cleanup_outputs {
	# Handler for --cleanup-outputs: delegate to snakemake to delete every
	# output file the workflow produced, then stop — nothing else should
	# run after a cleanup request.
	snakemake --delete-all-output
	exit 
}

function cleanup_conda {
	# Handler for --cleanup-conda: have snakemake remove the conda
	# environments it created for the workflow rules, then stop.
	snakemake --cleanup-conda
	exit
}

# Workflow metadata lives inside the active conda environment.
# Expansions of CONDA_PREFIX are now quoted so prefixes containing
# whitespace no longer break the -e test or the source command.
ETC_DIR="${CONDA_PREFIX}/etc/dag-wf"

if [[ -e "${CONDA_PREFIX}/bin/dag-wf-parse-yaml" ]]; then
	# Defines parse_yaml, used below to flatten meta.yaml into variables.
	source "${CONDA_PREFIX}/bin/dag-wf-parse-yaml"
else 
	printf "\n${RED}Error: dag-wf-parse-yaml not found${DEF}\n\n"
	conda_error
fi

# Option parsing.  getopts only understands short flags; the ":h-:" spec
# treats '-' as a flag taking an argument, so "--word" arrives as
# optchar='-' with OPTARG='word' — the standard long-option trick.
optspec=":h-:"

while getopts "$optspec" optchar; do
	case "${optchar}" in
		-)
			case "${OPTARG}" in
				cleanup-outputs)
					cleanup_outputs
					;;
				cleanup-conda)
					cleanup_conda
					;;
				*)
					echo "Unknown option --${OPTARG}" >&2
					usage
					;;
			esac
			;;
		h)
			usage
			;;
		*)
			# Silent-error mode (leading ':' in optspec) puts the bad flag
			# in OPTARG; report it ourselves and show the usage text.
			if [ "$OPTERR" != 1 ] || [ "${optspec:0:1}" = ":" ]; then
				printf "\n${RED}Error: Unknown argument: '-${OPTARG}'${DEF}\n\n" >&2
				usage
			fi
			;;
	esac
done



# The .workflow marker (written by dag-wf-setup) names the workflow this
# directory was configured for; refuse to run without it.
if [[ -e .workflow ]]; then
	wf=$(<.workflow)
else 
	setup_error
fi

# An optional .jobname marker overrides the job-name prefix used for
# cluster submissions; default to the workflow name.
if [[ -e .jobname ]]; then
	JOBNAME=$(<.jobname)
else
	JOBNAME=${wf}
fi

# Flatten the workflow's meta.yaml into wf__* shell variables and verify
# the three keys this script relies on are present.
if [[ -e ${ETC_DIR}/${wf}/meta.yaml ]]; then
	eval $(parse_yaml ${ETC_DIR}/${wf}/meta.yaml)
	if [[ -z $wf__in_dir ]] || [[ -z $wf__in_name ]] || [[ -z $wf__slots_per_sample ]]; then
		# BUG FIX: this message previously interpolated "${meta.yaml}",
		# an invalid parameter expansion that made non-interactive bash
		# abort with "bad substitution" instead of printing the error.
		printf "\n${RED}Error: Could not parse ${ETC_DIR}/${wf}/meta.yaml${DEF}\n\n"
		exit 3
	fi
else 
	printf "\n${RED}Error: No ${ETC_DIR}/${wf}/meta.yaml file found...${DEF}\n\n"
	conda_error
fi

# A Snakefile in the current directory is required to run anything.
if [[ ! -e Snakefile ]]; then
	printf "\n${RED}Error: No Snakefile found...${DEF}\n\n"
	setup_error
fi

# The input directory named in meta.yaml must exist here.
if [[ ! -d ${wf__in_dir} ]]; then 
	printf "\n${RED}Error: No ${wf__in_dir} directory found...${DEF}\n\n"
	setup_error
fi

# BUG FIX: the old check grepped `which` output for "no snakemake", a
# message only csh-style which prints — GNU which prints nothing when the
# command is absent, so a missing snakemake passed the check silently.
# `command -v` is the portable, reliable test.
if ! command -v snakemake >/dev/null 2>&1; then
	printf "\n${RED}Error: snakemake not found...${DEF}\n\n"
	conda_error
fi

# Record when this run started (human-readable file + timestamp variable).
date +"%a %b %d %Y %X" > .start_date.txt
START_DATE=$(date +%d%m%y_%H%M%S)

# Allow 2 jobs per-sample for parallel processing of trimming/trimmed fastqc and untrimmed fastqc.
# Count samples by globbing instead of parsing `ls` output: the old
# `ls ... | wc -l` miscounted filenames containing whitespace and, under
# set -e/pipefail, killed the script with no message when nothing matched.
shopt -s nullglob
sample_files=( "${wf__in_dir}"/*"${wf__in_name}" )
shopt -u nullglob
SAMPLES=${#sample_files[@]}
if (( SAMPLES == 0 )); then
	printf "\n${RED}Error: No *${wf__in_name} files found in ${wf__in_dir}...${DEF}\n\n"
	setup_error
fi
JOBS=$(( SAMPLES * 2 ))

# Cluster submission is only attempted when qsub is on PATH.  `command -v`
# replaces `type`, whose "not found" message leaked to stderr (only stdout
# was redirected before).
if ! command -v qsub >/dev/null 2>&1; then
	HAVE_CLUSTER=0
else
	HAVE_CLUSTER=1
fi

# Activate the workflow's dedicated conda environment for the run itself.
source "$CONDA_PREFIX/../../etc/profile.d/conda.sh"
conda activate "dag-wf-${wf}"

# Launch the workflow.  Preference order: DRMAA bindings if the library is
# configured, plain qsub submission if a cluster is available, otherwise a
# local run.  The SGE flags and common snakemake options are shared by the
# two cluster branches, so they are built once here.
common_args=( --cluster-config config.json -j "${JOBS}" --jn "${JOBNAME}.{name}.{jobid}" --latency-wait 30 )
sge_flags="-V -cwd -j y -pe smp {threads} -jc {cluster.jc} -o {cluster.o}"

if [[ ! -z "$DRMAA_LIBRARY_PATH" ]]; then 
    echo -e "\nRunning ${wf} workflow using DRMAA...\n"
    snakemake "${common_args[@]}" --drmaa " ${sge_flags}"
elif [[ "$HAVE_CLUSTER" == "1" ]]; then
    echo -e "\nRunning ${wf} workflow under cluster environment...\n"
    snakemake "${common_args[@]}" --cluster "qsub ${sge_flags}"
else 
    echo -e "\nRunning ${wf} workflow\n"
    snakemake
fi


