From 84d5d79099ecceee60f39babfd0f6a75665b926c Mon Sep 17 00:00:00 2001
From: fBeyer89 <fbeyer@cbs.mpg.de>
Date: Wed, 7 Feb 2024 14:41:16 +0100
Subject: [PATCH] added singularity creation script and changed to bash script
 execution

---
 README.md                                     | 15 +++--
 bullseye_workflow/run_bullseye.sh             | 22 +++++++
 .../run_bullseye_WMH_segmentation.py          | 61 ++++++-------------
 documentation_of_singularity_image_creation   | 32 ++++++++++
 4 files changed, 81 insertions(+), 49 deletions(-)
 create mode 100644 bullseye_workflow/run_bullseye.sh
 create mode 100644 documentation_of_singularity_image_creation

diff --git a/README.md b/README.md
index bf2a0a1..fc83463 100644
--- a/README.md
+++ b/README.md
@@ -18,12 +18,15 @@ use singularity container with pre-installed environment:
 
 
 ## Usage
-- have FreeSurfer version 6.0.0p1 or higher loaded
-- Python 2 environment with nipype, nibabel (```agewell_nip1.2```)
-- change into the respective directory 
-- define subjects to run it in ll. 83 ff of `run_bullseye_WMH_segmentation.py`
-- type `python run_bullseye_WMH_segmentation.py` to execute
-
+### Own environment
+- have FreeSurfer version 7.3.1 (or at least 6.0.0p1) loaded
+- activate a Python 3.9 environment with nipype==1.8.6 and nibabel (```beseg```)
+    - e.g. install via conda: `conda create -n beseg python=3.9 nipype nibabel` followed by `pip install pandas`
+- define the subjects to process and the FLAIR/WMH file paths in `bullseye_workflow/run_bullseye.sh`
+- run `bash run_bullseye.sh`, which calls `run_bullseye_WMH_segmentation.py` once per subject (see the example call below)
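+
+The loop in `run_bullseye.sh` passes five positional arguments to the Python script. A minimal sketch of a direct call (the subject ID is a placeholder; the paths follow the defaults in `run_bullseye.sh`):
+
+```bash
+# arguments: subject_id  working_dir  freesurfer_dir  flair_file  wmh_file
+python run_bullseye_WMH_segmentation.py SUBJ001 \
+    /data/pt_life_whm/Data/wd/ \
+    /data/pt_life_freesurfer/freesurfer_all \
+    /data/pt_life_whm/Data/LST/sub-SUBJ001/mFLAIR_bl.nii.gz \
+    /data/pt_life_whm/Data/LST/sub-SUBJ001/ples_lpa_mFLAIR_bl.nii.gz
+```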
+
+### Singularity container with all environments pre-installed
+- build or obtain the `bullseye.sif` image (see `documentation_of_singularity_image_creation`)
+- run the workflow inside the container, e.g. with `singularity exec` as sketched below
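+
+```bash
+# minimal sketch, assuming the image was built as bullseye.sif (see
+# documentation_of_singularity_image_creation) and that the data and
+# FreeSurfer directories must be bind-mounted into the container
+singularity exec \
+    -B /data/pt_life_whm \
+    -B /data/pt_life_freesurfer \
+    bullseye.sif bash bullseye_workflow/run_bullseye.sh
+```
+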
 ## Support
 fbeyer@cbs.mpg.de 
 
diff --git a/bullseye_workflow/run_bullseye.sh b/bullseye_workflow/run_bullseye.sh
new file mode 100644
index 0000000..e4f1019
--- /dev/null
+++ b/bullseye_workflow/run_bullseye.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+"""
+Created on Wed Feb  7 14:25:03 2024
+
+@author: fbeyer
+"""
+ # specify the location of the preprocessed data     
+working_dir="/data/pt_life_whm/Data/wd/" # where intermediate files will be saved 
+freesurfer_dir="/data/pt_life_freesurfer/freesurfer_all" # where freesurfer output directories are located
+
+# Set FreeSurfer directory
+export SUBJECTS_DIR=$freesurfer_dir
+
+# subjects to process: list IDs here or read them from a table,
+# e.g. subj_list=$(cut -d, -f1 participants.csv | tail -n +2)
+subj_list="SUBJ001 SUBJ002" # placeholder IDs
+
+for subject_id in $subj_list
+do
+LESION="/data/pt_life_whm/Data/LST/sub-${subject_id}/ples_lpa_mFLAIR_bl.nii.gz" # exact file name of wmh probability map
+FLAIR="/data/pt_life_whm/Data/LST/sub-${subject_id}/mFLAIR_bl.nii.gz" # exact name of FLAIR/T2 image used for registration
+
+python run_bullseye_WMH_segmentation.py "$subject_id" "$working_dir" "$freesurfer_dir" "$FLAIR" "$LESION"
+
+done
\ No newline at end of file
diff --git a/bullseye_workflow/run_bullseye_WMH_segmentation.py b/bullseye_workflow/run_bullseye_WMH_segmentation.py
index 5e08c5e..0eb4137 100644
--- a/bullseye_workflow/run_bullseye_WMH_segmentation.py
+++ b/bullseye_workflow/run_bullseye_WMH_segmentation.py
@@ -23,73 +23,48 @@ import matplotlib
 import matplotlib.pyplot as plt
 import pandas as pd
 import os
+import sys
 
 
 
-def create_bullseye_lesion(subjectlist):
+def create_bullseye_lesion(subject, base_dir, freesurfer_dir, flair_file, wmh_file):
     """
     a workflow to extract wmh in bullseye segmented wm
     """
 
-    # specify the location of the preprocessed data    
-    working_dir="/data/pt_life_whm/Data/wd/" # where intermediate files will be saved 
-    freesurfer_dir="/data/pt_life_freesurfer/freesurfer_all" # where freesurfer output directories are located
-    flairdir="/data/pt_life_whm/Data/LST/" # where FLAIR/T2 images and wmh probability maps are located (assumed they are in the same directory)
-    outdir="/data/pt_life_whm/Data/WMparcellations_indiv/" # where outputs will be saved
-
-    os.environ['SUBJECTS_DIR'] = freesurfer_dir
-
-
-    # create node which returns subjects as iterables	
-    identitynode = Node(util.IdentityInterface(fields=['subject']),
-                    name='identitynode')
-    identitynode.iterables = ('subject', subjectlist)
-    
-
-
-    # select flair/t2 and wmh images
-    template_lesion = {"LESION":"sub-{subject_id}/ples_lpa_mFLAIR_bl.nii.gz", #exact file name of wmh probability map
-                      "FLAIR":"sub-{subject_id}/mFLAIR_bl.nii.gz"} #exact name of FLAIR/T2 image used for registration
-    fileselector_lesion = pe.Node(SelectFiles(template_lesion), name='fileselect_lesion')
-    fileselector_lesion.inputs.base_dir=base_directory
-    fileselector_lesion.subjects_dir=freesurfer_dir
-
-
     # main workflow
     bullseye_lesion = Workflow(name="bullseyelesion_bbreg")
-    bullseye_lesion.base_dir=working_dir
+    bullseye_lesion.base_dir=base_dir
     
     # bullseye wm segmentation part    
     bullseye=create_bullseye_pipeline()
     bullseye.inputs.inputnode.scans_dir=freesurfer_dir
+    bullseye.inputs.inputnode.subject_id=subject
 
     # wmh registration to freesurfer
     lesionreg=create_flairreg_pipeline()
     lesionreg.inputs.inputnode.freesurfer_dir=freesurfer_dir
-    lesionreg.inputs.inputnode.flair_dir=flairdir
+    lesionreg.inputs.inputnode.FLAIR=flair_file
+    lesionreg.inputs.inputnode.LESION=wmh_file
 
     # extract wmh volumes from be segmentation 
     extractparc=Node(interface=util.Function(input_names=['in1_file', 'in2_file', 'subject_id', 'option'], output_names=['out_file'],
                                                function=extract_parcellation), name='extractparc')
-    extractparc.inputs.option="new"                                           
+    extractparc.inputs.option="new"
+    extractparc.inputs.subject_id=subject
     
     # generate datasink
     datasink=Node(name="datasink", interface=nio.DataSink())
-    datasink.inputs.base_directory = outdir
+    datasink.inputs.base_directory = base_dir
     datasink.inputs.substitutions = [('_subject_', '')]
 
     # connect all nodes
     bullseye_lesion.connect([
-        (identitynode, bullseye, [("subject", "inputnode.subject_id")]),
-        (identitynode, fileselector_lesion,[('subject', 'subject_id')]),
-        (fileselector_lesion, lesionreg,[( 'FLAIR', 'inputnode.FLAIR')]),
-        (fileselector_lesion, lesionreg,[( 'LESION', 'inputnode.LESION')]),
         (bullseye, extractparc, [( 'outputnode.out_bullseye_wmparc', 'in2_file')]),
         (lesionreg, extractparc, [('outputnode.lesion2anat', 'in1_file')]),
         (bullseye, datasink,[( 'outputnode.out_bullseye_wmparc', '@bullseye')]),
         (lesionreg, datasink,[( 'outputnode.lesion2anat', '@lesion2anat')]),
         (lesionreg, datasink,[( 'outputnode.flair2anat', '@flair2anat')]),
-        (identitynode, extractparc,[( 'subject', 'subject_id')]),
         (extractparc, datasink,[( 'out_file', '@lesionparc')]),
     ])
    
@@ -98,16 +73,16 @@ def create_bullseye_lesion(subjectlist):
     return bullseye_lesion
 
 
-# select participants
-df=pd.read_csv('/data/participants.csv', sep=',')
-
-df=df[df["dementia"]==0]
-subj=df['pseudonym'].values
+# command-line arguments, normally passed by run_bullseye.sh
+subject=sys.argv[1]        # subject ID
+base_dir=sys.argv[2]       # working directory for intermediate files and outputs
+freesurfer_dir=sys.argv[3] # directory containing the FreeSurfer subject folders
+flair_file=sys.argv[4]     # FLAIR/T2 image used for registration
+wmh_file=sys.argv[5]       # WMH probability map
 
-# run workflow in multi-thread mode
-bullseye_lesion=create_bullseye_lesion(subj)
-bullseye_lesion.write_graph(graph2use='colored', simple_form=True)
-bullseye_lesion.run(plugin='MultiProc', plugin_args={'n_procs' : 16})    #
+# run workflow in single-thread mode
+bullseye_lesion=create_bullseye_lesion(subject, base_dir, freesurfer_dir, flair_file, wmh_file)
+#bullseye_lesion.write_graph(graph2use='colored', simple_form=True)
+bullseye_lesion.run()
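+
+# nipype can still parallelize nodes within this single-subject workflow if
+# desired, e.g.: bullseye_lesion.run(plugin='MultiProc', plugin_args={'n_procs': 4})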
 
 
 
diff --git a/documentation_of_singularity_image_creation b/documentation_of_singularity_image_creation
new file mode 100644
index 0000000..7dbbb4c
--- /dev/null
+++ b/documentation_of_singularity_image_creation
@@ -0,0 +1,32 @@
+# Installation of neurodocker -> https://www.repronim.org/neurodocker/user_guide/quickstart.html
+# requires a miniconda3 installation
+
+conda create -n neurodocker python=3.9
+conda activate neurodocker
+python -m pip install neurodocker
+neurodocker --help
+
+# on the build host, re-activate the environment after ssh
+ssh comps08h04
+source activate neurodocker
+cd /tmp/neurodocker
+
+# Generate singularity image
+neurodocker generate singularity \
+    --pkg-manager apt \
+    --base-image neurodebian:bullseye \
+    --freesurfer version=7.3.0 \
+    --miniconda version=latest conda_install="python==3.10.13 nipype==1.8.6 nibabel==5.2.0 numpy==1.26.3 pandas==2.1.4" \
+    --user nonroot > becon   # "becon" is the generated Singularity definition file
+
+# build a new image from the generated Singularity definition file
+
+export SINGULARITY_TMPDIR=/tmp/TMP
+export SINGULARITY_CACHEDIR=/tmp/CACHE
+
+singularity build --fakeroot bullseye.sif becon
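+
+# quick sanity check of the built image (sketch; assumes the build succeeded)
+singularity exec bullseye.sif python -c "import nipype; print(nipype.__version__)"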
+
+
+# Note: the build can fail when the target filesystem is quota-limited, e.g.:
+#   While performing build: while creating SIF: while unloading container: close /data/u_fbeyer_software/singularity_images/bullseye_container/bullseye.sif: disk quota exceeded
+# If so, build to a location with enough free space and copy the image afterwards.
+
-- 
GitLab