From 1292aeb16d7ac06148d8ffcdc934ca37e32204e4 Mon Sep 17 00:00:00 2001 From: hamshkhawar Date: Mon, 1 Apr 2024 17:00:30 -0500 Subject: [PATCH 01/11] conditional_cwl_workflow --- README.md | 105 +++++++ configuration/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 0 -> 174 bytes configuration/analysis/BBBC001.yml | 14 + configuration/analysis/BBBC039.yml | 13 + configuration/analysis/__init__.py | 0 configuration/analysis/sample.yml | 13 + configuration/segmentation/BBBC001.yml | 11 + configuration/segmentation/BBBC039.yml | 11 + configuration/segmentation/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 0 -> 187 bytes configuration/segmentation/sample.yml | 12 + .../basic-flatfield-estimation.cwl | 0 .../bbbcdownload.cwl | 0 .../file-renaming.cwl | 0 .../image_assembler.cwl | 0 {cwl_adapters => cwl-adapters}/montage.cwl | 0 .../ome-converter.cwl | 0 .../precompute_slide.cwl | 0 pyproject.toml | 38 +++ src/polus/image/workflows/__init__.py | 0 src/polus/image/workflows/__main__.py | 65 ++++ .../__pycache__/__init__.cpython-310.pyc | Bin 0 -> 186 bytes .../__pycache__/__main__.cpython-310.pyc | Bin 0 -> 1693 bytes .../__pycache__/utils.cpython-310.pyc | Bin 0 -> 2785 bytes src/polus/image/workflows/utils.py | 68 +++++ workflows/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 0 -> 170 bytes .../__pycache__/cwl_analysis.cpython-310.pyc | Bin 0 -> 9109 bytes .../cwl_nuclear_segmentation.cpython-310.pyc | Bin 0 -> 8690 bytes workflows/cwl_analysis.py | 289 ++++++++++++++++++ workflows/cwl_nuclear_segmentation.py | 262 ++++++++++++++++ 32 files changed, 901 insertions(+) create mode 100644 README.md create mode 100644 configuration/__init__.py create mode 100644 configuration/__pycache__/__init__.cpython-310.pyc create mode 100644 configuration/analysis/BBBC001.yml create mode 100644 configuration/analysis/BBBC039.yml create mode 100644 configuration/analysis/__init__.py create mode 100644 configuration/analysis/sample.yml create mode 100644 configuration/segmentation/BBBC001.yml create mode 100644 configuration/segmentation/BBBC039.yml create mode 100644 configuration/segmentation/__init__.py create mode 100644 configuration/segmentation/__pycache__/__init__.cpython-310.pyc create mode 100644 configuration/segmentation/sample.yml rename {cwl_adapters => cwl-adapters}/basic-flatfield-estimation.cwl (100%) rename {cwl_adapters => cwl-adapters}/bbbcdownload.cwl (100%) rename {cwl_adapters => cwl-adapters}/file-renaming.cwl (100%) rename {cwl_adapters => cwl-adapters}/image_assembler.cwl (100%) rename {cwl_adapters => cwl-adapters}/montage.cwl (100%) rename {cwl_adapters => cwl-adapters}/ome-converter.cwl (100%) rename {cwl_adapters => cwl-adapters}/precompute_slide.cwl (100%) create mode 100644 pyproject.toml create mode 100644 src/polus/image/workflows/__init__.py create mode 100644 src/polus/image/workflows/__main__.py create mode 100644 src/polus/image/workflows/__pycache__/__init__.cpython-310.pyc create mode 100644 src/polus/image/workflows/__pycache__/__main__.cpython-310.pyc create mode 100644 src/polus/image/workflows/__pycache__/utils.cpython-310.pyc create mode 100644 src/polus/image/workflows/utils.py create mode 100644 workflows/__init__.py create mode 100644 workflows/__pycache__/__init__.cpython-310.pyc create mode 100644 workflows/__pycache__/cwl_analysis.cpython-310.pyc create mode 100644 workflows/__pycache__/cwl_nuclear_segmentation.cpython-310.pyc create mode 100644 workflows/cwl_analysis.py create mode 100644 
workflows/cwl_nuclear_segmentation.py
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..a9bef17
--- /dev/null
+++ b/README.md
@@ -0,0 +1,105 @@
+# Common Workflow Language (CWL) Workflows
+
+CWL feature extraction workflows for imaging datasets.
+
+## Workflow Steps:
+
+Create a [Conda](https://conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#activating-an-environment) environment with Python ">=3.9,<3.12".
+
+#### 1. Install polus-plugins.
+
+- clone the image-tools repository
+`git clone https://github.com/camilovelezr/image-tools.git ../`
+- cd `image-tools`
+- create a new branch
+`git checkout -b hd2 remotes/origin/hd2`
+- `pip install .`
+
+#### 2. Install workflow-inference-compiler.
+- clone the workflow-inference-compiler repository
+`git clone https://github.com/camilovelezr/workflow-inference-compiler.git ../`
+- cd `workflow-inference-compiler`
+- create a new branch
+`git checkout -b hd2 remotes/origin/hd2`
+- `pip install -e ".[all]"`
+
+#### 3. Install image-workflows.
+- cd `image-workflows`
+- `poetry install`
+
+#### Note:
+Ensure that [Docker Desktop](https://www.docker.com/products/docker-desktop/) is running in the background. To verify that it is operational, you can use the following command:
+`docker run -d -p 80:80 docker/getting-started`
+This command launches the `docker/getting-started` container in detached mode (`-d` flag), exposing port 80 on your local machine (`-p 80:80`). It is a simple way to test that Docker Desktop is functioning correctly.
+
+## Details
+This workflow integrates eight distinct plugins: it retrieves data from the [Broad Bioimage Benchmark Collection](https://bbbc.broadinstitute.org/), renames files, corrects uneven illumination, segments nuclear objects, and culminates in the extraction of features from the identified objects.
+
+Below are the specifics of the plugins employed in the workflow.
+1. [bbbc-download-plugin](https://github.com/saketprem/polus-plugins/tree/bbbc_download/utils/bbbc-download-plugin)
+2. [file-renaming-tool](https://github.com/PolusAI/image-tools/tree/master/formats/file-renaming-tool)
+3. [ome-converter-tool](https://github.com/PolusAI/image-tools/tree/master/formats/ome-converter-tool)
+4. [basic-flatfield-estimation-tool](https://github.com/PolusAI/image-tools/tree/master/regression/basic-flatfield-estimation-tool)
+5. [apply-flatfield-tool](https://github.com/PolusAI/image-tools/tree/master/transforms/images/apply-flatfield-tool)
+6. [kaggle-nuclei-segmentation](https://github.com/hamshkhawar/image-tools/tree/kaggle-nuclei_seg/segmentation/kaggle-nuclei-segmentation)
+7. [polus-ftl-label-plugin](https://github.com/hamshkhawar/image-tools/tree/kaggle-nuclei_seg/transforms/images/polus-ftl-label-plugin)
+8. [nyxus-plugin](https://github.com/PolusAI/image-tools/tree/kaggle-nuclei_seg/features/nyxus-plugin)
+
+## Execute CWL workflows
+Two CWL workflows can be executed for a given dataset:
+1. segmentation
+2. analysis
+
+The segmentation workflow uses plugins `1 to 7`; the analysis workflow uses plugins `1 to 8`.
+To execute a workflow for a new dataset, fill in the sample YAML file with parameter values and save it in the desired subdirectory of the `configuration` folder as `dataset.yml`.
+
+To run a workflow without background correction, set `background_correction` to false. In this case, the workflow skips steps `4 and 5`.
+
+`python -m polus.image.workflows --name="BBBC001" --workflow=analysis`
+
+A directory named `outputs` is generated. It contains the CWL CommandLineTools (CLTs) for each plugin and the workflow YAML files; all step outputs are stored within the `outdir` directory.
+```
+outputs
+├── experiment
+│   └── cwl_adapters
+|       experiment.cwl
+|       experiment.yml
+|
+└── outdir
+    └── experiment
+        ├── step 1 BbbcDownload
+        │   └── outDir
+        │       └── bbbc.outDir
+        │           └── BBBC
+        │               └── BBBC039
+        │                   └── raw
+        │                       ├── Ground_Truth
+        │                       │   ├── masks
+        │                       │   └── metadata
+        │                       └── Images
+        │                           └── images
+        ├── step 2 FileRenaming
+        │   └── outDir
+        │       └── rename.outDir
+        ├── step 3 OmeConverter
+        │   └── outDir
+        │       └── ome_converter.outDir
+        ├── step 4 BasicFlatfieldEstimation
+        │   └── outDir
+        │       └── estimate_flatfield.outDir
+        ├── step 5 ApplyFlatfield
+        │   └── outDir
+        │       └── apply_flatfield.outDir
+        ├── step 6 KaggleNucleiSegmentation
+        │   └── outDir
+        │       └── kaggle_nuclei_segmentation.outDir
+        ├── step 7 FtlLabel
+        │   └── outDir
+        │       └── ftl_plugin.outDir
+        └── step 8 NyxusPlugin
+            └── outDir
+                └── nyxus_plugin.outDir
+
+```
+#### Note:
+Steps 7 and 8 are executed only in the case of the `analysis` workflow.
\ No newline at end of file
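The CLI described in the README is a thin Typer wrapper around the configuration loader and the workflow classes added later in this patch. For readers who prefer to drive it from Python, here is a minimal sketch of the equivalent call; the dataset name and configuration path are illustrative, and any YAML that follows the sample layout should work the same way:

```python
"""Roughly what `python -m polus.image.workflows --name="BBBC039" --workflow=segmentation` does."""
from pathlib import Path

from polus.image.workflows.utils import LoadYaml
from workflows.cwl_nuclear_segmentation import CWLSegmentationWorkflow

# Illustrative configuration file; BBBC039.yml ships with this patch.
config_path = Path("configuration/segmentation/BBBC039.yml")

# LoadYaml validates the workflow name and the path; parse_yaml() reads the parameters.
params = LoadYaml(workflow="segmentation", config_path=config_path).parse_yaml()

# The parsed dictionary maps directly onto the workflow class constructor.
workflow = CWLSegmentationWorkflow(**params)
workflow.workflow()  # builds, compiles, and runs the CWL pipeline via the WIC Python API
```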
diff --git a/configuration/__init__.py b/configuration/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/configuration/__pycache__/__init__.cpython-310.pyc b/configuration/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c17568dadd0c07e40a8d279ac04f2ca270817e15
GIT binary patch
literal 174
literal 0
HcmV?d00001
diff --git a/configuration/analysis/BBBC001.yml b/configuration/analysis/BBBC001.yml
new file mode 100644
index 0000000..7efe214
--- /dev/null
+++ b/configuration/analysis/BBBC001.yml
@@ -0,0 +1,14 @@
+---
+name : BBBC001
+file_pattern : /.*/.*/.*/Images/.*/.*_{row:c}{col:dd}f{f:dd}d{channel:d}.tif
+out_file_pattern : x{row:dd}_y{col:dd}_p{f:dd}_c{channel:d}.tif
+image_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c{c:d}.ome.tif
+seg_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c0.ome.tif
+ff_pattern: "x00_y03_p0\\(0-5\\)_c{c:d}_flatfield.ome.tif"
+df_pattern: "x00_y03_p0\\(0-5\\)_c{c:d}_darkfield.ome.tif"
+group_by: c
+map_directory: false
+features: ALL
+file_extension: pandas
+background_correction: false
+
diff --git a/configuration/analysis/BBBC039.yml b/configuration/analysis/BBBC039.yml
new file mode 100644
index 0000000..308a274
--- /dev/null
+++ b/configuration/analysis/BBBC039.yml
@@ -0,0 +1,13 @@
+---
+name : BBBC039
+file_pattern : /.*/.*/.*/Images/.*/.*_{row:c}{col:dd}_s{s:d}_w{channel:d}.*.tif
+out_file_pattern : x{row:dd}_y{col:dd}_p{s:dd}_c{channel:d}.tif
+image_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c{c:d}.ome.tif
+seg_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c1.ome.tif
+ff_pattern: "x\\(00-15\\)_y\\(01-24\\)_p0\\(1-9\\)_c{c:d}_flatfield.ome.tif"
+df_pattern: "x\\(00-15\\)_y\\(01-24\\)_p0\\(1-9\\)_c{c:d}_darkfield.ome.tif"
+group_by: c
+map_directory: false
+features: "ALL_INTENSITY"
+file_extension: pandas
+background_correction: false
\ No newline at end of file
diff --git a/configuration/analysis/__init__.py b/configuration/analysis/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/configuration/analysis/sample.yml b/configuration/analysis/sample.yml
new file mode 100644
index 0000000..47ffb02
--- /dev/null
+++ b/configuration/analysis/sample.yml
@@ -0,0 +1,13 @@
+---
+name :
+file_pattern :
+out_file_pattern :
+image_pattern:
+seg_pattern:
+ff_pattern:
+df_pattern:
+group_by:
+map_directory:
+features:
+file_extension:
+background_correction:
\ No newline at end of file
diff --git a/configuration/segmentation/BBBC001.yml b/configuration/segmentation/BBBC001.yml
new file mode 100644
index 0000000..4ed7653
--- /dev/null
+++ b/configuration/segmentation/BBBC001.yml
@@ -0,0 +1,11 @@
+---
+name : BBBC001
+file_pattern : /.*/.*/.*/Images/.*/.*_{row:c}{col:dd}f{f:dd}d{channel:d}.tif
+out_file_pattern : x{row:dd}_y{col:dd}_p{f:dd}_c{channel:d}.tif
+image_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c{c:d}.ome.tif
+seg_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c0.ome.tif
+ff_pattern: "x00_y03_p0\\(0-5\\)_c{c:d}_flatfield.ome.tif"
+df_pattern: "x00_y03_p0\\(0-5\\)_c{c:d}_darkfield.ome.tif"
+group_by: c
+map_directory: false
+background_correction: false
\ No newline at end of file
diff --git a/configuration/segmentation/BBBC039.yml b/configuration/segmentation/BBBC039.yml
new file mode 100644
index 0000000..1884878
--- /dev/null
+++ b/configuration/segmentation/BBBC039.yml
@@ -0,0 +1,11 @@
+---
+name : BBBC039
+file_pattern : /.*/.*/.*/Images/.*/.*_{row:c}{col:dd}_s{s:d}_w{channel:d}.*.tif
+out_file_pattern : x{row:dd}_y{col:dd}_p{s:dd}_c{channel:d}.tif
+image_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c{c:d}.ome.tif
+seg_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c1.ome.tif
+ff_pattern: "x\\(00-15\\)_y\\(01-24\\)_p0\\(1-9\\)_c{c:d}_flatfield.ome.tif"
+df_pattern: "x\\(00-15\\)_y\\(01-24\\)_p0\\(1-9\\)_c{c:d}_darkfield.ome.tif"
+group_by: c
+map_directory: false
+background_correction: false
\ No newline at end of file
diff --git a/configuration/segmentation/__init__.py b/configuration/segmentation/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/configuration/segmentation/__pycache__/__init__.cpython-310.pyc b/configuration/segmentation/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..770a790643538f42c1f16a77aca1131d8b354f52
GIT binary patch
literal 187
literal 0
HcmV?d00001
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,38 @@
+readme = "README.md"
+packages = [{include = "polus", from = "src"}]
+
+[tool.poetry.dependencies]
+python = ">=3.9,<3.12"
+typer = "^0.9.0"
+pyyaml = "^6.0.1"
+pydantic = "^2.6.1"
+cwl-utils = "0.31"
+toil = "^5.12"
+polus-plugins = {path = "../image-tools", develop = true}
+workflow-inference-compiler = {path = "../workflow-inference-compiler", develop = true}
+
+[tool.poetry.group.dev.dependencies]
+jupyter = "^1.0.0"
+nbconvert = "^7.11.0"
+pytest = "^7.4.4"
+bump2version = "^1.0.1"
+pre-commit = "^3.3.3"
+black = "^23.3.0"
+ruff = "^0.0.274"
+mypy = "^1.4.0"
+pytest-xdist = "^3.3.1"
+pytest-sugar = "^0.9.7"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.pytest.ini_options]
+addopts = [
+    "--import-mode=importlib",
+]
\ No newline at end of file
diff --git a/src/polus/image/workflows/__init__.py b/src/polus/image/workflows/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/polus/image/workflows/__main__.py b/src/polus/image/workflows/__main__.py
new file mode 100644
index 0000000..863f1ef
--- /dev/null
+++ b/src/polus/image/workflows/__main__.py
@@ -0,0 +1,65 @@
+"""CWL Workflow."""
+import logging
+import typer
+from pathlib import Path
+from polus.image.workflows.utils import LoadYaml
+from
workflows.cwl_analysis import CWLAnalysisWorkflow +from workflows.cwl_nuclear_segmentation import CWLSegmentationWorkflow +from pathlib import Path + + +app = typer.Typer() + +# Initialize the logger +logging.basicConfig( + format="%(asctime)s - %(name)-8s - %(levelname)-8s - %(message)s", + datefmt="%d-%b-%y %H:%M:%S", +) +logger = logging.getLogger("WIC Python API") +logger.setLevel(logging.INFO) + + +@app.command() +def main( + name: str = typer.Option( + ..., + "--name", + "-n", + help="Name of imaging dataset of Broad Bioimage Benchmark Collection (https://bbbc.broadinstitute.org/image_sets)" + ), + workflow: str = typer.Option( + ..., + "--workflow", + "-w", + help="Name of cwl workflow" + ) +) -> None: + + """Execute CWL Workflow.""" + + logger.info(f"name = {name}") + logger.info(f"workflow = {workflow}") + + config_path = Path(__file__).parent.parent.parent.parent.parent.joinpath(f"configuration/{workflow}/{name}.yml") + print(config_path) + + + model = LoadYaml(workflow=workflow, config_path=config_path) + params = model.parse_yaml() + + if workflow == "analysis": + logger.info(f"Executing {workflow}!!!") + model = CWLAnalysisWorkflow(**params) + model.workflow() + + if workflow == "segmentation": + logger.info(f"Executing {workflow}!!!") + model = CWLSegmentationWorkflow(**params) + model.workflow() + + + logger.info("Completed CWL workflow!!!") + + +if __name__ == "__main__": + app() \ No newline at end of file diff --git a/src/polus/image/workflows/__pycache__/__init__.cpython-310.pyc b/src/polus/image/workflows/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1a9d53681adefe19e5647737bdc0f2bdfa0cd30b GIT binary patch literal 186 zcmd1j<>g`kf(jAtG!Xq5L?8o3AjbiSi&=m~3PUi1CZpdXD p=ad#hWb_d-@$s2?nI-Y@dIgoYIBatBQ%ZAE?LgKRGXV(}1_0d3FHHad literal 0 HcmV?d00001 diff --git a/src/polus/image/workflows/__pycache__/__main__.cpython-310.pyc b/src/polus/image/workflows/__pycache__/__main__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cb595cd260f47e14d2642d5dc4890297fee6431c GIT binary patch literal 1693 zcmZuxOKa;!6xNKMwq@s?^in8F!6jbUg03#1Elu2(rlh%nw1f^6A#29A61~vKO)Q*M zx=sH;3E0q0|4r#q+pfCq>Rw83&&aWpz?GPp^PD;J%{LR*>s1N9>&;r<(9WbAv=+E#NfazSjroGStQi3QIrA>-T~Kybd+r0r&}-5guZulJkZ?l;d`xX}7>8oC+L zshhEWnB|T0qpo#2&W3Sh9h@F@a;@DK6Xv)b<(DTQS#jU;LwDdu0}HZT&N5*-Okq>j zp&tt$vkqD04MR7*w7PK+FsK+N+#6K~f+m=1clbu0$Wvn` z-6#`8b)d*mIkQpbR_^6;tqQIC(nR?U{esH)>I_H(@l}X--w|JXS9}>)6Z28?Olmdq ztuL>c2XI=?D46R49_v#puPqP~O2dnzzCRejdT>1U8)WB7XAHc=X$9aZYIDDn0B?!m zKF~C;&esqG$mQMLU1IVlRLx&><1h(W#%Ni@VnGNz`CV$6g%-qv0ZR+TkAQvRfC|HL z`hLJ1r_d5N1=17aBK9M(ZlNZrA7!Zwaij`f&YUsaom2rsS!OEEwWX%wU6~t95vztV zWkETi8}hU+yrO&aqy0T+DYxBT&*lE`WBZHP8;P67?Qi0qeQ*thoCD+LY+RC$+dTE` zBo0P=?y(m;#OJ^Y#>uMkb&1x13iunIo*N~vHN*? 
literal 0
HcmV?d00001
diff --git a/src/polus/image/workflows/__pycache__/utils.cpython-310.pyc b/src/polus/image/workflows/__pycache__/utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0142df99465563513486e966d45398f02b11068c
GIT binary patch
literal 2785
literal 0
HcmV?d00001
diff --git a/src/polus/image/workflows/utils.py b/src/polus/image/workflows/utils.py
new file mode 100644
index 0000000..7daa9b7
--- /dev/null
+++ b/src/polus/image/workflows/utils.py
@@ -0,0 +1,68 @@
+import pydantic
+from pathlib import Path
+from typing import Dict
+from typing import Union
+import yaml
+
+
+GITHUB_TAG = "https://raw.githubusercontent.com"
+
+
+ANALYSIS_KEYS = ["name", "file_pattern", "out_file_pattern", "image_pattern", "seg_pattern", "ff_pattern", "df_pattern", "group_by", "map_directory", "features", "file_extension", "background_correction"]
+SEG_KEYS = ["name", "file_pattern", "out_file_pattern", "image_pattern", "seg_pattern", "ff_pattern", "df_pattern", "group_by", "map_directory", "background_correction"]
+
+
+class DataModel(pydantic.BaseModel):
+    data: Dict[str, Dict[str, Union[str, bool]]]
+
+
+class LoadYaml(pydantic.BaseModel):
+    """Validation of a dataset configuration YAML."""
+    workflow: str
+    config_path: Union[str, Path]
+
+    @pydantic.validator("config_path", pre=True)
+    @classmethod
+    def validate_path(cls, value: Union[str, Path]) -> Union[str, Path]:
+        """Validate that the configuration path exists."""
+        if not Path(value).exists():
+            msg = f"{value} does not exist! Please check the path and try again."
+            raise ValueError(msg)
+        if isinstance(value, str):
+            return Path(value)
+        return value
+
+    @pydantic.validator("workflow", pre=True)
+    @classmethod
+    def validate_workflow_name(cls, value: str) -> str:
+        """Validate the workflow name."""
+        if value not in ("analysis", "segmentation", "visualization"):
+            msg = "Please choose a valid workflow name, i.e. analysis, segmentation or visualization."
+            raise ValueError(msg)
+        return value
+
+    def parse_yaml(self) -> Dict[str, Union[str, bool]]:
+        """Parse the YAML configuration file for a dataset."""
+        with open(self.config_path, "r") as f:
+            data = yaml.safe_load(f)
+
+        # Fail early if any parameter was left empty in the configuration file.
+        missing_values = any(v is None for v in data.values())
+
+        if missing_values:
+            msg = "Not all parameters are defined! Please check the configuration file."
+            raise ValueError(msg)
+
+        if self.workflow == "analysis":
+            if data["background_correction"] is True:
+                if list(data.keys()) != ANALYSIS_KEYS:
+                    msg = "Please check the parameters again for the analysis workflow!"
+                    raise ValueError(msg)
+
+        if self.workflow == "segmentation":
+            if data["background_correction"] is True:
+                if list(data.keys()) != SEG_KEYS:
+                    msg = "Please check the parameters again for the segmentation workflow!"
+                    raise ValueError(msg)
+        return data
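The two validators and `parse_yaml` above are the only guards between a user-edited YAML file and the workflow classes, so it is worth seeing what they reject. A small sketch of the failure modes; the "registration" workflow name and the missing file are illustrative inputs, not values used anywhere in this patch:

```python
from polus.image.workflows.utils import LoadYaml

# A workflow name outside analysis/segmentation/visualization is rejected up front.
try:
    LoadYaml(workflow="registration", config_path="configuration/analysis/BBBC001.yml")
except ValueError as err:
    print(err)  # invalid workflow name

# A configuration path that does not exist is also rejected before any parsing happens.
try:
    LoadYaml(workflow="analysis", config_path="configuration/analysis/does-not-exist.yml")
except ValueError as err:
    print(err)  # path does not exist

# parse_yaml() additionally raises if any value in the YAML was left empty, and
# (when background_correction is true) if the keys do not match the expected key list.
params = LoadYaml(workflow="analysis", config_path="configuration/analysis/BBBC001.yml").parse_yaml()
print(sorted(params))
```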
diff --git a/workflows/__init__.py b/workflows/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/workflows/__pycache__/__init__.cpython-310.pyc b/workflows/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0064832c865029be90ae7eabacbc5d85f372cdc0
GIT binary patch
literal 170
literal 0
HcmV?d00001
diff --git a/workflows/__pycache__/cwl_analysis.cpython-310.pyc b/workflows/__pycache__/cwl_analysis.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..886cc84acde030a78fb94a6c089a3ebda1592515
GIT binary patch
literal 9109
z;Z|D(ru;|jqvMe*^dmID6v|j7hB#11>aND4@~CD_yawLr2WaL^-3_FFWP3Be?Zv~huHL_AhF&=SbcZgFfJ67fn@Kh~}(e|7Gj zc2GFbM}=K;TpnSzcqOsMOCyjLIP_`no3q{rXGSW)1G)-GIfr4sxHdQEhxsa)Ra=<# z8D>==&FUcz^1+9!D39vTt_Iu-C#DU?97@y+lZBJp;UO0=mt_7zTMR-u9b_Axo&wLE z^u>!8Q;R?9bKw*3*LA1r4U%Yrg)_%&d+jaDDKX)fdr2SopBb#fzZj7G%;0 zqMaQsQr!<>e&l(|Q!fntzBq+RX~_rU07=FYq-MZ4A&d85Z&D-f4iY~AzxNWsd8+rY zLcJ(i#93h+o>C|s0I4D1ar(lQ9#5+JsmhM3t%8iKH3)* z^jNC)MGfy2)Vel2cRXVeq6J7<&aSRy!ubYuEXrkCb?I!}iKPD&Gh))c79_Ny)`*nq z@ZzzgoQG=@74sjp4LnjhXB1@`c)u{#4i%D5L|Ph2=ok{g5X6j&kWiJPs=v|BDm$Pu zC|ZIli^7LoBz8X-z){cKcaWwHHz7b7^TyD)D#uYRlAf|eDeT)(Dbt8S#4;AtTq zw9+EH^MI4W>IYsMJV{0^Ee`sSl$=14fJX=f=A*T`z!#I6KBrw!pT%4>ae)*Ix}8`z zV=htxq`k!oh@y>&80s&Tub{n|j?XR*&)q;Q;@L6c7l?i4g_A@_44-?iaj9{+jsWv= zL%a=~@~feM{{(G_9`{7Mn--5jTKbhz-@h-ZY@?<6V4KnQU zAV_9iZyqJ|Mb~4@C5KXO+HG+tR*DA6!|L*w#}9!sCVxgR)Ij%&rm5PKQ=0aj_CzlZ z-#+#P{us`*Fc6CzNfJ`RBhTbxG~{`Zuc0O6PZS=j2L<%OY>ta~H{g~+RH5#m>|iRV zy}!h1eg!cFq@EkVgW`zHnl`+BBQsiv`=I6~vfIXK#hC<8Hz9H<{VY02;e-6b?UYcD-KQz2EirJTdXciL!B%0pgh; z^|y}f`cacYj&@V<9cTh##jImvnq}A^rw5;ap{KWf_nF&YDDSEV?iTJ*^vlH-*Z~)iJNBrEJ&&xCyMmOe%p5FZDe}F2TB-jYN77vcu ze8x!A8OIU)<}W|%i=!-u|zLYayT=CW<%HNXq%`BI8) zuib_`4~v({0yzZ~sVbam$P;qTH|6NIQxW{Z0Lf69Y;dYu^P`S|rLOT#s(~(O5_U9MX?h^tt>ns=RDP;J`YoU-|GcwIrci-CH0-^Y|!hc ztJi^HD@v|YMo;cz7HmxXpH#)?&_byJk?Ht5sfhqWW~h(yvDK4DK-TvwEFDwavJSJT zA-J$qU8_7X^~#eoa z$V3Wd1Hj|pX;(`SeLO~D1=+ZK2HbCQfG`6tkBfXA@^hMtT-`2m1Fi*0&`~b|QU|0q z2U(Pm$nzn#2NXF=uE7kHRX9&@Z6#C*Pz8e8t%;vaS2Z)|2{{O46I^8Vc9HFqtFNG5 zWo49WSQGg{v}U_ql5j-{S41X|;EKov>cCTUPvZR)>eHytpgs!>6<{d3xH>|4Zn}O| z;-RmtiC>M1fHvHv5PQyA60hMJfUg1GnA^d!ge;8=%q+P}Yz5_WY!&5s%wEIn(%fv^ zo`JeV&6@Z}xsEwq2dz;FSWNeMxt{4>K>Y&VZPXW0Uqt;P>MC1D`4T&c@@1@Y3agl| zo_ufoeRf)AZGivpR6%jMMp_0rQv+F7bH_X=hvuX=>oE?>$t&YaqcS@K>~#bp*2vnu z4DEGRrqY!0tB24+BWwIxR_|_L3|C=U5^mg(y~?;rwcNpZ+5vkWTIa&>2h+^>=CdxX zW*1fVV*F<}4rl2@PI+wrR`fQScz+&mA=I`|^4iFRh*y9SSFL5_F2tTdF6w$+bIM4LL6fz; zKyZu?e{dByI{_z!`3#iJ>D6J5ys=e(UHmzq#Woc(BS1P+=3eMj$Z|SMr2Ot=fu_u? 
zJ2e~-DbBo;GTcrDHw}QvF3B7NNp!-nNkh{8z{h3xD>4@@DDff_^G-=d@9*t96$CWb z@|z@J=X(#?Lv~ zLMl%9L+P1rcj3Kualz&wjWAg$OmdWz!*?tUyAyi*Cezte3+;u?$&LMQ(b5AJYr z+i%0h&di65x*Rj7wM(fu4KE8ZmXyoI8^lw}fik`{`+ zDR(GHGz77}Aodo-#zOKau^{4i1ZGHXli8-5 ziGJ!ev=uGur-c7(jvP~U!01nmf~A_6m5tMN0kx>4-uy4?x6q?etUvc>e4|xn@A|hZ z<%$MKt)`j453~7ZL4;A=6=0To)Ac3$l+86mb#tR{bPhZxHERH?>^k0oedI zByL)9-3J3Nn2eNM_{?qB6*mBs8p%FgPp0NaeF|kfM8hovVZz3s%<<1oDGR`f*#+f( z+;PbKo4hk1wLv@#ruL0l*8bD9wEr-dw0}1%+V9P(^*hV9Oly73G>c|=y|!jq%e51NR;izH zR&n>$zmSK)IgXU6`A&AcKcZY$OiAH2g;M1?FH%kKR-vM<75)!7>IJX> literal 0 HcmV?d00001 diff --git a/workflows/__pycache__/cwl_nuclear_segmentation.cpython-310.pyc b/workflows/__pycache__/cwl_nuclear_segmentation.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..60bc5be35bc33f9a93503daaec5c9beefec275b4 GIT binary patch literal 8690 zcma)B>5m&ncJFRBo6R1Qb7*vo@H;+Gvp#4ze-LaH+ezxvXov zJX+z$4)o1czB)S2%eyD2$90Vrne|v>)+1x{B-3wc_0q4fK5e6}OY;*y?FeUlGNEMJC2fr(g}7Fr3E$H z5;xl>JH^&eQ(>psnaA2@l|9Fv$GgT}!2C;mad!!QUSux%EVHw!j<8;0FQaA!Bfi33 zMd=ty-(jzzw91x2rq|`k%XeLJ&=;ChlTOs#_ zJKNXo1w9@H5pOtI&omXmc0c9GdG$@$k>BO+d3S?eaoY{L{!S3>xXe%egr`(>NyI*L zF9k8RarY9BTAi*h?z)%bFyt*-lzXm|roH5Rv$?&!-D+$TMi3=wkYch%EOwd-*TaZO zy)l`)9faKL`Dw~Ugf&1ARH+>cx95w5)*<{om%7n1lPPB0Prbv~g15O4oVmd7`YFAp zsFOLfg?#&@L6q_+3DSK<4n7^l_1jpXD`+{iy1fpM+`d|9*Y9~O5THja_9wF?eCHV( z>?hn!W3@-_qRWK2hv%D&GdDU&<=*srJz4?y(~d)iMG=K2{oCz)+Vk%1&U6QuGovx@ z_QH4{#KJlfPHc4Bp`W$`9x}HTcYARJiA&tJh`Vk|BebssV#)wEo#6jFK7-8sf$=Bn z-w|=Y=WXwwcdt{4c6#3zfxjJctZB^HvAj%>w*A&!LX4Q#iXjO~G|#(NAMh5$(4978 zgL$(;mfAIG68&UlR;c9CVKI}{Iq{JTGyIj#l<3l#(ns3)W@L4ajq><(MKcR-EkM2H zKPr<-zls1FN(#;^=n7+Qelumo5f+JIJC+NF0nb%xq*d3WKFKB_SN zfbgqBZB#qZwrG?^JWE3paCuk+Tp6OjcZ_M!KE}piQEhmAZ>)8a`l={l8Nwz|Ca_4L zLZC{Z1|SQ5FOX&L&WD%1PcGj6K-zmjiv+SG3~IJ;x*=WC-i?EZs$m4E+cB>txS#UL zK?-;x?bcqXaAh@LMK%-}LO}!)O91r+X+n_OhnF|)HNVDe-Guo|O`O5&w_h|rOSnjy z{`R(?1f6%9SK?Nmj%L#QDBf;fd;n!d+Bef>Q<60EWhG5oYLqEr=(_2-ZuIu0?Rh~I zq@MQ@CQD8OSXE1({%w6#UsB(WJ|LBTYi0nNm$ZH(A4FP$r+pD^v~-M=|439!JW5>N zLjkPUMmn+mp+3}i4MzXMPYgY8F%R@n;Xr$63=P#<{E3b>eQ2}|2!J(s>9XR8o2xc( zM4dk}IXE4?ewxS%ZQEyl54thAT{mPY<1cu}9|TG2#dl?C+_|ocvshQ%6lBiDaR6ED z-o@~;BzO{s_j$b_h&tjFfi(ieo(w!krIP@Ef+r!~R*V|RR?%&J@Wzouds?jK$VMb9 z;vw=S{{WDViKfV8j0(&k0&*i$9v_^BP8R#=UBDy*@y%1)OmG0L;2~o|V83d+>wH zF!Iq@@x#_Z*ZEO8$DJJ>aRFC>j6>v#ex$mCG=q~9<{0c4wKU?rV? 
diff --git a/workflows/cwl_analysis.py b/workflows/cwl_analysis.py
new file mode 100644
--- /dev/null
+++ b/workflows/cwl_analysis.py
@@ -0,0 +1,289 @@
+import wic.api.pythonapi as api
+import polus.plugins as pp
+from pathlib import Path
+import yaml
+import logging
+import typing
+import re
+import shutil
+import sys
+sys.path.append('../')
+from polus.image.workflows.utils import GITHUB_TAG
+
+# Initialize the logger
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
+
+
+class CWLAnalysisWorkflow:
+    """
+    A CWL feature extraction (analysis) pipeline.
+
+    Attributes:
+        name : Name of an imaging dataset of the Broad Bioimage Benchmark Collection (https://bbbc.broadinstitute.org/image_sets).
+        file_pattern : Pattern for parsing raw filenames.
+        out_file_pattern : Preferred format for filenames.
+        image_pattern : Pattern for parsing intensity image filenames after renaming when using map_directory.
+        seg_pattern : Pattern used to parse segmentation image filenames.
+        map_directory : Mapping of folder name.
+        ff_pattern : The filename pattern employed to select flatfield components from the ffDir.
+        df_pattern : The filename pattern employed to select darkfield components from the ffDir.
+        group_by : Grouping variables for filePattern.
+        features : Nyxus features to extract.
+        file_extension : Output file format for the extracted features.
+        background_correction : Execute background correction.
+    """
+    def __init__(
+        self,
+        name: str,
+        file_pattern: str,
+        out_file_pattern: str,
+        image_pattern: str,
+        seg_pattern: str,
+        ff_pattern: typing.Optional[str] = '',
+        df_pattern: typing.Optional[str] = '',
+        group_by: typing.Optional[str] = '',
+        map_directory: typing.Optional[bool] = False,
+        features: typing.Optional[str] = '',
+        file_extension: typing.Optional[str] = '',
+        background_correction: typing.Optional[bool] = False,
+    ):
+        self.name = name
+        self.file_pattern = file_pattern
+        self.out_file_pattern = out_file_pattern
+        self.map_directory = map_directory
+        self.ff_pattern = ff_pattern
+        self.df_pattern = df_pattern
+        self.group_by = group_by
+        self.features = features
+        self.file_extension = file_extension
+        self.wic_path = api._WIC_PATH
+        self.PATH = Path(self.wic_path.parent).joinpath("image-workflows")
+        self.cwl_path, self.workflow_path = self._create_directories()
+        self.image_pattern = image_pattern
+        self.seg_pattern = seg_pattern
+        self.background_correction = background_correction
+
+    def _create_directories(self) -> None:
+        """Create directories for CWL outputs."""
+        cwl_path = self.PATH.joinpath("cwl_adapters")
+        cwl_path.mkdir(parents=True, exist_ok=True)
+        workflow_path = self.PATH.joinpath("outputs").resolve()
+        workflow_path.mkdir(exist_ok=True)
+        return cwl_path, workflow_path
+
+    def _clean(self) -> None:
+        """Clean up redundant directories generated while running CWL."""
+        logger.info("Cleaning directories!!!")
+        destination_path = self.workflow_path.joinpath("experiment")
+        dir_names = ("autogenerated", "cachedir", "RUNS", "provenance")
+        # Remove WIC scratch directories and their counterparts in this repository.
+        for i, d in zip(self.wic_path.iterdir(), self.PATH.iterdir()):
+            if i.name.endswith(dir_names):
+                shutil.rmtree(d)
+            if d.name.endswith(dir_names):
+                shutil.rmtree(d)
+
+        for d in destination_path.iterdir():
+            if d.name.endswith("cwl_adapters"):
+                shutil.rmtree(d)
+        for d in self.PATH.iterdir():
+            if d.name.endswith("cwl_adapters"):
+                shutil.move(d, destination_path)
+
+        return
+
+    def _move_outputs(self) -> None:
+        """Transfer outputs from the WIC directory to the workflow path."""
+        logger.info("Move outputs to workflow path!!!")
+        for d in self.wic_path.iterdir():
+            if d.name.endswith("outdir"):
+                shutil.move(d, self.workflow_path)
+        return
+
+    def _camel(self, name: str) -> str:
+        """Convert plugin name to
camel case.""" + name = re.sub(r"(_|-)+", " ", name).title().replace(" ", "") + return "".join([name[0].upper(), name[1:]]) + + def _string_after_period(self, x): + """Get a string after period.""" + match = re.search(r"\.(.*)", x) + if match: + # Get the part after the period + return f".*.{match.group(1)}" + else: + return "" + + def _add_backslash_before_parentheses(self, x): + """Add backslash to generate ff_pattern and df_pattern""" + # Define the regular expression pattern to match parenthesis + pattern_1 = r"(\()|(\))" + # Use re.sub() to add a backslash before starting and finishing parenthesis + result = re.sub(pattern_1, r"\\\1\2", x) + pattern_2 = r"\d" + result = ( + result.split("_c")[0] + + "_c{c:d}" + + re.sub(pattern_2, "", result.split("_c")[1]) + ) + return result + + def create_step(self, url: str) -> api.Step: + """Generate the plugin class name from the plugin name specified in the manifest""" + manifest = pp.submit_plugin(url) + plugin_version = str(manifest.version) + cwl_tool = pp.get_plugin(self._camel(manifest.name), plugin_version).save_cwl( + self.cwl_path.joinpath(f"{self._camel(manifest.name)}.cwl") + ) + step = api.Step(cwl_tool) + return step + + def manifest_urls(self, x: str) -> str: + """URLs on GitHub for plugin manifests""" + + urls = { + "bbbc_download": f"{GITHUB_TAG}/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json", + "file_renaming": f"{GITHUB_TAG}/hamshkhawar/image-tools/filepattern_filerenaming/formats/file-renaming-tool/plugin.json", + "ome_converter": f"{GITHUB_TAG}/hamshkhawar/image-tools/basecontainer_omecontainer/formats/ome-converter-plugin/plugin.json", + "estimate_flatfield": f"{GITHUB_TAG}/nishaq503/image-tools/fix/basic/regression/basic-flatfield-estimation-tool/plugin.json", + "apply_flatfield": f"{GITHUB_TAG}/hamshkhawar/image-tools/cast_images/transforms/images/apply-flatfield-tool/plugin.json", + "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/kaggle-nucleiseg/segmentation/kaggle-nuclei-segmentation-tool/plugin.json", + "ftl_plugin": f"{GITHUB_TAG}/nishaq503/image-tools/fix/ftl-label/transforms/images/polus-ftl-label-plugin/plugin.json", + "nyxus_plugin": f"{GITHUB_TAG}/hamshkhawar/image-tools/nyxus_manifest/features/nyxus-plugin/plugin.json", + } + return urls[x] + + def modify_cwl(self) -> None: + """Modify CWL to incorporate environmental variables and permission access""" + for f in list(self.cwl_path.rglob("*.cwl")): + if "cwl" in f.name: + try: + with Path.open(f, "r") as file: + config = yaml.safe_load(file) + config["requirements"]["NetworkAccess"] = { + "networkAccess": True + } + config["requirements"]["EnvVarRequirement"] = { + "envDef": {"HOME": "/home/polusai"} + } + with open(f, "w") as out_file: + yaml.dump(config, out_file) + except FileNotFoundError: + logger.info("Error: There was an unexpected error while processing the file.") + return + + def workflow(self) -> None: + """ + A CWL feature extraction pipeline. 
+ """ + # BBBCDownload + bbbc = self.create_step(self.manifest_urls("bbbc_download")) + bbbc.name = self.name + bbbc.outDir = Path("bbbc.outDir") + + # Renaming plugin + rename = self.create_step(self.manifest_urls("file_renaming")) + rename.filePattern = self.file_pattern + rename.outFilePattern = self.out_file_pattern + rename.mapDirectory = self.map_directory + rename.inpDir = bbbc.outDir + rename.outDir = Path("rename.outDir") + + # OMEConverter + ome_converter = self.create_step(self.manifest_urls("ome_converter")) + ome_converter.filePattern = self._string_after_period(self.out_file_pattern) + ome_converter.fileExtension = ".ome.tif" + ome_converter.inpDir = rename.outDir + ome_converter.outDir = Path("ome_converter.outDir") + + if self.background_correction: + # Estimate Flatfield + estimate_flatfield = self.create_step(self.manifest_urls("estimate_flatfield")) + estimate_flatfield.inpDir = ome_converter.outDir + estimate_flatfield.filePattern = self.image_pattern + estimate_flatfield.groupBy = self.group_by + estimate_flatfield.getDarkfield = True + estimate_flatfield.outDir = Path("estimate_flatfield.outDir") + + # # Apply Flatfield + apply_flatfield = self.create_step(self.manifest_urls("apply_flatfield")) + apply_flatfield.imgDir = ome_converter.outDir + apply_flatfield.imgPattern = self.image_pattern + apply_flatfield.ffDir = estimate_flatfield.outDir + apply_flatfield.ffPattern = self.ff_pattern + apply_flatfield.dfPattern = self.df_pattern + apply_flatfield.outDir = Path("apply_flatfield.outDir") + apply_flatfield.dataType = True + + ## Kaggle Nuclei Segmentation + kaggle_nuclei_segmentation = self.create_step( + self.manifest_urls("kaggle_nuclei_segmentation") + ) + if self.background_correction: + kaggle_nuclei_segmentation.inpDir = apply_flatfield.outDir + else: + kaggle_nuclei_segmentation.inpDir = ome_converter.outDir + kaggle_nuclei_segmentation.filePattern = self.image_pattern + kaggle_nuclei_segmentation.outDir = Path("kaggle_nuclei_segmentation.outDir") + + ## FTL Label Plugin + ftl_plugin = self.create_step(self.manifest_urls("ftl_plugin")) + ftl_plugin.inpDir = kaggle_nuclei_segmentation.outDir + ftl_plugin.connectivity = 1 + ftl_plugin.binarizationThreshold = 0.5 + ftl_plugin.outDir = Path("ftl_plugin.outDir") + + # # ## Nyxus Plugin + nyxus_plugin = self.create_step(self.manifest_urls("nyxus_plugin")) + if self.background_correction: + nyxus_plugin.inpDir = apply_flatfield.outDir + else: + nyxus_plugin.inpDir = ome_converter.outDir + nyxus_plugin.segDir = ftl_plugin.outDir + nyxus_plugin.intPattern = self.image_pattern + nyxus_plugin.segPattern = self.seg_pattern + nyxus_plugin.features = self.features + nyxus_plugin.fileExtension = self.file_extension + nyxus_plugin.neighborDist = 5 + nyxus_plugin.pixelPerMicron = 1.0 + nyxus_plugin.outDir = Path("nyxus_plugin.outDir") + + logger.info("Initiating CWL Feature Extraction Workflow!!!") + if self.background_correction: + steps = [ + bbbc, + rename, + ome_converter, + estimate_flatfield, + apply_flatfield, + kaggle_nuclei_segmentation, + ftl_plugin, + nyxus_plugin + ] + else: + steps = [ + bbbc, + rename, + ome_converter, + kaggle_nuclei_segmentation, + ftl_plugin, + nyxus_plugin + ] + + workflow = api.Workflow(steps, "experiment", self.workflow_path) + # # Saving CLT for plugins + workflow._save_all_cwl(overwrite=True) + # # Adding environmental variables for bbbc_download and ome_converter plugin + self.modify_cwl() + # # # Save yaml to run CWL tool + workflow._save_yaml() + # Compile and run using WIC python API + 
workflow.compile(run_local=True, overwrite=False) + # # print(workflow.yml_path) + # # clean autognerated directories + self._clean() + self._move_outputs() + logger.info("Completed CWL Feature Extraction /Analysis Workflow.") + return + \ No newline at end of file diff --git a/workflows/cwl_nuclear_segmentation.py b/workflows/cwl_nuclear_segmentation.py new file mode 100644 index 0000000..d7d264b --- /dev/null +++ b/workflows/cwl_nuclear_segmentation.py @@ -0,0 +1,262 @@ +import wic.api.pythonapi as api +import polus.plugins as pp +from pathlib import Path +import yaml +import logging +import typing +import re +import shutil +import sys +sys.path.append('../') +from polus.image.workflows.utils import GITHUB_TAG + +# Initialize the logger +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + + +class CWLSegmentationWorkflow: + """ + A CWL Nuclear Segmentation pipeline. + + Attributes: + name : Name of imaging dataset of Broad Bioimage Benchmark Collection (https://bbbc.broadinstitute.org/image_sets). + file_pattern : Pattern for parsing raw filenames. + out_file_pattern : Preferred format for filenames + image_pattern : Pattern for parsing intensity image filenames after renaming when using map_directory + seg_pattern : Pattern use to parse segmentation image filenames + map_directory : Mapping of folder name + ff_pattern: The filename pattern employed to select flatfield components from the ffDir. + df_pattern:The filename pattern employed to select darkfield components from the ffDir + group_by: Grouping variables for filePattern + background_correction: Execute background correction + """ + def __init__( + self, + name: str, + file_pattern: str, + out_file_pattern: str, + image_pattern: str, + seg_pattern: str, + ff_pattern: typing.Optional[str] = '', + df_pattern: typing.Optional[str] = '', + group_by: typing.Optional[str] = '', + map_directory: typing.Optional[bool] = False, + background_correction: typing.Optional[bool] = False, + ): + self.name = name + self.file_pattern = file_pattern + self.out_file_pattern = out_file_pattern + self.map_directory = map_directory + self.ff_pattern = ff_pattern + self.df_pattern = df_pattern + self.group_by = group_by + self.wic_path = api._WIC_PATH + self.PATH = Path(self.wic_path.parent).joinpath("image-workflows") + self.cwl_path, self.workflow_path = self._create_directories() + self.image_pattern = image_pattern + self.seg_pattern = seg_pattern + self.background_correction = background_correction + + def _create_directories(self) -> None: + """Create directories for CWL outputs""" + cwl_path = self.PATH.joinpath("cwl_adapters") + cwl_path.mkdir(parents=True, exist_ok=True) + workflow_path = self.PATH.joinpath("outputs").resolve() + workflow_path.mkdir(exist_ok=True) + return cwl_path, workflow_path + + def _clean(self) -> None: + """Cleaning of redundant directories generating on running CWL""" + logger.info("Cleaning directories!!!") + destination_path = self.workflow_path.joinpath("experiment") + dir_names = ("autogenerated", "cachedir", "RUNS", "provenance", "cwl_adapters") + dir_list = [w for w in self.wic_path.iterdir() if w.is_dir() if w.name in dir_names] + for d in dir_list: + shutil.rmtree(d) + for d in destination_path.iterdir(): + if d.name.endswith("cwl_adapters"): + shutil.rmtree(d) + for d in self.PATH.iterdir(): + if d.name.endswith("cwl_adapters"): + shutil.move(d, destination_path) + + return + + def _move_outputs(self) -> None: + """Transfer outputs from the WIC directory to the workflow path""" + 
logger.info("Move outputs to workflow path!!!") + for d in self.wic_path.iterdir(): + if d.name.endswith("outdir"): + shutil.move(d, self.workflow_path) + return + + def _camel(self, name: str) -> str: + """Convert plugin name to camel case.""" + name = re.sub(r"(_|-)+", " ", name).title().replace(" ", "") + return "".join([name[0].upper(), name[1:]]) + + def _string_after_period(self, x): + """Get a string after period.""" + match = re.search(r"\.(.*)", x) + if match: + # Get the part after the period + return f".*.{match.group(1)}" + else: + return "" + + def _add_backslash_before_parentheses(self, x): + """Add backslash to generate ff_pattern and df_pattern""" + # Define the regular expression pattern to match parenthesis + pattern_1 = r"(\()|(\))" + # Use re.sub() to add a backslash before starting and finishing parenthesis + result = re.sub(pattern_1, r"\\\1\2", x) + pattern_2 = r"\d" + result = ( + result.split("_c")[0] + + "_c{c:d}" + + re.sub(pattern_2, "", result.split("_c")[1]) + ) + return result + + def create_step(self, url: str) -> api.Step: + """Generate the plugin class name from the plugin name specified in the manifest""" + manifest = pp.submit_plugin(url) + plugin_version = str(manifest.version) + cwl_tool = pp.get_plugin(self._camel(manifest.name), plugin_version).save_cwl( + self.cwl_path.joinpath(f"{self._camel(manifest.name)}.cwl") + ) + step = api.Step(cwl_tool) + return step + + def manifest_urls(self, x: str) -> str: + """URLs on GitHub for plugin manifests""" + urls = { + "bbbc_download": f"{GITHUB_TAG}/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json", + "file_renaming": f"{GITHUB_TAG}/hamshkhawar/image-tools/filepattern_filerenaming/formats/file-renaming-tool/plugin.json", + "ome_converter": f"{GITHUB_TAG}/hamshkhawar/image-tools/basecontainer_omecontainer/formats/ome-converter-plugin/plugin.json", + "estimate_flatfield": f"{GITHUB_TAG}/nishaq503/image-tools/fix/basic/regression/basic-flatfield-estimation-tool/plugin.json", + "apply_flatfield": f"{GITHUB_TAG}/hamshkhawar/image-tools/cast_images/transforms/images/apply-flatfield-tool/plugin.json", + "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/kaggle-nucleiseg/segmentation/kaggle-nuclei-segmentation-tool/plugin.json", + "ftl_plugin": f"{GITHUB_TAG}/nishaq503/image-tools/fix/ftl-label/transforms/images/polus-ftl-label-plugin/plugin.json" + } + return urls[x] + + def modify_cwl(self) -> None: + """Modify CWL to incorporate environmental variables and permission access""" + for f in list(self.cwl_path.rglob("*.cwl")): + if "cwl" in f.name: + try: + with Path.open(f, "r") as file: + config = yaml.safe_load(file) + config["requirements"]["NetworkAccess"] = { + "networkAccess": True + } + config["requirements"]["EnvVarRequirement"] = { + "envDef": {"HOME": "/home/polusai"} + } + with open(f, "w") as out_file: + yaml.dump(config, out_file) + except FileNotFoundError: + logger.info("Error: There was an unexpected error while processing the file.") + return + + def workflow(self) -> None: + """ + A CWL nuclear segmentation pipeline. 
+ """ + # BBBCDownload + bbbc = self.create_step(self.manifest_urls("bbbc_download")) + bbbc.name = self.name + bbbc.outDir = Path("bbbc.outDir") + + # Renaming plugin + rename = self.create_step(self.manifest_urls("file_renaming")) + rename.filePattern = self.file_pattern + rename.outFilePattern = self.out_file_pattern + rename.mapDirectory = self.map_directory + rename.inpDir = bbbc.outDir + rename.outDir = Path("rename.outDir") + + + # OMEConverter + ome_converter = self.create_step(self.manifest_urls("ome_converter")) + ome_converter.filePattern = self._string_after_period(self.out_file_pattern) + ome_converter.fileExtension = ".ome.tif" + ome_converter.inpDir = rename.outDir + ome_converter.outDir = Path("ome_converter.outDir") + + if self.background_correction: + # Estimate Flatfield + estimate_flatfield = self.create_step(self.manifest_urls("estimate_flatfield")) + estimate_flatfield.inpDir = ome_converter.outDir + estimate_flatfield.filePattern = self.image_pattern + estimate_flatfield.groupBy = self.group_by + estimate_flatfield.getDarkfield = True + estimate_flatfield.outDir = Path("estimate_flatfield.outDir") + + # # Apply Flatfield + apply_flatfield = self.create_step(self.manifest_urls("apply_flatfield")) + apply_flatfield.imgDir = ome_converter.outDir + apply_flatfield.imgPattern = self.image_pattern + apply_flatfield.ffDir = estimate_flatfield.outDir + apply_flatfield.ffPattern = self.ff_pattern + apply_flatfield.dfPattern = self.df_pattern + apply_flatfield.outDir = Path("apply_flatfield.outDir") + apply_flatfield.dataType = True + + ## Kaggle Nuclei Segmentation + kaggle_nuclei_segmentation = self.create_step( + self.manifest_urls("kaggle_nuclei_segmentation") + ) + if self.background_correction: + kaggle_nuclei_segmentation.inpDir = apply_flatfield.outDir + else: + kaggle_nuclei_segmentation.inpDir = ome_converter.outDir + + kaggle_nuclei_segmentation.filePattern = self.image_pattern + kaggle_nuclei_segmentation.outDir = Path("kaggle_nuclei_segmentation.outDir") + + ## FTL Label Plugin + ftl_plugin = self.create_step(self.manifest_urls("ftl_plugin")) + ftl_plugin.inpDir = kaggle_nuclei_segmentation.outDir + ftl_plugin.connectivity = 1 + ftl_plugin.binarizationThreshold = 0.5 + ftl_plugin.outDir = Path("ftl_plugin.outDir") + + logger.info("Initiating CWL Nuclear Segmentation Workflow!!!") + if self.background_correction: + steps = [ + bbbc, + rename, + ome_converter, + estimate_flatfield, + apply_flatfield, + kaggle_nuclei_segmentation, + ftl_plugin + ] + else: + steps = [ + bbbc, + rename, + ome_converter, + kaggle_nuclei_segmentation, + ftl_plugin] + + + + workflow = api.Workflow(steps, "experiment", self.workflow_path) + # # Saving CLT for plugins + workflow._save_all_cwl(overwrite=True) + # # Adding environmental variables for bbbc_download and ome_converter plugin + self.modify_cwl() + # # # Save yaml to run CWL tool + workflow._save_yaml() + # Compile and run using WIC python API + workflow.compile(run_local=True, overwrite=False) + # # print(workflow.yml_path) + # # clean autognerated directories + self._clean() + self._move_outputs() + logger.info("Completed CWL nuclear segmentation workflow.") + return \ No newline at end of file From 6e79e8640d1e8e0288d764f269068f6d45447984 Mon Sep 17 00:00:00 2001 From: hamshkhawar Date: Mon, 1 Apr 2024 17:08:08 -0500 Subject: [PATCH 02/11] rebased --- README.md | 105 ------- configuration/__init__.py | 0 .../__pycache__/__init__.cpython-310.pyc | Bin 174 -> 0 bytes configuration/analysis/BBBC001.yml | 14 - 
configuration/analysis/BBBC039.yml | 13 -
configuration/analysis/__init__.py | 0
configuration/analysis/sample.yml | 13 -
configuration/segmentation/BBBC001.yml | 11 -
configuration/segmentation/BBBC039.yml | 11 -
configuration/segmentation/__init__.py | 0
.../__pycache__/__init__.cpython-310.pyc | Bin 187 -> 0 bytes
configuration/segmentation/sample.yml | 12 -
.../basic-flatfield-estimation.cwl | 0
.../bbbcdownload.cwl | 0
.../file-renaming.cwl | 0
.../image_assembler.cwl | 0
{cwl-adapters => cwl_adapters}/montage.cwl | 0
.../ome-converter.cwl | 0
.../precompute_slide.cwl | 0
pyproject.toml | 38 ---
src/polus/image/workflows/__init__.py | 0
src/polus/image/workflows/__main__.py | 65 ----
.../__pycache__/__init__.cpython-310.pyc | Bin 186 -> 0 bytes
.../__pycache__/__main__.cpython-310.pyc | Bin 1693 -> 0 bytes
.../__pycache__/utils.cpython-310.pyc | Bin 2785 -> 0 bytes
src/polus/image/workflows/utils.py | 68 -----
workflows/__init__.py | 0
.../__pycache__/__init__.cpython-310.pyc | Bin 170 -> 0 bytes
.../__pycache__/cwl_analysis.cpython-310.pyc | Bin 9109 -> 0 bytes
.../cwl_nuclear_segmentation.cpython-310.pyc | Bin 8690 -> 0 bytes
workflows/cwl_analysis.py | 289 ------------------
workflows/cwl_nuclear_segmentation.py | 262 ----------------
32 files changed, 901 deletions(-)
delete mode 100644 README.md
delete mode 100644 configuration/__init__.py
delete mode 100644 configuration/__pycache__/__init__.cpython-310.pyc
delete mode 100644 configuration/analysis/BBBC001.yml
delete mode 100644 configuration/analysis/BBBC039.yml
delete mode 100644 configuration/analysis/__init__.py
delete mode 100644 configuration/analysis/sample.yml
delete mode 100644 configuration/segmentation/BBBC001.yml
delete mode 100644 configuration/segmentation/BBBC039.yml
delete mode 100644 configuration/segmentation/__init__.py
delete mode 100644 configuration/segmentation/__pycache__/__init__.cpython-310.pyc
delete mode 100644 configuration/segmentation/sample.yml
rename {cwl-adapters => cwl_adapters}/basic-flatfield-estimation.cwl (100%)
rename {cwl-adapters => cwl_adapters}/bbbcdownload.cwl (100%)
rename {cwl-adapters => cwl_adapters}/file-renaming.cwl (100%)
rename {cwl-adapters => cwl_adapters}/image_assembler.cwl (100%)
rename {cwl-adapters => cwl_adapters}/montage.cwl (100%)
rename {cwl-adapters => cwl_adapters}/ome-converter.cwl (100%)
rename {cwl-adapters => cwl_adapters}/precompute_slide.cwl (100%)
delete mode 100644 pyproject.toml
delete mode 100644 src/polus/image/workflows/__init__.py
delete mode 100644 src/polus/image/workflows/__main__.py
delete mode 100644 src/polus/image/workflows/__pycache__/__init__.cpython-310.pyc
delete mode 100644 src/polus/image/workflows/__pycache__/__main__.cpython-310.pyc
delete mode 100644 src/polus/image/workflows/__pycache__/utils.cpython-310.pyc
delete mode 100644 src/polus/image/workflows/utils.py
delete mode 100644 workflows/__init__.py
delete mode 100644 workflows/__pycache__/__init__.cpython-310.pyc
delete mode 100644 workflows/__pycache__/cwl_analysis.cpython-310.pyc
delete mode 100644 workflows/__pycache__/cwl_nuclear_segmentation.cpython-310.pyc
delete mode 100644 workflows/cwl_analysis.py
delete mode 100644 workflows/cwl_nuclear_segmentation.py
zFgDFNelTMBbVRzam~oYs5_83PTVr8s6qQEG;VQ*r!)$m1|-#a~hP4#Y@x+13_Q zS^tCX)iN0kAYQ8bWx3T|e+;YKE#-FCM&$iRpr(2GNOhG=y2CObsp&{}J~Gl-t~$f9 zT!K}wDcnpz-l4YV6{ry1D`@r%#2hM4BLu6$oi!q=pw$p6_;%sjzhCDr_ju(;M+Ce& zALq5Xo_aR5y*;`|qlWcy4*7Us>wyg>`_!fhJuM6jv?H^?qz-TL))OyU;zuAjOUA!h zGJcN@^2zD1S)M_B0N1cgLkyY{Ivm4T$M`cuOamy8W;TQSx7%>6p%kX_kmVy`s!~91 z7KsCApFJ2wY@b0>AA!(h2afQJJ|}8vhyDW0|JB)bw6o*v(eJ2scc}LE9C>1Dw+Ys; zQD<^&aeK&G)tR(IUJ4aLLWJURru1rfD;KbD=nj*TtZ-c2qlK7rC#p%sgaa!<5C|J)PBp-Ua(A4iB03>qXX3?JWQ|kw!ooDV- z@|^C{yHI~T;*mXKZ-k&%sXz+pfIH>o0i(U?mQDh(jI?@DRLl$iv=C zlMA<)glz0t;)PSPjpUS;SnyHx@uba1f~15hTpv9)R?1@Qt$z z$9oDFD;y7w`Lpu?3pCxZp@R%^I_C|I4So268k$Z6YSTSf%Yj>P=M67^fOBHXo*|QS z2ay}Xo<(d+#adn_ELWF1bD!Ijwe>s?bzg*F2Q*;EcB|#>%wTH`Z_7+x%iPzH#l|rtvqgUAJ1>GoBK8Dwn|{YTB|D z1A9h7Nu{Rc7SazHA%-^S6-$7Od@U;vz2sCG=K}CA~U*fgVUY4)n z)JIWZ#+X_h^D>EJTMFe0_G30p%cnmab5`gqiR$|Vhx*w_a;6l-wO?|H+mYu`U>?dh zP+UYYFA!G4v&fSmY?|US0l_UV)28Flv$R=v=n8$SW^LLnc?m2`4gO~J^K5Ismh15l zs4`v1?(sYrMZxtiZhwB~)A;uKb@?%L$WO4h{wc7<{tdA=x1ILi_GJ9E=^v8ISa=lP T2R><1{6SrJeb=W)-4*X2Jb~0G diff --git a/src/polus/image/workflows/utils.py b/src/polus/image/workflows/utils.py deleted file mode 100644 index 7daa9b7..0000000 --- a/src/polus/image/workflows/utils.py +++ /dev/null @@ -1,68 +0,0 @@ -import pydantic -from pathlib import Path -from typing import Dict -from typing import Union -import yaml - - -GITHUB_TAG = "https://raw.githubusercontent.com" - - -ANALYSIS_KEYS = ["name", "file_pattern", "out_file_pattern", "image_pattern", "seg_pattern", "ff_pattern", "df_pattern", "group_by", "map_directory", "features", "file_extension", "background_correction"] -SEG_KEYS = ["name", "file_pattern", "out_file_pattern", "image_pattern", "seg_pattern", "ff_pattern", "df_pattern", "group_by", "map_directory", "background_correction"] - - -class DataModel(pydantic.BaseModel): - data: Dict[str, Dict[str, Union[str, bool]]] - - -class LoadYaml(pydantic.BaseModel): - """Validation of Dataset yaml.""" - workflow:str - config_path: Union[str, Path] - - @pydantic.validator("config_path", pre=True) - @classmethod - def validate_path(cls, value: Union[str, Path]) -> Union[str, Path]: - """Validation of Paths.""" - if not Path(value).exists(): - msg = f"{value} does not exist! Please do check it again" - raise ValueError(msg) - if isinstance(value, str): - return Path(value) - return value - - @pydantic.validator("workflow", pre=True) - @classmethod - def validate_workflow_name(cls, value: str) -> str: - """Validation of workflow name.""" - if not value in ["analysis", "segmentation", "visualization"]: - msg = f"Please choose a valid workflow name i-e analysis segmentation visualization" - raise ValueError(msg) - return value - - def parse_yaml(self) -> Dict[str, Union[str, bool]]: - """Parsing yaml configuration file for each dataset.""" - - with open(f'{self.config_path}','r') as f: - data = yaml.safe_load(f) - - check_values = any([v for _, v in data.items() if f is None]) - - if check_values is True: - msg = f"All the parameters are not defined! Please do check it again" - raise ValueError(msg) - - - if self.workflow == "analysis": - if data['background_correction'] == True: - if list(data.keys()) != ANALYSIS_KEYS: - msg = f"Please do check parameters again for analysis workflow!!" - raise ValueError(msg) - - if self.workflow == "segmentation": - if data['background_correction'] == True: - if list(data.keys()) != SEG_KEYS: - msg = f"Please do check parameters again for segmentation workflow!!" 
- raise ValueError(msg) - return data diff --git a/workflows/__init__.py b/workflows/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/workflows/__pycache__/__init__.cpython-310.pyc b/workflows/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 0064832c865029be90ae7eabacbc5d85f372cdc0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 170 zcmd1j<>g`kf(jAtG!Xq5L?8o3AjbiSi&=m~3PUi1CZpd6 diff --git a/workflows/__pycache__/cwl_analysis.cpython-310.pyc b/workflows/__pycache__/cwl_analysis.cpython-310.pyc deleted file mode 100644 index 886cc84acde030a78fb94a6c089a3ebda1592515..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 9109 zcma)C%ahwidIvxd1TiG%q0!T_4B6hbD0@gh*4dQeTJKo0WI57UYmXxvQ3?yPn?sC1 zfNFpm4a2E&m7LpdHs|cRij&*k_OyRTPN@n`smjT>RP82loW#oSYY=>lM!Vq@x*Og7 zeckx_d-p`us-@r;zP8l4{JNt2E4|GB3V8W3p7`%kFomgs(o;1>ReGAEs##B=r#nU2 z)&iqfa!Qka(=jDP*(uA~ax7UdIZLvxI2Bn}ohq;H*2c@MaG2rLgO%Q@v#Kh*JYM4` z4pnEJua8gi^6n{`aYJE6W_+nIM`!2R^XRFt3+#n26{pHXWyTiR? zAh^vRCBkbbeiYh~uyZK8@ArA&hrD5BFi9f()*#{W75PqhLa)c|EA|#uQOEXs-i{yc z*vw13m?zZro`^hVzvo9Z#_jib*zWc`ao@fg1p&_%z0^&Tetf0bY_(eLMvEZ)Fi!jg zi!~y#)0D6-Fvj)9bnT8GaJTOz2^S&u05*}W4nCppiJ0~wygi$y(Kh25WHd~t$<@slXIQ0%M@rL3-OifnNUo2 zzqS8p5ZhA*-SLy|pw+-PHt$5iAl|&$3~5HQj_(B5gd9|viPH=_@hbX>F{1E_{m~%l z4?s0^PZ(}GwdJ+%(-(xyZAaipDI!~{i1z$` zJJso2)5<)rrz=OfKdmlsb-FaicBwUEhSZv!TWaP+POA&tm6|yr)0$j%`qAmh+1BU2 ztX+k+&VSS<3w0Gw{0PNJ9VxCd(om|S0!nRUpe&3^DD{zvvX~gV^kydIk+N&pQevNBS})kEbT!PoFCk93qPqaw=Hk%e+?w1o1+2(a$@2=m>OOo0N`wuZ}+EE{_h z5*q}y#KQ&#~5B*~t%%}_0Mc>Wk8*2s~hhTt$? z;Z|D(ru;|jqvMe*^dmID6v|j7hB#11>aND4@~CD_yawLr2WaL^-3_FFWP3Be?Zv~huHL_AhF&=SbcZgFfJ67fn@Kh~}(e|7Gj zc2GFbM}=K;TpnSzcqOsMOCyjLIP_`no3q{rXGSW)1G)-GIfr4sxHdQEhxsa)Ra=<# z8D>==&FUcz^1+9!D39vTt_Iu-C#DU?97@y+lZBJp;UO0=mt_7zTMR-u9b_Axo&wLE z^u>!8Q;R?9bKw*3*LA1r4U%Yrg)_%&d+jaDDKX)fdr2SopBb#fzZj7G%;0 zqMaQsQr!<>e&l(|Q!fntzBq+RX~_rU07=FYq-MZ4A&d85Z&D-f4iY~AzxNWsd8+rY zLcJ(i#93h+o>C|s0I4D1ar(lQ9#5+JsmhM3t%8iKH3)* z^jNC)MGfy2)Vel2cRXVeq6J7<&aSRy!ubYuEXrkCb?I!}iKPD&Gh))c79_Ny)`*nq z@ZzzgoQG=@74sjp4LnjhXB1@`c)u{#4i%D5L|Ph2=ok{g5X6j&kWiJPs=v|BDm$Pu zC|ZIli^7LoBz8X-z){cKcaWwHHz7b7^TyD)D#uYRlAf|eDeT)(Dbt8S#4;AtTq zw9+EH^MI4W>IYsMJV{0^Ee`sSl$=14fJX=f=A*T`z!#I6KBrw!pT%4>ae)*Ix}8`z zV=htxq`k!oh@y>&80s&Tub{n|j?XR*&)q;Q;@L6c7l?i4g_A@_44-?iaj9{+jsWv= zL%a=~@~feM{{(G_9`{7Mn--5jTKbhz-@h-ZY@?<6V4KnQU zAV_9iZyqJ|Mb~4@C5KXO+HG+tR*DA6!|L*w#}9!sCVxgR)Ij%&rm5PKQ=0aj_CzlZ z-#+#P{us`*Fc6CzNfJ`RBhTbxG~{`Zuc0O6PZS=j2L<%OY>ta~H{g~+RH5#m>|iRV zy}!h1eg!cFq@EkVgW`zHnl`+BBQsiv`=I6~vfIXK#hC<8Hz9H<{VY02;e-6b?UYcD-KQz2EirJTdXciL!B%0pgh; z^|y}f`cacYj&@V<9cTh##jImvnq}A^rw5;ap{KWf_nF&YDDSEV?iTJ*^vlH-*Z~)iJNBrEJ&&xCyMmOe%p5FZDe}F2TB-jYN77vcu ze8x!A8OIU)<}W|%i=!-u|zLYayT=CW<%HNXq%`BI8) zuib_`4~v({0yzZ~sVbam$P;qTH|6NIQxW{Z0Lf69Y;dYu^P`S|rLOT#s(~(O5_U9MX?h^tt>ns=RDP;J`YoU-|GcwIrci-CH0-^Y|!hc ztJi^HD@v|YMo;cz7HmxXpH#)?&_byJk?Ht5sfhqWW~h(yvDK4DK-TvwEFDwavJSJT zA-J$qU8_7X^~#eoa z$V3Wd1Hj|pX;(`SeLO~D1=+ZK2HbCQfG`6tkBfXA@^hMtT-`2m1Fi*0&`~b|QU|0q z2U(Pm$nzn#2NXF=uE7kHRX9&@Z6#C*Pz8e8t%;vaS2Z)|2{{O46I^8Vc9HFqtFNG5 zWo49WSQGg{v}U_ql5j-{S41X|;EKov>cCTUPvZR)>eHytpgs!>6<{d3xH>|4Zn}O| z;-RmtiC>M1fHvHv5PQyA60hMJfUg1GnA^d!ge;8=%q+P}Yz5_WY!&5s%wEIn(%fv^ zo`JeV&6@Z}xsEwq2dz;FSWNeMxt{4>K>Y&VZPXW0Uqt;P>MC1D`4T&c@@1@Y3agl| zo_ufoeRf)AZGivpR6%jMMp_0rQv+F7bH_X=hvuX=>oE?>$t&YaqcS@K>~#bp*2vnu z4DEGRrqY!0tB24+BWwIxR_|_L3|C=U5^mg(y~?;rwcNpZ+5vkWTIa&>2h+^>=CdxX zW*1fVV*F<}4rl2@PI+wrR`fQScz+&mA=I`|^4iFRh*y9SSFL5_F2tTdF6w$+bIM4LL6fz; 
zKyZu?e{dByI{_z!`3#iJ>D6J5ys=e(UHmzq#Woc(BS1P+=3eMj$Z|SMr2Ot=fu_u? zJ2e~-DbBo;GTcrDHw}QvF3B7NNp!-nNkh{8z{h3xD>4@@DDff_^G-=d@9*t96$CWb z@|z@J=X(#?Lv~ zLMl%9L+P1rcj3Kualz&wjWAg$OmdWz!*?tUyAyi*Cezte3+;u?$&LMQ(b5AJYr z+i%0h&di65x*Rj7wM(fu4KE8ZmXyoI8^lw}fik`{`+ zDR(GHGz77}Aodo-#zOKau^{4i1ZGHXli8-5 ziGJ!ev=uGur-c7(jvP~U!01nmf~A_6m5tMN0kx>4-uy4?x6q?etUvc>e4|xn@A|hZ z<%$MKt)`j453~7ZL4;A=6=0To)Ac3$l+86mb#tR{bPhZxHERH?>^k0oedI zByL)9-3J3Nn2eNM_{?qB6*mBs8p%FgPp0NaeF|kfM8hovVZz3s%<<1oDGR`f*#+f( z+;PbKo4hk1wLv@#ruL0l*8bD9wEr-dw0}1%+V9P(^*hV9Oly73G>c|=y|!jq%e51NR;izH zR&n>$zmSK)IgXU6`A&AcKcZY$OiAH2g;M1?FH%kKR-vM<75)!7>IJX> diff --git a/workflows/__pycache__/cwl_nuclear_segmentation.cpython-310.pyc b/workflows/__pycache__/cwl_nuclear_segmentation.cpython-310.pyc deleted file mode 100644 index 60bc5be35bc33f9a93503daaec5c9beefec275b4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8690 zcma)B>5m&ncJFRBo6R1Qb7*vo@H;+Gvp#4ze-LaH+ezxvXov zJX+z$4)o1czB)S2%eyD2$90Vrne|v>)+1x{B-3wc_0q4fK5e6}OY;*y?FeUlGNEMJC2fr(g}7Fr3E$H z5;xl>JH^&eQ(>psnaA2@l|9Fv$GgT}!2C;mad!!QUSux%EVHw!j<8;0FQaA!Bfi33 zMd=ty-(jzzw91x2rq|`k%XeLJ&=;ChlTOs#_ zJKNXo1w9@H5pOtI&omXmc0c9GdG$@$k>BO+d3S?eaoY{L{!S3>xXe%egr`(>NyI*L zF9k8RarY9BTAi*h?z)%bFyt*-lzXm|roH5Rv$?&!-D+$TMi3=wkYch%EOwd-*TaZO zy)l`)9faKL`Dw~Ugf&1ARH+>cx95w5)*<{om%7n1lPPB0Prbv~g15O4oVmd7`YFAp zsFOLfg?#&@L6q_+3DSK<4n7^l_1jpXD`+{iy1fpM+`d|9*Y9~O5THja_9wF?eCHV( z>?hn!W3@-_qRWK2hv%D&GdDU&<=*srJz4?y(~d)iMG=K2{oCz)+Vk%1&U6QuGovx@ z_QH4{#KJlfPHc4Bp`W$`9x}HTcYARJiA&tJh`Vk|BebssV#)wEo#6jFK7-8sf$=Bn z-w|=Y=WXwwcdt{4c6#3zfxjJctZB^HvAj%>w*A&!LX4Q#iXjO~G|#(NAMh5$(4978 zgL$(;mfAIG68&UlR;c9CVKI}{Iq{JTGyIj#l<3l#(ns3)W@L4ajq><(MKcR-EkM2H zKPr<-zls1FN(#;^=n7+Qelumo5f+JIJC+NF0nb%xq*d3WKFKB_SN zfbgqBZB#qZwrG?^JWE3paCuk+Tp6OjcZ_M!KE}piQEhmAZ>)8a`l={l8Nwz|Ca_4L zLZC{Z1|SQ5FOX&L&WD%1PcGj6K-zmjiv+SG3~IJ;x*=WC-i?EZs$m4E+cB>txS#UL zK?-;x?bcqXaAh@LMK%-}LO}!)O91r+X+n_OhnF|)HNVDe-Guo|O`O5&w_h|rOSnjy z{`R(?1f6%9SK?Nmj%L#QDBf;fd;n!d+Bef>Q<60EWhG5oYLqEr=(_2-ZuIu0?Rh~I zq@MQ@CQD8OSXE1({%w6#UsB(WJ|LBTYi0nNm$ZH(A4FP$r+pD^v~-M=|439!JW5>N zLjkPUMmn+mp+3}i4MzXMPYgY8F%R@n;Xr$63=P#<{E3b>eQ2}|2!J(s>9XR8o2xc( zM4dk}IXE4?ewxS%ZQEyl54thAT{mPY<1cu}9|TG2#dl?C+_|ocvshQ%6lBiDaR6ED z-o@~;BzO{s_j$b_h&tjFfi(ieo(w!krIP@Ef+r!~R*V|RR?%&J@Wzouds?jK$VMb9 z;vw=S{{WDViKfV8j0(&k0&*i$9v_^BP8R#=UBDy*@y%1)OmG0L;2~o|V83d+>wH zF!Iq@@x#_Z*ZEO8$DJJ>aRFC>j6>v#ex$mCG=q~9<{0c4wKU?rV? 
z8iY80-BfU{RtqtYSh5#Dy^&B$vkL-2!bytd665fLBIUa&y#=2V8>0E8&ay47(>x^F z((=fQi>{)aya3=B72VWNLAopI@94%?HB~acGL69t-@f0cSVKatbi*H`a9GGml)qS1 zdIEw?A}`jIsKR9I+DZ%^R)83qgA2EXA0=?Navd=B6QJ`a;rSok!cRvT}D}@ITU59 z9&yDRtDzsEM?wa{f%>TEHIUWR2d^GU&RN_R30wZ7vW4dmRT%{RpfEBHG*TXjlMGTH zBXbDlv8K_1M+q^}Um9!L4k!((mY{3n=sp*z+Y9?}!ZX(#}z{7+B}L;d=cI zsk?%v6{P)tMPQ?DNked1Nc!8d2+up@q)~gJ-{R6FLo18@9*%iVBt^t|%}U}&bNxBS zOXwJ%HeS%bO~|P8963mX5~3o9M^iCOeaXPVdhV(!%S9F)&(ovRPeZPn|b;?~yNTki~vEhY{4StK)qlGpl+)_L|YlO>|FC$Sb~lih|+^IYvfG&QAeW zyns4*!lNR-VX0xq4Onw1zY%=wy!Pkd=+`*1z%)|_HX2-fSR59?C1Y^&dS-nQ-$Ct- z&v6SJOS02E38!X{c+%so0Fe=Map(!J3lral*%+7&xK^-DMz1HUIM`kIj~OP;c?y>a z>LNk>Eu9^nw+Irtv0X}hs5}gj`BKW9Eu&SMw^H7#TNz(Jp&C1%Q`WKxynJZof2VOA z!oLRAVID7xeJg4kV5Ecs0LO6P&7RWDCr$x+_2r}ZID;!lyq?k}laq7}t*MsLUcHU4 zajNiL7{AT&yEuMZ<9F$6gW10*k@gwtYZTR#2QPp2>5YWEqw7KXL4R90v}1D&0ZY#ll81iqH-J_i}#`sX^tkj16kdAUN`R9%n%y8NgNIStl9CqN#}0I z-}A*dgdnOWO$s^lQ$^)GyU@g8fey<$tmn1sgfutu){!G4j^V8si6f+dgcoNmeO|HW4scRXK^A|FQ~}Azd(LnOqsun0phl z%SA<_nTpfd-m=&ln_61l-^nn@MzGoe@ITk?*?V|c6%sX6`ZlI2v>$h4sc!T2WWF};R zfG%V;;wgF57gg(RSrPnRADKy&%8=EK*;Z*`sw=!L%@01ld38{2b|77uy7L3Md^NiN zBVT+v#TRdaBO8PFuL=(+fK^y812KW4n?~h{ZC0LC z3N`(?C&pLL{l@&N;viahV*ICBgTsDAmS#HWoNSX^PkayuTZLCak4n%$W+~8uWP3)6 zpy3f>W@Pm89`Kma0~xzrcqbmReY+*RDGO1?+rfD-@m>I?2~1-Kv#2nUD;!k_i~JhT zVisT(ehxfQg;fGpfv|RK;%_Ij+L@n&?1ecry#=%`;!U$7XXq`nGT;j4bbxKn%(nBn5ChGo+WRAEdick%Ye_J_X>KKJTrZM^a8_mmCEzf#J^DD zi*x`oq1LdpQ+jAerEwj77a!W`iz9bvvsKJ|7QTFWSl)dJdgZuEn`xt$51=20<f!}beg!EMir)rRWifF3lGS-sXE$)X8)?uUtj<}HywjKHbbGM&v`NW3_%zCiL*^xK z+@{8}N1K|wUkm+IAh&}8e*gCVV0mhU#=Qp350NbmvP2fn*|RUW^1LXL2I5r=h5N)Z z!hEqOkg&OiG*TG}FlY~CeFS-@2<(tE3OOk$8uXbo&eIW%jruvU1uVe{(8(R=ug5ec z?^G_3qWL6?zf?wC*3e8!DzBsrxvU`f58h<=Lr8W-^d&x|CS~BSK2Uc~aip@@f>cG8 zOE0Ow_R_wrz{kIm-=ScH?hZzf4&LL5WKmtxsARk>x7+#rl$S>uE&rA_UGCl9@8P-w z>M^2wlKZ&q!hHntR3aEChwXL;PF)9AOH=JCRP12qG*>AXS@l$q;fJ9LH1=sOC#zHX zyF^Jk5JP;QcJu>)x|3zbOcGg&A}P5iNIu1v1W3jN$&w&JK@f__0;%MjD~B|=_&ap^ zXFSP|0hWq3)b`TE6G7RmA*oKama$}H-ibBWN44oDaLgxG!O?BQh5?{4C^_%wj$!Er z1*Vg~^^LKRf0@5qDp!DE8#RMGAQ+8rN#Ez2fT2?9n+doEVHK3|+o&I>s{*2l=Yf() zTd_WF9z0p`ynB5=9JiD_c!({}6SsgQt#qF*$)x>pkGyRk!P++B1o1o?Rla;m{wzKu z!4n@7Af}2t1g4@+TvUXeA@(GdCW>e!hcCGSU~8ssoS0DK*Pztzje_yN$fEp@Q8a#M zSjKPdlJSjg8^5v3#(&$6@n80W^K0jfV>`<$wq3N#tF;x!sn(9Ek?W^6mvLRxz!eg% zP0|i_(+e`uD^I> None: - """Create directories for CWL outputs""" - cwl_path = self.PATH.joinpath("cwl_adapters") - cwl_path.mkdir(parents=True, exist_ok=True) - workflow_path = self.PATH.joinpath("outputs").resolve() - workflow_path.mkdir(exist_ok=True) - return cwl_path, workflow_path - - def _clean(self) -> None: - """Cleaning of redundant directories generating on running CWL""" - logger.info("Cleaning directories!!!") - destination_path = self.workflow_path.joinpath("experiment") - dir_names = ("autogenerated", "cachedir", "RUNS", "provenance") - for i, d in zip(self.wic_path.iterdir(), self.PATH.iterdir()): - if i.name.endswith(dir_names): - shutil.rmtree(d) - if d.name.endswith(dir_names): - shutil.rmtree(d) - - for d in destination_path.iterdir(): - if d.name.endswith("cwl_adapters"): - shutil.rmtree(d) - for d in self.PATH.iterdir(): - if d.name.endswith("cwl_adapters"): - shutil.move(d, destination_path) - - return - - def _move_outputs(self) -> None: - """Transfer outputs from the WIC directory to the workflow path""" - logger.info("Move outputs to workflow path!!!") - for d in self.wic_path.iterdir(): - if d.name.endswith("outdir"): - shutil.move(d, self.workflow_path) - return - - def _camel(self, name: str) -> str: - """Convert plugin name to 
camel case.""" - name = re.sub(r"(_|-)+", " ", name).title().replace(" ", "") - return "".join([name[0].upper(), name[1:]]) - - def _string_after_period(self, x): - """Get a string after period.""" - match = re.search(r"\.(.*)", x) - if match: - # Get the part after the period - return f".*.{match.group(1)}" - else: - return "" - - def _add_backslash_before_parentheses(self, x): - """Add backslash to generate ff_pattern and df_pattern""" - # Define the regular expression pattern to match parenthesis - pattern_1 = r"(\()|(\))" - # Use re.sub() to add a backslash before starting and finishing parenthesis - result = re.sub(pattern_1, r"\\\1\2", x) - pattern_2 = r"\d" - result = ( - result.split("_c")[0] - + "_c{c:d}" - + re.sub(pattern_2, "", result.split("_c")[1]) - ) - return result - - def create_step(self, url: str) -> api.Step: - """Generate the plugin class name from the plugin name specified in the manifest""" - manifest = pp.submit_plugin(url) - plugin_version = str(manifest.version) - cwl_tool = pp.get_plugin(self._camel(manifest.name), plugin_version).save_cwl( - self.cwl_path.joinpath(f"{self._camel(manifest.name)}.cwl") - ) - step = api.Step(cwl_tool) - return step - - def manifest_urls(self, x: str) -> str: - """URLs on GitHub for plugin manifests""" - - urls = { - "bbbc_download": f"{GITHUB_TAG}/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json", - "file_renaming": f"{GITHUB_TAG}/hamshkhawar/image-tools/filepattern_filerenaming/formats/file-renaming-tool/plugin.json", - "ome_converter": f"{GITHUB_TAG}/hamshkhawar/image-tools/basecontainer_omecontainer/formats/ome-converter-plugin/plugin.json", - "estimate_flatfield": f"{GITHUB_TAG}/nishaq503/image-tools/fix/basic/regression/basic-flatfield-estimation-tool/plugin.json", - "apply_flatfield": f"{GITHUB_TAG}/hamshkhawar/image-tools/cast_images/transforms/images/apply-flatfield-tool/plugin.json", - "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/kaggle-nucleiseg/segmentation/kaggle-nuclei-segmentation-tool/plugin.json", - "ftl_plugin": f"{GITHUB_TAG}/nishaq503/image-tools/fix/ftl-label/transforms/images/polus-ftl-label-plugin/plugin.json", - "nyxus_plugin": f"{GITHUB_TAG}/hamshkhawar/image-tools/nyxus_manifest/features/nyxus-plugin/plugin.json", - } - return urls[x] - - def modify_cwl(self) -> None: - """Modify CWL to incorporate environmental variables and permission access""" - for f in list(self.cwl_path.rglob("*.cwl")): - if "cwl" in f.name: - try: - with Path.open(f, "r") as file: - config = yaml.safe_load(file) - config["requirements"]["NetworkAccess"] = { - "networkAccess": True - } - config["requirements"]["EnvVarRequirement"] = { - "envDef": {"HOME": "/home/polusai"} - } - with open(f, "w") as out_file: - yaml.dump(config, out_file) - except FileNotFoundError: - logger.info("Error: There was an unexpected error while processing the file.") - return - - def workflow(self) -> None: - """ - A CWL feature extraction pipeline. 
- """ - # BBBCDownload - bbbc = self.create_step(self.manifest_urls("bbbc_download")) - bbbc.name = self.name - bbbc.outDir = Path("bbbc.outDir") - - # Renaming plugin - rename = self.create_step(self.manifest_urls("file_renaming")) - rename.filePattern = self.file_pattern - rename.outFilePattern = self.out_file_pattern - rename.mapDirectory = self.map_directory - rename.inpDir = bbbc.outDir - rename.outDir = Path("rename.outDir") - - # OMEConverter - ome_converter = self.create_step(self.manifest_urls("ome_converter")) - ome_converter.filePattern = self._string_after_period(self.out_file_pattern) - ome_converter.fileExtension = ".ome.tif" - ome_converter.inpDir = rename.outDir - ome_converter.outDir = Path("ome_converter.outDir") - - if self.background_correction: - # Estimate Flatfield - estimate_flatfield = self.create_step(self.manifest_urls("estimate_flatfield")) - estimate_flatfield.inpDir = ome_converter.outDir - estimate_flatfield.filePattern = self.image_pattern - estimate_flatfield.groupBy = self.group_by - estimate_flatfield.getDarkfield = True - estimate_flatfield.outDir = Path("estimate_flatfield.outDir") - - # # Apply Flatfield - apply_flatfield = self.create_step(self.manifest_urls("apply_flatfield")) - apply_flatfield.imgDir = ome_converter.outDir - apply_flatfield.imgPattern = self.image_pattern - apply_flatfield.ffDir = estimate_flatfield.outDir - apply_flatfield.ffPattern = self.ff_pattern - apply_flatfield.dfPattern = self.df_pattern - apply_flatfield.outDir = Path("apply_flatfield.outDir") - apply_flatfield.dataType = True - - ## Kaggle Nuclei Segmentation - kaggle_nuclei_segmentation = self.create_step( - self.manifest_urls("kaggle_nuclei_segmentation") - ) - if self.background_correction: - kaggle_nuclei_segmentation.inpDir = apply_flatfield.outDir - else: - kaggle_nuclei_segmentation.inpDir = ome_converter.outDir - kaggle_nuclei_segmentation.filePattern = self.image_pattern - kaggle_nuclei_segmentation.outDir = Path("kaggle_nuclei_segmentation.outDir") - - ## FTL Label Plugin - ftl_plugin = self.create_step(self.manifest_urls("ftl_plugin")) - ftl_plugin.inpDir = kaggle_nuclei_segmentation.outDir - ftl_plugin.connectivity = 1 - ftl_plugin.binarizationThreshold = 0.5 - ftl_plugin.outDir = Path("ftl_plugin.outDir") - - # # ## Nyxus Plugin - nyxus_plugin = self.create_step(self.manifest_urls("nyxus_plugin")) - if self.background_correction: - nyxus_plugin.inpDir = apply_flatfield.outDir - else: - nyxus_plugin.inpDir = ome_converter.outDir - nyxus_plugin.segDir = ftl_plugin.outDir - nyxus_plugin.intPattern = self.image_pattern - nyxus_plugin.segPattern = self.seg_pattern - nyxus_plugin.features = self.features - nyxus_plugin.fileExtension = self.file_extension - nyxus_plugin.neighborDist = 5 - nyxus_plugin.pixelPerMicron = 1.0 - nyxus_plugin.outDir = Path("nyxus_plugin.outDir") - - logger.info("Initiating CWL Feature Extraction Workflow!!!") - if self.background_correction: - steps = [ - bbbc, - rename, - ome_converter, - estimate_flatfield, - apply_flatfield, - kaggle_nuclei_segmentation, - ftl_plugin, - nyxus_plugin - ] - else: - steps = [ - bbbc, - rename, - ome_converter, - kaggle_nuclei_segmentation, - ftl_plugin, - nyxus_plugin - ] - - workflow = api.Workflow(steps, "experiment", self.workflow_path) - # # Saving CLT for plugins - workflow._save_all_cwl(overwrite=True) - # # Adding environmental variables for bbbc_download and ome_converter plugin - self.modify_cwl() - # # # Save yaml to run CWL tool - workflow._save_yaml() - # Compile and run using WIC python API - 
workflow.compile(run_local=True, overwrite=False) - # # print(workflow.yml_path) - # # clean autognerated directories - self._clean() - self._move_outputs() - logger.info("Completed CWL Feature Extraction /Analysis Workflow.") - return - \ No newline at end of file diff --git a/workflows/cwl_nuclear_segmentation.py b/workflows/cwl_nuclear_segmentation.py deleted file mode 100644 index d7d264b..0000000 --- a/workflows/cwl_nuclear_segmentation.py +++ /dev/null @@ -1,262 +0,0 @@ -import wic.api.pythonapi as api -import polus.plugins as pp -from pathlib import Path -import yaml -import logging -import typing -import re -import shutil -import sys -sys.path.append('../') -from polus.image.workflows.utils import GITHUB_TAG - -# Initialize the logger -logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) - - -class CWLSegmentationWorkflow: - """ - A CWL Nuclear Segmentation pipeline. - - Attributes: - name : Name of imaging dataset of Broad Bioimage Benchmark Collection (https://bbbc.broadinstitute.org/image_sets). - file_pattern : Pattern for parsing raw filenames. - out_file_pattern : Preferred format for filenames - image_pattern : Pattern for parsing intensity image filenames after renaming when using map_directory - seg_pattern : Pattern use to parse segmentation image filenames - map_directory : Mapping of folder name - ff_pattern: The filename pattern employed to select flatfield components from the ffDir. - df_pattern:The filename pattern employed to select darkfield components from the ffDir - group_by: Grouping variables for filePattern - background_correction: Execute background correction - """ - def __init__( - self, - name: str, - file_pattern: str, - out_file_pattern: str, - image_pattern: str, - seg_pattern: str, - ff_pattern: typing.Optional[str] = '', - df_pattern: typing.Optional[str] = '', - group_by: typing.Optional[str] = '', - map_directory: typing.Optional[bool] = False, - background_correction: typing.Optional[bool] = False, - ): - self.name = name - self.file_pattern = file_pattern - self.out_file_pattern = out_file_pattern - self.map_directory = map_directory - self.ff_pattern = ff_pattern - self.df_pattern = df_pattern - self.group_by = group_by - self.wic_path = api._WIC_PATH - self.PATH = Path(self.wic_path.parent).joinpath("image-workflows") - self.cwl_path, self.workflow_path = self._create_directories() - self.image_pattern = image_pattern - self.seg_pattern = seg_pattern - self.background_correction = background_correction - - def _create_directories(self) -> None: - """Create directories for CWL outputs""" - cwl_path = self.PATH.joinpath("cwl_adapters") - cwl_path.mkdir(parents=True, exist_ok=True) - workflow_path = self.PATH.joinpath("outputs").resolve() - workflow_path.mkdir(exist_ok=True) - return cwl_path, workflow_path - - def _clean(self) -> None: - """Cleaning of redundant directories generating on running CWL""" - logger.info("Cleaning directories!!!") - destination_path = self.workflow_path.joinpath("experiment") - dir_names = ("autogenerated", "cachedir", "RUNS", "provenance", "cwl_adapters") - dir_list = [w for w in self.wic_path.iterdir() if w.is_dir() if w.name in dir_names] - for d in dir_list: - shutil.rmtree(d) - for d in destination_path.iterdir(): - if d.name.endswith("cwl_adapters"): - shutil.rmtree(d) - for d in self.PATH.iterdir(): - if d.name.endswith("cwl_adapters"): - shutil.move(d, destination_path) - - return - - def _move_outputs(self) -> None: - """Transfer outputs from the WIC directory to the workflow path""" - 
logger.info("Move outputs to workflow path!!!") - for d in self.wic_path.iterdir(): - if d.name.endswith("outdir"): - shutil.move(d, self.workflow_path) - return - - def _camel(self, name: str) -> str: - """Convert plugin name to camel case.""" - name = re.sub(r"(_|-)+", " ", name).title().replace(" ", "") - return "".join([name[0].upper(), name[1:]]) - - def _string_after_period(self, x): - """Get a string after period.""" - match = re.search(r"\.(.*)", x) - if match: - # Get the part after the period - return f".*.{match.group(1)}" - else: - return "" - - def _add_backslash_before_parentheses(self, x): - """Add backslash to generate ff_pattern and df_pattern""" - # Define the regular expression pattern to match parenthesis - pattern_1 = r"(\()|(\))" - # Use re.sub() to add a backslash before starting and finishing parenthesis - result = re.sub(pattern_1, r"\\\1\2", x) - pattern_2 = r"\d" - result = ( - result.split("_c")[0] - + "_c{c:d}" - + re.sub(pattern_2, "", result.split("_c")[1]) - ) - return result - - def create_step(self, url: str) -> api.Step: - """Generate the plugin class name from the plugin name specified in the manifest""" - manifest = pp.submit_plugin(url) - plugin_version = str(manifest.version) - cwl_tool = pp.get_plugin(self._camel(manifest.name), plugin_version).save_cwl( - self.cwl_path.joinpath(f"{self._camel(manifest.name)}.cwl") - ) - step = api.Step(cwl_tool) - return step - - def manifest_urls(self, x: str) -> str: - """URLs on GitHub for plugin manifests""" - urls = { - "bbbc_download": f"{GITHUB_TAG}/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json", - "file_renaming": f"{GITHUB_TAG}/hamshkhawar/image-tools/filepattern_filerenaming/formats/file-renaming-tool/plugin.json", - "ome_converter": f"{GITHUB_TAG}/hamshkhawar/image-tools/basecontainer_omecontainer/formats/ome-converter-plugin/plugin.json", - "estimate_flatfield": f"{GITHUB_TAG}/nishaq503/image-tools/fix/basic/regression/basic-flatfield-estimation-tool/plugin.json", - "apply_flatfield": f"{GITHUB_TAG}/hamshkhawar/image-tools/cast_images/transforms/images/apply-flatfield-tool/plugin.json", - "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/kaggle-nucleiseg/segmentation/kaggle-nuclei-segmentation-tool/plugin.json", - "ftl_plugin": f"{GITHUB_TAG}/nishaq503/image-tools/fix/ftl-label/transforms/images/polus-ftl-label-plugin/plugin.json" - } - return urls[x] - - def modify_cwl(self) -> None: - """Modify CWL to incorporate environmental variables and permission access""" - for f in list(self.cwl_path.rglob("*.cwl")): - if "cwl" in f.name: - try: - with Path.open(f, "r") as file: - config = yaml.safe_load(file) - config["requirements"]["NetworkAccess"] = { - "networkAccess": True - } - config["requirements"]["EnvVarRequirement"] = { - "envDef": {"HOME": "/home/polusai"} - } - with open(f, "w") as out_file: - yaml.dump(config, out_file) - except FileNotFoundError: - logger.info("Error: There was an unexpected error while processing the file.") - return - - def workflow(self) -> None: - """ - A CWL nuclear segmentation pipeline. 
- """ - # BBBCDownload - bbbc = self.create_step(self.manifest_urls("bbbc_download")) - bbbc.name = self.name - bbbc.outDir = Path("bbbc.outDir") - - # Renaming plugin - rename = self.create_step(self.manifest_urls("file_renaming")) - rename.filePattern = self.file_pattern - rename.outFilePattern = self.out_file_pattern - rename.mapDirectory = self.map_directory - rename.inpDir = bbbc.outDir - rename.outDir = Path("rename.outDir") - - - # OMEConverter - ome_converter = self.create_step(self.manifest_urls("ome_converter")) - ome_converter.filePattern = self._string_after_period(self.out_file_pattern) - ome_converter.fileExtension = ".ome.tif" - ome_converter.inpDir = rename.outDir - ome_converter.outDir = Path("ome_converter.outDir") - - if self.background_correction: - # Estimate Flatfield - estimate_flatfield = self.create_step(self.manifest_urls("estimate_flatfield")) - estimate_flatfield.inpDir = ome_converter.outDir - estimate_flatfield.filePattern = self.image_pattern - estimate_flatfield.groupBy = self.group_by - estimate_flatfield.getDarkfield = True - estimate_flatfield.outDir = Path("estimate_flatfield.outDir") - - # # Apply Flatfield - apply_flatfield = self.create_step(self.manifest_urls("apply_flatfield")) - apply_flatfield.imgDir = ome_converter.outDir - apply_flatfield.imgPattern = self.image_pattern - apply_flatfield.ffDir = estimate_flatfield.outDir - apply_flatfield.ffPattern = self.ff_pattern - apply_flatfield.dfPattern = self.df_pattern - apply_flatfield.outDir = Path("apply_flatfield.outDir") - apply_flatfield.dataType = True - - ## Kaggle Nuclei Segmentation - kaggle_nuclei_segmentation = self.create_step( - self.manifest_urls("kaggle_nuclei_segmentation") - ) - if self.background_correction: - kaggle_nuclei_segmentation.inpDir = apply_flatfield.outDir - else: - kaggle_nuclei_segmentation.inpDir = ome_converter.outDir - - kaggle_nuclei_segmentation.filePattern = self.image_pattern - kaggle_nuclei_segmentation.outDir = Path("kaggle_nuclei_segmentation.outDir") - - ## FTL Label Plugin - ftl_plugin = self.create_step(self.manifest_urls("ftl_plugin")) - ftl_plugin.inpDir = kaggle_nuclei_segmentation.outDir - ftl_plugin.connectivity = 1 - ftl_plugin.binarizationThreshold = 0.5 - ftl_plugin.outDir = Path("ftl_plugin.outDir") - - logger.info("Initiating CWL Nuclear Segmentation Workflow!!!") - if self.background_correction: - steps = [ - bbbc, - rename, - ome_converter, - estimate_flatfield, - apply_flatfield, - kaggle_nuclei_segmentation, - ftl_plugin - ] - else: - steps = [ - bbbc, - rename, - ome_converter, - kaggle_nuclei_segmentation, - ftl_plugin] - - - - workflow = api.Workflow(steps, "experiment", self.workflow_path) - # # Saving CLT for plugins - workflow._save_all_cwl(overwrite=True) - # # Adding environmental variables for bbbc_download and ome_converter plugin - self.modify_cwl() - # # # Save yaml to run CWL tool - workflow._save_yaml() - # Compile and run using WIC python API - workflow.compile(run_local=True, overwrite=False) - # # print(workflow.yml_path) - # # clean autognerated directories - self._clean() - self._move_outputs() - logger.info("Completed CWL nuclear segmentation workflow.") - return \ No newline at end of file From 2734ca8e33a8dd1cb0f7c6c596ac94cf9d668455 Mon Sep 17 00:00:00 2001 From: hamshkhawar Date: Mon, 1 Apr 2024 17:18:31 -0500 Subject: [PATCH 03/11] updated conditional workflows --- README.md | 105 +++++++ configuration/__init__.py | 0 configuration/analysis/BBBC001.yml | 14 + configuration/analysis/BBBC039.yml | 13 + 
configuration/analysis/__init__.py | 0 configuration/analysis/sample.yml | 13 + configuration/segmentation/BBBC001.yml | 11 + configuration/segmentation/BBBC039.yml | 11 + configuration/segmentation/__init__.py | 0 configuration/segmentation/sample.yml | 12 + .../basic-flatfield-estimation.cwl | 0 .../bbbcdownload.cwl | 0 .../file-renaming.cwl | 0 .../image_assembler.cwl | 0 {cwl_adapters => cwl-adapters}/montage.cwl | 0 .../ome-converter.cwl | 0 .../precompute_slide.cwl | 0 pyproject.toml | 38 +++ src/polus/image/workflows/__init__.py | 0 src/polus/image/workflows/__main__.py | 65 ++++ src/polus/image/workflows/utils.py | 68 +++++ workflows/__init__.py | 0 workflows/cwl_analysis.py | 289 ++++++++++++++++++ workflows/cwl_nuclear_segmentation.py | 262 ++++++++++++++++ 24 files changed, 901 insertions(+) create mode 100644 README.md create mode 100644 configuration/__init__.py create mode 100644 configuration/analysis/BBBC001.yml create mode 100644 configuration/analysis/BBBC039.yml create mode 100644 configuration/analysis/__init__.py create mode 100644 configuration/analysis/sample.yml create mode 100644 configuration/segmentation/BBBC001.yml create mode 100644 configuration/segmentation/BBBC039.yml create mode 100644 configuration/segmentation/__init__.py create mode 100644 configuration/segmentation/sample.yml rename {cwl_adapters => cwl-adapters}/basic-flatfield-estimation.cwl (100%) rename {cwl_adapters => cwl-adapters}/bbbcdownload.cwl (100%) rename {cwl_adapters => cwl-adapters}/file-renaming.cwl (100%) rename {cwl_adapters => cwl-adapters}/image_assembler.cwl (100%) rename {cwl_adapters => cwl-adapters}/montage.cwl (100%) rename {cwl_adapters => cwl-adapters}/ome-converter.cwl (100%) rename {cwl_adapters => cwl-adapters}/precompute_slide.cwl (100%) create mode 100644 pyproject.toml create mode 100644 src/polus/image/workflows/__init__.py create mode 100644 src/polus/image/workflows/__main__.py create mode 100644 src/polus/image/workflows/utils.py create mode 100644 workflows/__init__.py create mode 100644 workflows/cwl_analysis.py create mode 100644 workflows/cwl_nuclear_segmentation.py diff --git a/README.md b/README.md new file mode 100644 index 0000000..a9bef17 --- /dev/null +++ b/README.md @@ -0,0 +1,105 @@ +# Common Workflow Language (CWL) Workflows + +CWL feature extraction workflow for imaging dataset + +## Workflow Steps: + +create a [Conda](https://conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#activating-an-environment) environment using python = ">=3.9,<3.12" + +#### 1. Install polus-plugins. + +- clone a image-tools repository +`git clone https://github.com/camilovelezr/image-tools.git ../` +- cd `image-tools` +- create a new branch +`git checkout -b hd2 remotes/origin/hd2` +- `pip install .` + +#### 2. Install workflow-inference-compiler. +- clone a workflow-inference-compiler repository +`git clone https://github.com/camilovelezr/workflow-inference-compiler.git ../` +- cd `workflow-inference-compiler` +- create a new branch +`git checkout -b hd2 remotes/origin/hd2` +- `pip install -e ".[all]"` + +#### 3. Install image-workflow. +- cd `image-workflows` +- poetry install + +#### Note: +Ensure that the [docker-desktop](https://www.docker.com/products/docker-desktop/) is running in the background. 
To verify that it's operational, you can use the following command: +`docker run -d -p 80:80 docker/getting-started` +This command will launch the `docker/getting-started container` in detached mode (-d flag), exposing port 80 on your local machine (-p 80:80). It's a simple way to test if Docker Desktop is functioning correctly. + +## Details +This workflow integrates eight distinct plugins, starting from data retrieval from [Broad Bioimage Benchmark Collection](https://bbbc.broadinstitute.org/), renaming files, correcting uneven illumination, segmenting nuclear objects, and culminating in the extraction of features from identified objects + +Below are the specifics of the plugins employed in the workflow +1. [bbbc-download-plugin](https://github.com/saketprem/polus-plugins/tree/bbbc_download/utils/bbbc-download-plugin) +2. [file-renaming-tool](https://github.com/PolusAI/image-tools/tree/master/formats/file-renaming-tool) +3. [ome-converter-tool](https://github.com/PolusAI/image-tools/tree/master/formats/ome-converter-tool) +4. [basic-flatfield-estimation-tool](https://github.com/PolusAI/image-tools/tree/master/regression/basic-flatfield-estimation-tool) +5. [apply-flatfield-tool](https://github.com/PolusAI/image-tools/tree/master/transforms/images/apply-flatfield-tool) +6. [kaggle-nuclei-segmentation](https://github.com/hamshkhawar/image-tools/tree/kaggle-nuclei_seg/segmentation/kaggle-nuclei-segmentation) +7. [polus-ftl-label-plugin](https://github.com/hamshkhawar/image-tools/tree/kaggle-nuclei_seg/transforms/images/polus-ftl-label-plugin) +8. [nyxus-plugin](https://github.com/PolusAI/image-tools/tree/kaggle-nuclei_seg/features/nyxus-plugin) + +## Execute CWL workflows +Three different CWL workflows can be executed for specific datasets +1. segmentation +2. analysis + +During the execution of the segmentation workflow, `1 to 7` plugins will be utilized. However, for executing the analysis workflow, `1 to 8` plugins will be employed. +If a user wishes to execute a workflow for a new dataset, they can utilize a sample YAML file to input parameter values. This YAML file can be saved in the desired subdirectory of the `configuration` folder with the name `dataset.yml` + +If a user opts to run a workflow without background correction, they can set `background_correction` to false. In this case, the workflow will skip steps `4 and 5` + +`python -m polus.image.workflows --name="BBBC001" --workflow=analysis` + +A directory named `outputs` is generated, encompassing CLTs for each plugin, YAML files, and all outputs are stored within the `outdir` directory. +``` +outputs +├── experiment +│ └── cwl_adapters +| experiment.cwl +| experiment.yml +| +└── outdir + └── experiment + ├── step 1 BbbcDownload + │ └── outDir + │ └── bbbc.outDir + │ └── BBBC + │ └── BBBC039 + │ └── raw + │ ├── Ground_Truth + │ │ ├── masks + │ │ └── metadata + │ └── Images + │ └── images + ├── step 2 FileRenaming + │ └── outDir + │ └── rename.outDir + ├── step 3 OmeConverter + │ └── outDir + │ └── ome_converter.outDir + ├── step 4 BasicFlatfieldEstimation + │ └── outDir + │ └── estimate_flatfield.outDir + ├── step 5 ApplyFlatfield + │ └── outDir + │ └── apply_flatfield.outDir + ├── step 6 KaggleNucleiSegmentation + │ └── outDir + │ └── kaggle_nuclei_segmentation.outDir + ├── step 7 FtlLabel + │ └── outDir + │ └── ftl_plugin.outDir + └── step 8 NyxusPlugin + └── outDir + └── nyxus_plugin.outDir + +``` +#### Note: +Step 7 and step 8 are executed only in the case of the `analysis` workflow. 
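As a rough sketch of the conditional logic described above (mirroring the `background_correction` branch in `workflows/cwl_analysis.py`), the flatfield steps are appended only when background correction is enabled; the step names below are illustrative labels, not CWL identifiers:

```python
def analysis_steps(background_correction: bool) -> list[str]:
    """Return the ordered plugin steps for the analysis workflow."""
    steps = ["bbbc_download", "file_renaming", "ome_converter"]
    if background_correction:
        # Steps 4 and 5 run only when background correction is requested.
        steps += ["basic_flatfield_estimation", "apply_flatfield"]
    steps += ["kaggle_nuclei_segmentation", "ftl_label", "nyxus"]
    return steps


print(analysis_steps(background_correction=False))
# ['bbbc_download', 'file_renaming', 'ome_converter',
#  'kaggle_nuclei_segmentation', 'ftl_label', 'nyxus']
```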
\ No newline at end of file diff --git a/configuration/__init__.py b/configuration/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/configuration/analysis/BBBC001.yml b/configuration/analysis/BBBC001.yml new file mode 100644 index 0000000..7efe214 --- /dev/null +++ b/configuration/analysis/BBBC001.yml @@ -0,0 +1,14 @@ +--- +name : BBBC001 +file_pattern : /.*/.*/.*/Images/.*/.*_{row:c}{col:dd}f{f:dd}d{channel:d}.tif +out_file_pattern : x{row:dd}_y{col:dd}_p{f:dd}_c{channel:d}.tif +image_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c{c:d}.ome.tif +seg_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c0.ome.tif +ff_pattern: "x00_y03_p0\\(0-5\\)_c{c:d}_flatfield.ome.tif" +df_pattern: "x00_y03_p0\\(0-5\\)_c{c:d}_darkfield.ome.tif" +group_by: c +map_directory: false +features: ALL +file_extension: pandas +background_correction: false + diff --git a/configuration/analysis/BBBC039.yml b/configuration/analysis/BBBC039.yml new file mode 100644 index 0000000..308a274 --- /dev/null +++ b/configuration/analysis/BBBC039.yml @@ -0,0 +1,13 @@ +--- +name : BBBC039 +file_pattern : /.*/.*/.*/Images/.*/.*_{row:c}{col:dd}_s{s:d}_w{channel:d}.*.tif +out_file_pattern : x{row:dd}_y{col:dd}_p{s:dd}_c{channel:d}.tif +image_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c{c:d}.ome.tif +seg_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c1.ome.tif +ff_pattern: "x\\(00-15\\)_y\\(01-24\\)_p0\\(1-9\\)_c{c:d}_flatfield.ome.tif" +df_pattern: "x\\(00-15\\)_y\\(01-24\\)_p0\\(1-9\\)_c{c:d}_darkfield.ome.tif" +group_by: c +map_directory: false +features: "ALL_INTENSITY" +file_extension: pandas +background_correction: false \ No newline at end of file diff --git a/configuration/analysis/__init__.py b/configuration/analysis/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/configuration/analysis/sample.yml b/configuration/analysis/sample.yml new file mode 100644 index 0000000..47ffb02 --- /dev/null +++ b/configuration/analysis/sample.yml @@ -0,0 +1,13 @@ +--- +name : +file_pattern : +out_file_pattern : +image_pattern: +seg_pattern: +ff_pattern: +df_pattern: +group_by: +map_directory: +features: +file_extension: +background_correction: \ No newline at end of file diff --git a/configuration/segmentation/BBBC001.yml b/configuration/segmentation/BBBC001.yml new file mode 100644 index 0000000..4ed7653 --- /dev/null +++ b/configuration/segmentation/BBBC001.yml @@ -0,0 +1,11 @@ +--- +name : BBBC001 +file_pattern : /.*/.*/.*/Images/.*/.*_{row:c}{col:dd}f{f:dd}d{channel:d}.tif +out_file_pattern : x{row:dd}_y{col:dd}_p{f:dd}_c{channel:d}.tif +image_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c{c:d}.ome.tif +seg_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c0.ome.tif +ff_pattern: "x00_y03_p0\\(0-5\\)_c{c:d}_flatfield.ome.tif" +df_pattern: "x00_y03_p0\\(0-5\\)_c{c:d}_darkfield.ome.tif" +group_by: c +map_directory: false +background_correction: false \ No newline at end of file diff --git a/configuration/segmentation/BBBC039.yml b/configuration/segmentation/BBBC039.yml new file mode 100644 index 0000000..1884878 --- /dev/null +++ b/configuration/segmentation/BBBC039.yml @@ -0,0 +1,11 @@ +--- +name : BBBC039 +file_pattern : /.*/.*/.*/Images/.*/.*_{row:c}{col:dd}_s{s:d}_w{channel:d}.*.tif +out_file_pattern : x{row:dd}_y{col:dd}_p{s:dd}_c{channel:d}.tif +image_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c{c:d}.ome.tif +seg_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c1.ome.tif +ff_pattern: "x\\(00-15\\)_y\\(01-24\\)_p0\\(1-9\\)_c{c:d}_flatfield.ome.tif" +df_pattern: "x\\(00-15\\)_y\\(01-24\\)_p0\\(1-9\\)_c{c:d}_darkfield.ome.tif" +group_by: c +map_directory: false +background_correction: false \ No 
newline at end of file diff --git a/configuration/segmentation/__init__.py b/configuration/segmentation/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/configuration/segmentation/sample.yml b/configuration/segmentation/sample.yml new file mode 100644 index 0000000..ecc82e1 --- /dev/null +++ b/configuration/segmentation/sample.yml @@ -0,0 +1,12 @@ +--- +name : +file_pattern : +out_file_pattern : +image_pattern: +seg_pattern: +ff_pattern: +df_pattern: +group_by: +map_directory: +features: +file_extension: \ No newline at end of file diff --git a/cwl_adapters/basic-flatfield-estimation.cwl b/cwl-adapters/basic-flatfield-estimation.cwl similarity index 100% rename from cwl_adapters/basic-flatfield-estimation.cwl rename to cwl-adapters/basic-flatfield-estimation.cwl diff --git a/cwl_adapters/bbbcdownload.cwl b/cwl-adapters/bbbcdownload.cwl similarity index 100% rename from cwl_adapters/bbbcdownload.cwl rename to cwl-adapters/bbbcdownload.cwl diff --git a/cwl_adapters/file-renaming.cwl b/cwl-adapters/file-renaming.cwl similarity index 100% rename from cwl_adapters/file-renaming.cwl rename to cwl-adapters/file-renaming.cwl diff --git a/cwl_adapters/image_assembler.cwl b/cwl-adapters/image_assembler.cwl similarity index 100% rename from cwl_adapters/image_assembler.cwl rename to cwl-adapters/image_assembler.cwl diff --git a/cwl_adapters/montage.cwl b/cwl-adapters/montage.cwl similarity index 100% rename from cwl_adapters/montage.cwl rename to cwl-adapters/montage.cwl diff --git a/cwl_adapters/ome-converter.cwl b/cwl-adapters/ome-converter.cwl similarity index 100% rename from cwl_adapters/ome-converter.cwl rename to cwl-adapters/ome-converter.cwl diff --git a/cwl_adapters/precompute_slide.cwl b/cwl-adapters/precompute_slide.cwl similarity index 100% rename from cwl_adapters/precompute_slide.cwl rename to cwl-adapters/precompute_slide.cwl diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..85287fa --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,38 @@ +[tool.poetry] +name = "polus-image-workflows" +version = "0.1.1-dev1" +description = "Build and execute pipelines of polus plugins on Compute." 
+authors = ["Hamdah Shafqat Abbasi "] +readme = "README.md" +packages = [{include = "polus", from = "src"}] + +[tool.poetry.dependencies] +python = ">=3.9,<3.12" +typer = "^0.9.0" +pyyaml = "^6.0.1" +pydantic = "^2.6.1" +cwl-utils="0.31" +toil="^5.12" +polus-plugins = {path = "../image-tools", develop = true} +workflow-inference-compiler = {path = "../workflow-inference-compiler", develop = true} + +[tool.poetry.group.dev.dependencies] +jupyter = "^1.0.0" +nbconvert = "^7.11.0" +pytest = "^7.4.4" +bump2version = "^1.0.1" +pre-commit = "^3.3.3" +black = "^23.3.0" +ruff = "^0.0.274" +mypy = "^1.4.0" +pytest-xdist = "^3.3.1" +pytest-sugar = "^0.9.7" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +addopts = [ + "--import-mode=importlib", +] \ No newline at end of file diff --git a/src/polus/image/workflows/__init__.py b/src/polus/image/workflows/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/polus/image/workflows/__main__.py b/src/polus/image/workflows/__main__.py new file mode 100644 index 0000000..863f1ef --- /dev/null +++ b/src/polus/image/workflows/__main__.py @@ -0,0 +1,65 @@ +"""CWL Workflow.""" +import logging +import typer +from pathlib import Path +from polus.image.workflows.utils import LoadYaml +from workflows.cwl_analysis import CWLAnalysisWorkflow +from workflows.cwl_nuclear_segmentation import CWLSegmentationWorkflow +from pathlib import Path + + +app = typer.Typer() + +# Initialize the logger +logging.basicConfig( + format="%(asctime)s - %(name)-8s - %(levelname)-8s - %(message)s", + datefmt="%d-%b-%y %H:%M:%S", +) +logger = logging.getLogger("WIC Python API") +logger.setLevel(logging.INFO) + + +@app.command() +def main( + name: str = typer.Option( + ..., + "--name", + "-n", + help="Name of imaging dataset of Broad Bioimage Benchmark Collection (https://bbbc.broadinstitute.org/image_sets)" + ), + workflow: str = typer.Option( + ..., + "--workflow", + "-w", + help="Name of cwl workflow" + ) +) -> None: + + """Execute CWL Workflow.""" + + logger.info(f"name = {name}") + logger.info(f"workflow = {workflow}") + + config_path = Path(__file__).parent.parent.parent.parent.parent.joinpath(f"configuration/{workflow}/{name}.yml") + print(config_path) + + + model = LoadYaml(workflow=workflow, config_path=config_path) + params = model.parse_yaml() + + if workflow == "analysis": + logger.info(f"Executing {workflow}!!!") + model = CWLAnalysisWorkflow(**params) + model.workflow() + + if workflow == "segmentation": + logger.info(f"Executing {workflow}!!!") + model = CWLSegmentationWorkflow(**params) + model.workflow() + + + logger.info("Completed CWL workflow!!!") + + +if __name__ == "__main__": + app() \ No newline at end of file diff --git a/src/polus/image/workflows/utils.py b/src/polus/image/workflows/utils.py new file mode 100644 index 0000000..7daa9b7 --- /dev/null +++ b/src/polus/image/workflows/utils.py @@ -0,0 +1,68 @@ +import pydantic +from pathlib import Path +from typing import Dict +from typing import Union +import yaml + + +GITHUB_TAG = "https://raw.githubusercontent.com" + + +ANALYSIS_KEYS = ["name", "file_pattern", "out_file_pattern", "image_pattern", "seg_pattern", "ff_pattern", "df_pattern", "group_by", "map_directory", "features", "file_extension", "background_correction"] +SEG_KEYS = ["name", "file_pattern", "out_file_pattern", "image_pattern", "seg_pattern", "ff_pattern", "df_pattern", "group_by", "map_directory", "background_correction"] + + +class 
DataModel(pydantic.BaseModel): + data: Dict[str, Dict[str, Union[str, bool]]] + + +class LoadYaml(pydantic.BaseModel): + """Validation of Dataset yaml.""" + workflow:str + config_path: Union[str, Path] + + @pydantic.validator("config_path", pre=True) + @classmethod + def validate_path(cls, value: Union[str, Path]) -> Union[str, Path]: + """Validation of Paths.""" + if not Path(value).exists(): + msg = f"{value} does not exist! Please do check it again" + raise ValueError(msg) + if isinstance(value, str): + return Path(value) + return value + + @pydantic.validator("workflow", pre=True) + @classmethod + def validate_workflow_name(cls, value: str) -> str: + """Validation of workflow name.""" + if not value in ["analysis", "segmentation", "visualization"]: + msg = f"Please choose a valid workflow name i-e analysis segmentation visualization" + raise ValueError(msg) + return value + + def parse_yaml(self) -> Dict[str, Union[str, bool]]: + """Parsing yaml configuration file for each dataset.""" + + with open(f'{self.config_path}','r') as f: + data = yaml.safe_load(f) + + check_values = any([v for _, v in data.items() if f is None]) + + if check_values is True: + msg = f"All the parameters are not defined! Please do check it again" + raise ValueError(msg) + + + if self.workflow == "analysis": + if data['background_correction'] == True: + if list(data.keys()) != ANALYSIS_KEYS: + msg = f"Please do check parameters again for analysis workflow!!" + raise ValueError(msg) + + if self.workflow == "segmentation": + if data['background_correction'] == True: + if list(data.keys()) != SEG_KEYS: + msg = f"Please do check parameters again for segmentation workflow!!" + raise ValueError(msg) + return data diff --git a/workflows/__init__.py b/workflows/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/workflows/cwl_analysis.py b/workflows/cwl_analysis.py new file mode 100644 index 0000000..345c0af --- /dev/null +++ b/workflows/cwl_analysis.py @@ -0,0 +1,289 @@ +import wic.api.pythonapi as api +import polus.plugins as pp +from pathlib import Path +import yaml +import logging +import re +import shutil +import typing +import sys +sys.path.append('../') +from polus.image.workflows.utils import GITHUB_TAG + +# Initialize the logger +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + + +class CWLAnalysisWorkflow: + """ + A CWL feature extraction or Analysis pipeline. + + Attributes: + name : Name of imaging dataset of Broad Bioimage Benchmark Collection (https://bbbc.broadinstitute.org/image_sets). + file_pattern : Pattern for parsing raw filenames. + out_file_pattern : Preferred format for filenames + image_pattern : Pattern for parsing intensity image filenames after renaming when using map_directory + seg_pattern : Pattern use to parse segmentation image filenames + map_directory : Mapping of folder name + ff_pattern: The filename pattern employed to select flatfield components from the ffDir. 
+ df_pattern:The filename pattern employed to select darkfield components from the ffDir + group_by: Grouping variables for filePattern + features:Features from Nyxus (https://github.com/PolusAI/nyxus/) that need extraction + file_extension: Output file format + background_correction: Execute background correction + """ + def __init__( + self, + name: str, + file_pattern: str, + out_file_pattern: str, + image_pattern: str, + seg_pattern: str, + ff_pattern: typing.Optional[str] = '', + df_pattern: typing.Optional[str] = '', + group_by: typing.Optional[str] = '', + map_directory: typing.Optional[bool] = False, + features: typing.Optional[str]="ALL", + file_extension: typing.Optional[str]="arrowipc", + background_correction: typing.Optional[bool] = False + + ): + self.name = name + self.file_pattern = file_pattern + self.out_file_pattern = out_file_pattern + self.ff_pattern = ff_pattern + self.df_pattern = df_pattern + self.group_by = group_by + self.wic_path = api._WIC_PATH + self.PATH = Path(self.wic_path.parent).joinpath("image-workflows") + self.cwl_path, self.workflow_path = self._create_directories() + self.image_pattern = image_pattern + self.seg_pattern = seg_pattern + self.features = features + self.file_extension = file_extension + self.map_directory = map_directory + self.background_correction = background_correction + + def _create_directories(self) -> None: + """Create directories for CWL outputs""" + cwl_path = self.PATH.joinpath("cwl_adapters") + cwl_path.mkdir(parents=True, exist_ok=True) + workflow_path = self.PATH.joinpath("outputs").resolve() + workflow_path.mkdir(exist_ok=True) + return cwl_path, workflow_path + + def _clean(self) -> None: + """Cleaning of redundant directories generating on running CWL""" + logger.info("Cleaning directories!!!") + destination_path = self.workflow_path.joinpath("experiment") + dir_names = ("autogenerated", "cachedir", "RUNS", "provenance") + for i, d in zip(self.wic_path.iterdir(), self.PATH.iterdir()): + if i.name.endswith(dir_names): + shutil.rmtree(d) + if d.name.endswith(dir_names): + shutil.rmtree(d) + + for d in destination_path.iterdir(): + if d.name.endswith("cwl_adapters"): + shutil.rmtree(d) + for d in self.PATH.iterdir(): + if d.name.endswith("cwl_adapters"): + shutil.move(d, destination_path) + + return + + def _move_outputs(self) -> None: + """Transfer outputs from the WIC directory to the workflow path""" + logger.info("Move outputs to workflow path!!!") + for d in self.wic_path.iterdir(): + if d.name.endswith("outdir"): + shutil.move(d, self.workflow_path) + return + + def _camel(self, name: str) -> str: + """Convert plugin name to camel case.""" + name = re.sub(r"(_|-)+", " ", name).title().replace(" ", "") + return "".join([name[0].upper(), name[1:]]) + + def _string_after_period(self, x): + """Get a string after period.""" + match = re.search(r"\.(.*)", x) + if match: + # Get the part after the period + return f".*.{match.group(1)}" + else: + return "" + + def _add_backslash_before_parentheses(self, x): + """Add backslash to generate ff_pattern and df_pattern""" + # Define the regular expression pattern to match parenthesis + pattern_1 = r"(\()|(\))" + # Use re.sub() to add a backslash before starting and finishing parenthesis + result = re.sub(pattern_1, r"\\\1\2", x) + pattern_2 = r"\d" + result = ( + result.split("_c")[0] + + "_c{c:d}" + + re.sub(pattern_2, "", result.split("_c")[1]) + ) + return result + + def create_step(self, url: str) -> api.Step: + """Generate the plugin class name from the plugin name specified 
in the manifest""" + manifest = pp.submit_plugin(url) + plugin_version = str(manifest.version) + cwl_tool = pp.get_plugin(self._camel(manifest.name), plugin_version).save_cwl( + self.cwl_path.joinpath(f"{self._camel(manifest.name)}.cwl") + ) + step = api.Step(cwl_tool) + return step + + def manifest_urls(self, x: str) -> str: + """URLs on GitHub for plugin manifests""" + + urls = { + "bbbc_download": f"{GITHUB_TAG}/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json", + "file_renaming": f"{GITHUB_TAG}/hamshkhawar/image-tools/filepattern_filerenaming/formats/file-renaming-tool/plugin.json", + "ome_converter": f"{GITHUB_TAG}/hamshkhawar/image-tools/basecontainer_omecontainer/formats/ome-converter-plugin/plugin.json", + "estimate_flatfield": f"{GITHUB_TAG}/nishaq503/image-tools/fix/basic/regression/basic-flatfield-estimation-tool/plugin.json", + "apply_flatfield": f"{GITHUB_TAG}/hamshkhawar/image-tools/cast_images/transforms/images/apply-flatfield-tool/plugin.json", + "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/kaggle-nucleiseg/segmentation/kaggle-nuclei-segmentation-tool/plugin.json", + "ftl_plugin": f"{GITHUB_TAG}/nishaq503/image-tools/fix/ftl-label/transforms/images/polus-ftl-label-plugin/plugin.json", + "nyxus_plugin": f"{GITHUB_TAG}/hamshkhawar/image-tools/nyxus_manifest/features/nyxus-plugin/plugin.json", + } + return urls[x] + + def modify_cwl(self) -> None: + """Modify CWL to incorporate environmental variables and permission access""" + for f in list(self.cwl_path.rglob("*.cwl")): + if "cwl" in f.name: + try: + with Path.open(f, "r") as file: + config = yaml.safe_load(file) + config["requirements"]["NetworkAccess"] = { + "networkAccess": True + } + config["requirements"]["EnvVarRequirement"] = { + "envDef": {"HOME": "/home/polusai"} + } + with open(f, "w") as out_file: + yaml.dump(config, out_file) + except FileNotFoundError: + logger.info("Error: There was an unexpected error while processing the file.") + return + + def workflow(self) -> None: + """ + A CWL feature extraction pipeline. 
+ """ + # BBBCDownload + bbbc = self.create_step(self.manifest_urls("bbbc_download")) + bbbc.name = self.name + bbbc.outDir = Path("bbbc.outDir") + + # Renaming plugin + rename = self.create_step(self.manifest_urls("file_renaming")) + rename.filePattern = self.file_pattern + rename.outFilePattern = self.out_file_pattern + rename.mapDirectory = self.map_directory + rename.inpDir = bbbc.outDir + rename.outDir = Path("rename.outDir") + + # OMEConverter + ome_converter = self.create_step(self.manifest_urls("ome_converter")) + ome_converter.filePattern = self._string_after_period(self.out_file_pattern) + ome_converter.fileExtension = ".ome.tif" + ome_converter.inpDir = rename.outDir + ome_converter.outDir = Path("ome_converter.outDir") + + if self.background_correction: + # Estimate Flatfield + estimate_flatfield = self.create_step(self.manifest_urls("estimate_flatfield")) + estimate_flatfield.inpDir = ome_converter.outDir + estimate_flatfield.filePattern = self.image_pattern + estimate_flatfield.groupBy = self.group_by + estimate_flatfield.getDarkfield = True + estimate_flatfield.outDir = Path("estimate_flatfield.outDir") + + # # Apply Flatfield + apply_flatfield = self.create_step(self.manifest_urls("apply_flatfield")) + apply_flatfield.imgDir = ome_converter.outDir + apply_flatfield.imgPattern = self.image_pattern + apply_flatfield.ffDir = estimate_flatfield.outDir + apply_flatfield.ffPattern = self.ff_pattern + apply_flatfield.dfPattern = self.df_pattern + apply_flatfield.outDir = Path("apply_flatfield.outDir") + apply_flatfield.dataType = True + + ## Kaggle Nuclei Segmentation + kaggle_nuclei_segmentation = self.create_step( + self.manifest_urls("kaggle_nuclei_segmentation") + ) + if self.background_correction: + kaggle_nuclei_segmentation.inpDir = apply_flatfield.outDir + else: + kaggle_nuclei_segmentation.inpDir = ome_converter.outDir + kaggle_nuclei_segmentation.filePattern = self.image_pattern + kaggle_nuclei_segmentation.outDir = Path("kaggle_nuclei_segmentation.outDir") + + ## FTL Label Plugin + ftl_plugin = self.create_step(self.manifest_urls("ftl_plugin")) + ftl_plugin.inpDir = kaggle_nuclei_segmentation.outDir + ftl_plugin.connectivity = 1 + ftl_plugin.binarizationThreshold = 0.5 + ftl_plugin.outDir = Path("ftl_plugin.outDir") + + # # ## Nyxus Plugin + nyxus_plugin = self.create_step(self.manifest_urls("nyxus_plugin")) + if self.background_correction: + nyxus_plugin.inpDir = apply_flatfield.outDir + else: + nyxus_plugin.inpDir = ome_converter.outDir + nyxus_plugin.segDir = ftl_plugin.outDir + nyxus_plugin.intPattern = self.image_pattern + nyxus_plugin.segPattern = self.seg_pattern + nyxus_plugin.features = self.features + nyxus_plugin.fileExtension = self.file_extension + nyxus_plugin.neighborDist = 5 + nyxus_plugin.pixelPerMicron = 1.0 + nyxus_plugin.outDir = Path("nyxus_plugin.outDir") + + logger.info("Initiating CWL Feature Extraction Workflow!!!") + if self.background_correction: + steps = [ + bbbc, + rename, + ome_converter, + estimate_flatfield, + apply_flatfield, + kaggle_nuclei_segmentation, + ftl_plugin, + nyxus_plugin + ] + else: + steps = [ + bbbc, + rename, + ome_converter, + kaggle_nuclei_segmentation, + ftl_plugin, + nyxus_plugin + ] + + workflow = api.Workflow(steps, "experiment", self.workflow_path) + # # Saving CLT for plugins + workflow._save_all_cwl(overwrite=True) + # # Adding environmental variables for bbbc_download and ome_converter plugin + self.modify_cwl() + # # # Save yaml to run CWL tool + workflow._save_yaml() + # Compile and run using WIC python API + 
workflow.compile(run_local=True, overwrite=False) + # # print(workflow.yml_path) + # # clean autognerated directories + self._clean() + self._move_outputs() + logger.info("Completed CWL Feature Extraction /Analysis Workflow.") + return + \ No newline at end of file diff --git a/workflows/cwl_nuclear_segmentation.py b/workflows/cwl_nuclear_segmentation.py new file mode 100644 index 0000000..d7d264b --- /dev/null +++ b/workflows/cwl_nuclear_segmentation.py @@ -0,0 +1,262 @@ +import wic.api.pythonapi as api +import polus.plugins as pp +from pathlib import Path +import yaml +import logging +import typing +import re +import shutil +import sys +sys.path.append('../') +from polus.image.workflows.utils import GITHUB_TAG + +# Initialize the logger +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + + +class CWLSegmentationWorkflow: + """ + A CWL Nuclear Segmentation pipeline. + + Attributes: + name : Name of imaging dataset of Broad Bioimage Benchmark Collection (https://bbbc.broadinstitute.org/image_sets). + file_pattern : Pattern for parsing raw filenames. + out_file_pattern : Preferred format for filenames + image_pattern : Pattern for parsing intensity image filenames after renaming when using map_directory + seg_pattern : Pattern use to parse segmentation image filenames + map_directory : Mapping of folder name + ff_pattern: The filename pattern employed to select flatfield components from the ffDir. + df_pattern:The filename pattern employed to select darkfield components from the ffDir + group_by: Grouping variables for filePattern + background_correction: Execute background correction + """ + def __init__( + self, + name: str, + file_pattern: str, + out_file_pattern: str, + image_pattern: str, + seg_pattern: str, + ff_pattern: typing.Optional[str] = '', + df_pattern: typing.Optional[str] = '', + group_by: typing.Optional[str] = '', + map_directory: typing.Optional[bool] = False, + background_correction: typing.Optional[bool] = False, + ): + self.name = name + self.file_pattern = file_pattern + self.out_file_pattern = out_file_pattern + self.map_directory = map_directory + self.ff_pattern = ff_pattern + self.df_pattern = df_pattern + self.group_by = group_by + self.wic_path = api._WIC_PATH + self.PATH = Path(self.wic_path.parent).joinpath("image-workflows") + self.cwl_path, self.workflow_path = self._create_directories() + self.image_pattern = image_pattern + self.seg_pattern = seg_pattern + self.background_correction = background_correction + + def _create_directories(self) -> None: + """Create directories for CWL outputs""" + cwl_path = self.PATH.joinpath("cwl_adapters") + cwl_path.mkdir(parents=True, exist_ok=True) + workflow_path = self.PATH.joinpath("outputs").resolve() + workflow_path.mkdir(exist_ok=True) + return cwl_path, workflow_path + + def _clean(self) -> None: + """Cleaning of redundant directories generating on running CWL""" + logger.info("Cleaning directories!!!") + destination_path = self.workflow_path.joinpath("experiment") + dir_names = ("autogenerated", "cachedir", "RUNS", "provenance", "cwl_adapters") + dir_list = [w for w in self.wic_path.iterdir() if w.is_dir() if w.name in dir_names] + for d in dir_list: + shutil.rmtree(d) + for d in destination_path.iterdir(): + if d.name.endswith("cwl_adapters"): + shutil.rmtree(d) + for d in self.PATH.iterdir(): + if d.name.endswith("cwl_adapters"): + shutil.move(d, destination_path) + + return + + def _move_outputs(self) -> None: + """Transfer outputs from the WIC directory to the workflow path""" + 
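+        # WIC writes each step's results into "*outdir" folders inside its own directory;
+        # move them under the outputs/ folder created by _create_directories.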
logger.info("Move outputs to workflow path!!!") + for d in self.wic_path.iterdir(): + if d.name.endswith("outdir"): + shutil.move(d, self.workflow_path) + return + + def _camel(self, name: str) -> str: + """Convert plugin name to camel case.""" + name = re.sub(r"(_|-)+", " ", name).title().replace(" ", "") + return "".join([name[0].upper(), name[1:]]) + + def _string_after_period(self, x): + """Get a string after period.""" + match = re.search(r"\.(.*)", x) + if match: + # Get the part after the period + return f".*.{match.group(1)}" + else: + return "" + + def _add_backslash_before_parentheses(self, x): + """Add backslash to generate ff_pattern and df_pattern""" + # Define the regular expression pattern to match parenthesis + pattern_1 = r"(\()|(\))" + # Use re.sub() to add a backslash before starting and finishing parenthesis + result = re.sub(pattern_1, r"\\\1\2", x) + pattern_2 = r"\d" + result = ( + result.split("_c")[0] + + "_c{c:d}" + + re.sub(pattern_2, "", result.split("_c")[1]) + ) + return result + + def create_step(self, url: str) -> api.Step: + """Generate the plugin class name from the plugin name specified in the manifest""" + manifest = pp.submit_plugin(url) + plugin_version = str(manifest.version) + cwl_tool = pp.get_plugin(self._camel(manifest.name), plugin_version).save_cwl( + self.cwl_path.joinpath(f"{self._camel(manifest.name)}.cwl") + ) + step = api.Step(cwl_tool) + return step + + def manifest_urls(self, x: str) -> str: + """URLs on GitHub for plugin manifests""" + urls = { + "bbbc_download": f"{GITHUB_TAG}/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json", + "file_renaming": f"{GITHUB_TAG}/hamshkhawar/image-tools/filepattern_filerenaming/formats/file-renaming-tool/plugin.json", + "ome_converter": f"{GITHUB_TAG}/hamshkhawar/image-tools/basecontainer_omecontainer/formats/ome-converter-plugin/plugin.json", + "estimate_flatfield": f"{GITHUB_TAG}/nishaq503/image-tools/fix/basic/regression/basic-flatfield-estimation-tool/plugin.json", + "apply_flatfield": f"{GITHUB_TAG}/hamshkhawar/image-tools/cast_images/transforms/images/apply-flatfield-tool/plugin.json", + "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/kaggle-nucleiseg/segmentation/kaggle-nuclei-segmentation-tool/plugin.json", + "ftl_plugin": f"{GITHUB_TAG}/nishaq503/image-tools/fix/ftl-label/transforms/images/polus-ftl-label-plugin/plugin.json" + } + return urls[x] + + def modify_cwl(self) -> None: + """Modify CWL to incorporate environmental variables and permission access""" + for f in list(self.cwl_path.rglob("*.cwl")): + if "cwl" in f.name: + try: + with Path.open(f, "r") as file: + config = yaml.safe_load(file) + config["requirements"]["NetworkAccess"] = { + "networkAccess": True + } + config["requirements"]["EnvVarRequirement"] = { + "envDef": {"HOME": "/home/polusai"} + } + with open(f, "w") as out_file: + yaml.dump(config, out_file) + except FileNotFoundError: + logger.info("Error: There was an unexpected error while processing the file.") + return + + def workflow(self) -> None: + """ + A CWL nuclear segmentation pipeline. 
+ """ + # BBBCDownload + bbbc = self.create_step(self.manifest_urls("bbbc_download")) + bbbc.name = self.name + bbbc.outDir = Path("bbbc.outDir") + + # Renaming plugin + rename = self.create_step(self.manifest_urls("file_renaming")) + rename.filePattern = self.file_pattern + rename.outFilePattern = self.out_file_pattern + rename.mapDirectory = self.map_directory + rename.inpDir = bbbc.outDir + rename.outDir = Path("rename.outDir") + + + # OMEConverter + ome_converter = self.create_step(self.manifest_urls("ome_converter")) + ome_converter.filePattern = self._string_after_period(self.out_file_pattern) + ome_converter.fileExtension = ".ome.tif" + ome_converter.inpDir = rename.outDir + ome_converter.outDir = Path("ome_converter.outDir") + + if self.background_correction: + # Estimate Flatfield + estimate_flatfield = self.create_step(self.manifest_urls("estimate_flatfield")) + estimate_flatfield.inpDir = ome_converter.outDir + estimate_flatfield.filePattern = self.image_pattern + estimate_flatfield.groupBy = self.group_by + estimate_flatfield.getDarkfield = True + estimate_flatfield.outDir = Path("estimate_flatfield.outDir") + + # # Apply Flatfield + apply_flatfield = self.create_step(self.manifest_urls("apply_flatfield")) + apply_flatfield.imgDir = ome_converter.outDir + apply_flatfield.imgPattern = self.image_pattern + apply_flatfield.ffDir = estimate_flatfield.outDir + apply_flatfield.ffPattern = self.ff_pattern + apply_flatfield.dfPattern = self.df_pattern + apply_flatfield.outDir = Path("apply_flatfield.outDir") + apply_flatfield.dataType = True + + ## Kaggle Nuclei Segmentation + kaggle_nuclei_segmentation = self.create_step( + self.manifest_urls("kaggle_nuclei_segmentation") + ) + if self.background_correction: + kaggle_nuclei_segmentation.inpDir = apply_flatfield.outDir + else: + kaggle_nuclei_segmentation.inpDir = ome_converter.outDir + + kaggle_nuclei_segmentation.filePattern = self.image_pattern + kaggle_nuclei_segmentation.outDir = Path("kaggle_nuclei_segmentation.outDir") + + ## FTL Label Plugin + ftl_plugin = self.create_step(self.manifest_urls("ftl_plugin")) + ftl_plugin.inpDir = kaggle_nuclei_segmentation.outDir + ftl_plugin.connectivity = 1 + ftl_plugin.binarizationThreshold = 0.5 + ftl_plugin.outDir = Path("ftl_plugin.outDir") + + logger.info("Initiating CWL Nuclear Segmentation Workflow!!!") + if self.background_correction: + steps = [ + bbbc, + rename, + ome_converter, + estimate_flatfield, + apply_flatfield, + kaggle_nuclei_segmentation, + ftl_plugin + ] + else: + steps = [ + bbbc, + rename, + ome_converter, + kaggle_nuclei_segmentation, + ftl_plugin] + + + + workflow = api.Workflow(steps, "experiment", self.workflow_path) + # # Saving CLT for plugins + workflow._save_all_cwl(overwrite=True) + # # Adding environmental variables for bbbc_download and ome_converter plugin + self.modify_cwl() + # # # Save yaml to run CWL tool + workflow._save_yaml() + # Compile and run using WIC python API + workflow.compile(run_local=True, overwrite=False) + # # print(workflow.yml_path) + # # clean autognerated directories + self._clean() + self._move_outputs() + logger.info("Completed CWL nuclear segmentation workflow.") + return \ No newline at end of file From 02c81699aed474661baf4a62c03ab97db8474d99 Mon Sep 17 00:00:00 2001 From: Hamdah Shafqat Abbasi Date: Wed, 25 Sep 2024 13:21:01 -0400 Subject: [PATCH 04/11] updated_workflows_using sophios --- cwl-adapters/bbbcdownload.cwl | 57 ------- cwl-adapters/ome-converter.cwl | 81 ---------- cwl_adapters/FileRenaming.cwl | 41 +++++ 
cwl_adapters/FtlLabel.cwl | 37 +++++ cwl_adapters/KaggleNucleiSegmentation.cwl | 37 +++++ cwl_adapters/OmeConverter.cwl | 37 +++++ .../basic-flatfield-estimation.cwl | 138 +++++++--------- cwl_adapters/bbbcdownload.cwl | 29 ++++ .../file-renaming.cwl | 95 +++++------ .../image_assembler.cwl | 128 +++++++-------- {cwl-adapters => cwl_adapters}/montage.cwl | 149 ++++++++---------- cwl_adapters/ome-converter.cwl | 57 +++++++ .../precompute_slide.cwl | 100 +++++------- pyproject.toml | 18 ++- src/image/workflows/__init__.py | 2 + src/{polus => }/image/workflows/__main__.py | 23 +-- .../cwl_nuclear_segmentation.cpython-310.pyc | Bin 0 -> 7048 bytes .../__pycache__/utils.cpython-310.pyc | Bin 0 -> 2779 bytes {workflows => src/image/workflows}/bbbc.py | 0 {workflows => src/image/workflows}/bbbc.wic | 0 src/image/workflows/bbbc.yml | 39 +++++ .../image/workflows}/bbbc_sub.py | 0 .../image/workflows}/cwl_analysis.py | 8 +- .../workflows}/cwl_nuclear_segmentation.py | 94 ++++------- src/{polus => }/image/workflows/utils.py | 0 src/polus/image/workflows/__init__.py | 0 workflows/__init__.py | 0 27 files changed, 592 insertions(+), 578 deletions(-) delete mode 100644 cwl-adapters/bbbcdownload.cwl delete mode 100644 cwl-adapters/ome-converter.cwl create mode 100644 cwl_adapters/FileRenaming.cwl create mode 100644 cwl_adapters/FtlLabel.cwl create mode 100644 cwl_adapters/KaggleNucleiSegmentation.cwl create mode 100644 cwl_adapters/OmeConverter.cwl rename {cwl-adapters => cwl_adapters}/basic-flatfield-estimation.cwl (53%) create mode 100644 cwl_adapters/bbbcdownload.cwl rename {cwl-adapters => cwl_adapters}/file-renaming.cwl (62%) rename {cwl-adapters => cwl_adapters}/image_assembler.cwl (56%) rename {cwl-adapters => cwl_adapters}/montage.cwl (60%) create mode 100644 cwl_adapters/ome-converter.cwl rename {cwl-adapters => cwl_adapters}/precompute_slide.cwl (64%) create mode 100644 src/image/workflows/__init__.py rename src/{polus => }/image/workflows/__main__.py (67%) create mode 100644 src/image/workflows/__pycache__/cwl_nuclear_segmentation.cpython-310.pyc create mode 100644 src/image/workflows/__pycache__/utils.cpython-310.pyc rename {workflows => src/image/workflows}/bbbc.py (100%) rename {workflows => src/image/workflows}/bbbc.wic (100%) create mode 100644 src/image/workflows/bbbc.yml rename {workflows => src/image/workflows}/bbbc_sub.py (100%) rename {workflows => src/image/workflows}/cwl_analysis.py (98%) rename {workflows => src/image/workflows}/cwl_nuclear_segmentation.py (75%) rename src/{polus => }/image/workflows/utils.py (100%) delete mode 100644 src/polus/image/workflows/__init__.py delete mode 100644 workflows/__init__.py diff --git a/cwl-adapters/bbbcdownload.cwl b/cwl-adapters/bbbcdownload.cwl deleted file mode 100644 index 9fba292..0000000 --- a/cwl-adapters/bbbcdownload.cwl +++ /dev/null @@ -1,57 +0,0 @@ -class: CommandLineTool -cwlVersion: v1.1 - -label: BBBC Download - -doc: |- - Downloads the datasets on the Broad Bioimage Benchmark Collection website - https://github.com/saketprem/polus-plugins/tree/bbbc_download/utils/bbbc-download-plugin - -requirements: - DockerRequirement: - dockerPull: polusai/bbbc-download-plugin:0.1.0-dev1 - # See https://www.commonwl.org/v1.0/CommandLineTool.html#InitialWorkDirRequirement - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true # Output directories must be writable - InlineJavascriptRequirement: {} - # NOTE: By default, "tools must not assume network access, except for localhost" - # See 
https://www.commonwl.org/v1.1/CommandLineTool.html#NetworkAccess - NetworkAccess: - networkAccess: true - -inputs: - name: - label: The name of the dataset(s) to be downloaded (separate the datasets with a comma. eg BBBC001,BBBC002,BBBC003) - doc: |- - The name of the dataset(s) to be downloaded (separate the datasets with a comma. eg BBBC001,BBBC002,BBBC003) - inputBinding: - prefix: --name - type: string - # default: BBBC001 - - outDir: - label: Output collection - doc: |- - Output collection - inputBinding: - prefix: --outDir - type: Directory - -outputs: - outDir: - label: Output collection - doc: |- - Output collection - type: Directory - outputBinding: - glob: $(inputs.outDir.basename) - -$namespaces: - edam: https://edamontology.org/ - -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl - -# manifest: "https://raw.githubusercontent.com/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json" \ No newline at end of file diff --git a/cwl-adapters/ome-converter.cwl b/cwl-adapters/ome-converter.cwl deleted file mode 100644 index 7dd5607..0000000 --- a/cwl-adapters/ome-converter.cwl +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env cwl-runner -class: CommandLineTool -cwlVersion: v1.0 - -label: OME Zarr Converter - -doc: |- - This WIPP plugin converts BioFormats supported data types to the OME Zarr file format. - https://github.com/PolusAI/polus-plugins/tree/master/formats/ome-converter-plugin - -requirements: - DockerRequirement: - dockerPull: polusai/ome-converter-plugin:0.3.2-dev2 - # See https://www.commonwl.org/v1.0/CommandLineTool.html#InitialWorkDirRequirement - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true # Output directories must be writable - InlineJavascriptRequirement: {} -# NOTE: polusai/ome-converter-plugin:0.3.1 uses the base image -# polusai/bfio:2.3.2 which now un-bundles the java maven package -# ome:formats-gpl:7.1.0 due to licensing reasons. -# To avoid requiring network access at runtime, in the bfio Dockerfile -# it is pre-installed and saved in ~/.m2/ However, by default -# CWL hides all environment variables (including HOME), so we need to -# set HOME here so that at runtime we get a cache hit on the maven install. 
- EnvVarRequirement: -# See https://www.commonwl.org/user_guide/topics/environment-variables.html - envDef: - HOME: /home/polusai - -inputs: - inpDir: - label: Input generic data collection to be processed by this plugin - doc: |- - Input generic data collection to be processed by this plugin - type: Directory - inputBinding: - prefix: --inpDir - - filePattern: - label: A filepattern, used to select data for conversion - doc: |- - A filepattern, used to select data for conversion - type: string - inputBinding: - prefix: --filePattern - - fileExtension: - label: The file extension - doc: |- - The file extension - type: string - inputBinding: - prefix: --fileExtension - default: "default" # enum: .ome.tiff, .ome.zarr, default - - outDir: - label: Output collection - doc: |- - Output collection - type: Directory - inputBinding: - prefix: --outDir - -outputs: - outDir: - label: Output collection - doc: |- - Output collection - type: Directory - outputBinding: - glob: $(inputs.outDir.basename) - -$namespaces: - edam: https://edamontology.org/ - -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl - -# manifest: https://raw.githubusercontent.com/PolusAI/polus-plugins/master/formats/ome-converter-plugin/plugin.json \ No newline at end of file diff --git a/cwl_adapters/FileRenaming.cwl b/cwl_adapters/FileRenaming.cwl new file mode 100644 index 0000000..1e154ed --- /dev/null +++ b/cwl_adapters/FileRenaming.cwl @@ -0,0 +1,41 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + filePattern: + inputBinding: + prefix: --filePattern + type: string + inpDir: + inputBinding: + prefix: --inpDir + type: Directory + mapDirectory: + inputBinding: + prefix: --mapDirectory + type: boolean? + outDir: + inputBinding: + prefix: --outDir + type: Directory + outFilePattern: + inputBinding: + prefix: --outFilePattern + type: string +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/file-renaming-tool:0.2.4-dev1 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/FtlLabel.cwl b/cwl_adapters/FtlLabel.cwl new file mode 100644 index 0000000..227be2a --- /dev/null +++ b/cwl_adapters/FtlLabel.cwl @@ -0,0 +1,37 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + binarizationThreshold: + inputBinding: + prefix: --binarizationThreshold + type: double + connectivity: + inputBinding: + prefix: --connectivity + type: string + inpDir: + inputBinding: + prefix: --inpDir + type: Directory + outDir: + inputBinding: + prefix: --outDir + type: Directory +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/ftl-label-plugin:0.3.12-dev5 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/KaggleNucleiSegmentation.cwl b/cwl_adapters/KaggleNucleiSegmentation.cwl new file mode 100644 index 0000000..66ec970 --- /dev/null +++ b/cwl_adapters/KaggleNucleiSegmentation.cwl @@ -0,0 +1,37 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + filePattern: + inputBinding: + prefix: --filePattern + type: string? 
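+  # filePattern is optional here; presumably the tool processes every image in inpDir when it is omitted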
+ inpDir: + inputBinding: + prefix: --inpDir + type: Directory + outDir: + inputBinding: + prefix: --outDir + type: Directory + preview: + inputBinding: + prefix: --preview + type: boolean? +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/kaggle-nuclei-segmentation-tool:0.1.5-dev1 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/OmeConverter.cwl b/cwl_adapters/OmeConverter.cwl new file mode 100644 index 0000000..66cefde --- /dev/null +++ b/cwl_adapters/OmeConverter.cwl @@ -0,0 +1,37 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + fileExtension: + inputBinding: + prefix: --fileExtension + type: string + filePattern: + inputBinding: + prefix: --filePattern + type: string + inpDir: + inputBinding: + prefix: --inpDir + type: Directory + outDir: + inputBinding: + prefix: --outDir + type: Directory +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/ome-converter-plugin:0.3.2-dev2 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl-adapters/basic-flatfield-estimation.cwl b/cwl_adapters/basic-flatfield-estimation.cwl similarity index 53% rename from cwl-adapters/basic-flatfield-estimation.cwl rename to cwl_adapters/basic-flatfield-estimation.cwl index 85c18ea..a56bafc 100644 --- a/cwl-adapters/basic-flatfield-estimation.cwl +++ b/cwl_adapters/basic-flatfield-estimation.cwl @@ -1,111 +1,83 @@ -#!/usr/bin/env cwl-runner +$namespaces: + cwltool: http://commonwl.org/cwltool# + edam: https://edamontology.org/ +$schemas: +- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl class: CommandLineTool cwlVersion: v1.0 +doc: 'This WIPP plugin will take a collection of images and use the BaSiC flatfield + correction algorithm to generate a flatfield image, a darkfield image, and a photobleach + offset. -label: BaSiC Flatfield Estimation - -doc: |- - This WIPP plugin will take a collection of images and use the BaSiC flatfield correction algorithm to generate a flatfield image, a darkfield image, and a photobleach offset. - https://github.com/PolusAI/polus-plugins/tree/master/regression/basic-flatfield-estimation-plugin - -requirements: - DockerRequirement: - dockerPull: polusai/basic-flatfield-estimation-plugin:2.1.1 - # See https://www.commonwl.org/v1.0/CommandLineTool.html#InitialWorkDirRequirement - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true # Output directories must be writable - InlineJavascriptRequirement: {} - -# "jax._src.xla_bridge - WARNING - An NVIDIA GPU may be present on this machine, but a CUDA-enabled jaxlib is not installed. Falling back to cpu." 
+ https://github.com/PolusAI/polus-plugins/tree/master/regression/basic-flatfield-estimation-plugin' hints: cwltool:CUDARequirement: - cudaVersionMin: "11.4" - cudaComputeCapabilityMin: "3.0" - cudaDeviceCountMin: 1 + cudaComputeCapabilityMin: '3.0' cudaDeviceCountMax: 1 - + cudaDeviceCountMin: 1 + cudaVersionMin: '11.4' inputs: - inpDir: - label: Path to input images - doc: |- - Path to input images - type: Directory - inputBinding: - prefix: --inpDir - - getDarkfield: - label: If 'true', will calculate darkfield image - doc: |- - If 'true', will calculate darkfield image - type: boolean? - inputBinding: - prefix: --getDarkfield - - # photobleach: - # label: If 'true', will calculate photobleach scalar - # doc: |- - # If 'true', will calculate photobleach scalar - # type: boolean? - # inputBinding: - # prefix: --photobleach - filePattern: + doc: File pattern to subset data + inputBinding: + prefix: --filePattern label: File pattern to subset data - doc: |- - File pattern to subset data type: string? + getDarkfield: + doc: If 'true', will calculate darkfield image inputBinding: - prefix: --filePattern - + prefix: --getDarkfield + label: If 'true', will calculate darkfield image + type: boolean? groupBy: - label: Variables to group together - doc: |- - Variables to group together - type: string? + doc: Variables to group together inputBinding: prefix: --groupBy - - preview: - label: Generate a JSON file describing what the outputs should be - doc: |- - Generate a JSON file describing what the outputs should be - type: boolean? + label: Variables to group together + type: string? + inpDir: + doc: Path to input images inputBinding: - prefix: --preview - + prefix: --inpDir + label: Path to input images + type: Directory outDir: + doc: Output image collection + inputBinding: + prefix: --outDir label: Output image collection - doc: |- - Output image collection type: Directory + preview: + doc: Generate a JSON file describing what the outputs should be inputBinding: - prefix: --outDir - + prefix: --preview + label: Generate a JSON file describing what the outputs should be + type: boolean? +label: BaSiC Flatfield Estimation outputs: outDir: + doc: Output image collection label: Output image collection - doc: |- - Output image collection - type: Directory outputBinding: glob: $(inputs.outDir.basename) - + type: Directory preview_json: - label: JSON file describing what the outputs should be - doc: |- - JSON file describing what the outputs should be - type: File? # if --preview + doc: JSON file describing what the outputs should be format: edam:format_3464 + label: JSON file describing what the outputs should be outputBinding: glob: preview.json - -$namespaces: - edam: https://edamontology.org/ - cwltool: http://commonwl.org/cwltool# - -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl - -# manifest: https://raw.githubusercontent.com/PolusAI/polus-plugins/master/regression/basic-flatfield-estimation-plugin/plugin.json \ No newline at end of file + type: File? 
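+# Presumably added for the same reasons documented in the hand-written adapters being replaced:
+# NetworkAccess allows the tool to reach the network at runtime, and HOME is pinned to
+# /home/polusai so caches baked into the container image are found (see the maven-cache note
+# in the removed cwl-adapters/ome-converter.cwl above).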
+requirements: + DockerRequirement: + dockerPull: polusai/basic-flatfield-estimation-plugin:2.1.1 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/bbbcdownload.cwl b/cwl_adapters/bbbcdownload.cwl new file mode 100644 index 0000000..9eeba93 --- /dev/null +++ b/cwl_adapters/bbbcdownload.cwl @@ -0,0 +1,29 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + name: + inputBinding: + prefix: --name + type: string + outDir: + inputBinding: + prefix: --outDir + type: Directory +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/bbbc-download-plugin:0.1.0-dev1 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl-adapters/file-renaming.cwl b/cwl_adapters/file-renaming.cwl similarity index 62% rename from cwl-adapters/file-renaming.cwl rename to cwl_adapters/file-renaming.cwl index 29628cd..8ce2769 100644 --- a/cwl-adapters/file-renaming.cwl +++ b/cwl_adapters/file-renaming.cwl @@ -1,81 +1,66 @@ +$namespaces: + edam: https://edamontology.org/ +$schemas: +- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl class: CommandLineTool cwlVersion: v1.0 +doc: 'Rename and store image collection files in a new image collection -label: File Renaming - -doc: |- - Rename and store image collection files in a new image collection - https://github.com/PolusAI/polus-plugins/tree/master/formats/file-renaming-plugin - -requirements: - DockerRequirement: - dockerPull: polusai/file-renaming-plugin:0.2.1-dev0 # NOTE: 0.2.3 not pushed yet - # See https://www.commonwl.org/v1.0/CommandLineTool.html#InitialWorkDirRequirement - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true # Output directories must be writable - InlineJavascriptRequirement: {} - + https://github.com/PolusAI/polus-plugins/tree/master/formats/file-renaming-plugin' inputs: - inpDir: - inputBinding: - prefix: --inpDir - type: Directory - filePattern: inputBinding: prefix: --filePattern type: string - + inpDir: + inputBinding: + prefix: --inpDir + type: Directory mapDirectory: inputBinding: prefix: --mapDirectory - type: string? # enum: raw, map, default - - preview: - label: Generate a JSON file describing what the outputs should be - doc: |- - Generate a JSON file describing what the outputs should be + type: string? + outDir: + doc: Output collection inputBinding: - prefix: --preview - type: boolean? - + prefix: --outDir + label: Output collection + type: Directory outFilePattern: inputBinding: prefix: --outFilePattern type: string - - outDir: - label: Output collection - doc: |- - Output collection + preview: + doc: Generate a JSON file describing what the outputs should be inputBinding: - prefix: --outDir - type: Directory - + prefix: --preview + label: Generate a JSON file describing what the outputs should be + type: boolean? 
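+# Per the inline comment in the hand-written adapter this file replaces, mapDirectory takes
+# one of: raw, map, default.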
+label: File Renaming outputs: outDir: + doc: Output collection label: Output collection - doc: |- - Output collection - type: Directory outputBinding: glob: $(inputs.outDir.basename) - + type: Directory preview_json: - label: JSON file describing what the outputs should be - doc: |- - JSON file describing what the outputs should be - type: File? # if --preview + doc: JSON file describing what the outputs should be format: edam:format_3464 + label: JSON file describing what the outputs should be outputBinding: glob: preview.json - -$namespaces: - edam: https://edamontology.org/ - -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl - -# manifest: https://raw.githubusercontent.com/PolusAI/polus-plugins/master/formats/file-renaming-plugin/plugin.json \ No newline at end of file + type: File? +requirements: + DockerRequirement: + dockerPull: polusai/file-renaming-plugin:0.2.1-dev0 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl-adapters/image_assembler.cwl b/cwl_adapters/image_assembler.cwl similarity index 56% rename from cwl-adapters/image_assembler.cwl rename to cwl_adapters/image_assembler.cwl index f6d179f..77466cc 100644 --- a/cwl-adapters/image_assembler.cwl +++ b/cwl_adapters/image_assembler.cwl @@ -1,97 +1,77 @@ -#!/usr/bin/env cwl-runner +$namespaces: + edam: https://edamontology.org/ +$schemas: +- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl class: CommandLineTool cwlVersion: v1.0 +doc: 'This plugin assembles images into a stitched image using an image stitching + vector. -label: Image Assembler - -doc: |- - This plugin assembles images into a stitched image using an image stitching vector. - https://github.com/PolusAI/polus-plugins/tree/master/transforms/images/image-assembler-plugin - -requirements: - DockerRequirement: - dockerPull: polusai/image-assembler-plugin:1.4.0-dev0 - # See https://www.commonwl.org/v1.0/CommandLineTool.html#InitialWorkDirRequirement - InitialWorkDirRequirement: - listing: - - $(inputs.stitchPath) # Must stage inputs for tools which do not accept full paths. - - entry: $(inputs.outDir) - writable: true # Output directories must be writable - InlineJavascriptRequirement: {} - + https://github.com/PolusAI/polus-plugins/tree/master/transforms/images/image-assembler-plugin' inputs: - stitchPath: - label: Path to directory containing "stitching vector" file img-global-positions-0.txt - doc: |- - Path to directory containing "stitching vector" file img-global-positions-0.txt - type: Directory - inputBinding: - prefix: --stitchPath - imgPath: - label: Path to input image collection - doc: |- - Path to input image collection - type: Directory + doc: Path to input image collection inputBinding: prefix: --imgPath - - timesliceNaming: - label: Label images by timeslice rather than analyzing input image names - doc: |- - Label images by timeslice rather than analyzing input image names + label: Path to input image collection + type: Directory + outDir: + doc: Output collection inputBinding: - prefix: --timesliceNaming - type: boolean? 
- + prefix: --outDir + label: Output collection + type: Directory preview: + doc: Generate a JSON file describing what the outputs should be + inputBinding: + prefix: --preview label: Generate a JSON file describing what the outputs should be - doc: |- - Generate a JSON file describing what the outputs should be type: boolean? + stitchPath: + doc: Path to directory containing "stitching vector" file img-global-positions-0.txt inputBinding: - prefix: --preview - - outDir: - label: Output collection - doc: |- - Output collection + prefix: --stitchPath + label: Path to directory containing "stitching vector" file img-global-positions-0.txt type: Directory + timesliceNaming: + doc: Label images by timeslice rather than analyzing input image names inputBinding: - prefix: --outDir - + prefix: --timesliceNaming + label: Label images by timeslice rather than analyzing input image names + type: boolean? +label: Image Assembler outputs: + assembled_image: + doc: JSON file with outputs + format: edam:format_3727 + label: The assembled montage image + outputBinding: + glob: '*.ome.tif' + type: File? outDir: + doc: Output collection label: Output collection - doc: |- - Output collection - type: Directory outputBinding: glob: $(inputs.outDir.basename) - - assembled_image: - label: The assembled montage image - doc: |- - JSON file with outputs - type: File? # if not --preview - # See https://bioportal.bioontology.org/ontologies/EDAM?p=classes&conceptid=format_3727 - format: edam:format_3727 - outputBinding: - glob: "*.ome.tif" - + type: Directory preview_json: - label: JSON file with outputs - doc: |- - JSON file with outputs - type: File? # if --preview + doc: JSON file with outputs format: edam:format_3464 + label: JSON file with outputs outputBinding: glob: preview.json - -$namespaces: - edam: https://edamontology.org/ - -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl - -# manifest: https://raw.githubusercontent.com/PolusAI/polus-plugins/master/transforms/images/image-assembler-plugin/plugin.json \ No newline at end of file + type: File? +requirements: + DockerRequirement: + dockerPull: polusai/image-assembler-plugin:1.4.0-dev0 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - $(inputs.stitchPath) + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl-adapters/montage.cwl b/cwl_adapters/montage.cwl similarity index 60% rename from cwl-adapters/montage.cwl rename to cwl_adapters/montage.cwl index bc90bb0..dbd7a79 100644 --- a/cwl-adapters/montage.cwl +++ b/cwl_adapters/montage.cwl @@ -1,119 +1,94 @@ -#!/usr/bin/env cwl-runner +$namespaces: + edam: https://edamontology.org/ +$schemas: +- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl class: CommandLineTool cwlVersion: v1.0 +doc: 'This plugin generates a stitching vector that will montage images together. -label: Montage - -doc: |- - This plugin generates a stitching vector that will montage images together. 
- https://github.com/PolusAI/polus-plugins/tree/master/transforms/images/montage-plugin - -requirements: - DockerRequirement: - dockerPull: polusai/montage-plugin:0.5.0 - # See https://www.commonwl.org/v1.0/CommandLineTool.html#InitialWorkDirRequirement - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true # Output directories must be writable - InlineJavascriptRequirement: {} - + https://github.com/PolusAI/polus-plugins/tree/master/transforms/images/montage-plugin' inputs: - inpDir: - label: Input image collection to be processed by this plugin - doc: |- - Input image collection to be processed by this plugin - type: Directory - inputBinding: - prefix: --inpDir - filePattern: + doc: Filename pattern used to parse data + inputBinding: + prefix: --filePattern label: Filename pattern used to parse data - doc: |- - Filename pattern used to parse data type: string + flipAxis: + doc: Axes to flip when laying out images inputBinding: - prefix: --filePattern - - layout: - label: Specify montage organization - doc: |- - Specify montage organization + prefix: --flipAxis + label: Axes to flip when laying out images type: string? - # optional array of strings? - inputBinding: - prefix: --layout - gridSpacing: - label: Specify spacing between images in the lowest grid - doc: |- - Specify spacing between images in the lowest grid + doc: Specify spacing between images in the lowest grid inputBinding: prefix: --gridSpacing + label: Specify spacing between images in the lowest grid type: int? - imageSpacing: - label: Specify spacing multiplier between grids - doc: |- - Specify spacing multiplier between grids + doc: Specify spacing multiplier between grids inputBinding: prefix: --imageSpacing + label: Specify spacing multiplier between grids type: int? - - flipAxis: - label: Axes to flip when laying out images - doc: |- - Axes to flip when laying out images + inpDir: + doc: Input image collection to be processed by this plugin inputBinding: - prefix: --flipAxis - type: string? - - preview: - label: Generate a JSON file describing what the outputs should be - doc: |- - Generate a JSON file describing what the outputs should be - type: boolean? + prefix: --inpDir + label: Input image collection to be processed by this plugin + type: Directory + layout: + doc: Specify montage organization inputBinding: - prefix: --preview - + prefix: --layout + label: Specify montage organization + type: string? outDir: + doc: Output collection + inputBinding: + prefix: --outDir label: Output collection - doc: |- - Output collection type: Directory + preview: + doc: Generate a JSON file describing what the outputs should be inputBinding: - prefix: --outDir - + prefix: --preview + label: Generate a JSON file describing what the outputs should be + type: boolean? +label: Montage outputs: + global_positions: + doc: The "stitching vector", i.e. the positions of the individual images in the + montage + label: The "stitching vector", i.e. the positions of the individual images in + the montage + outputBinding: + glob: $(inputs.outDir.basename)/img-global-positions-0.txt + type: File? outDir: + doc: Output collection label: Output collection - doc: |- - Output collection - type: Directory outputBinding: glob: $(inputs.outDir.basename) - - global_positions: - label: The "stitching vector", i.e. the positions of the individual images in the montage - doc: |- - The "stitching vector", i.e. the positions of the individual images in the montage - type: File? 
# if not --preview - outputBinding: - glob: $(inputs.outDir.basename)/img-global-positions-0.txt - + type: Directory preview_json: - label: JSON file describing what the outputs should be - doc: |- - JSON file describing what the outputs should be - type: File? # if --preview + doc: JSON file describing what the outputs should be format: edam:format_3464 + label: JSON file describing what the outputs should be outputBinding: glob: preview.json - -$namespaces: - edam: https://edamontology.org/ - -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl - -# manifest: https://raw.githubusercontent.com/PolusAI/polus-plugins/master/transforms/images/montage-plugin/plugin.json \ No newline at end of file + type: File? +requirements: + DockerRequirement: + dockerPull: polusai/montage-plugin:0.5.0 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/ome-converter.cwl b/cwl_adapters/ome-converter.cwl new file mode 100644 index 0000000..43171e9 --- /dev/null +++ b/cwl_adapters/ome-converter.cwl @@ -0,0 +1,57 @@ +$namespaces: + edam: https://edamontology.org/ +$schemas: +- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl +class: CommandLineTool +cwlVersion: v1.0 +doc: 'This WIPP plugin converts BioFormats supported data types to the OME Zarr file + format. + + https://github.com/PolusAI/polus-plugins/tree/master/formats/ome-converter-plugin' +inputs: + fileExtension: + default: default + doc: The file extension + inputBinding: + prefix: --fileExtension + label: The file extension + type: string + filePattern: + doc: A filepattern, used to select data for conversion + inputBinding: + prefix: --filePattern + label: A filepattern, used to select data for conversion + type: string + inpDir: + doc: Input generic data collection to be processed by this plugin + inputBinding: + prefix: --inpDir + label: Input generic data collection to be processed by this plugin + type: Directory + outDir: + doc: Output collection + inputBinding: + prefix: --outDir + label: Output collection + type: Directory +label: OME Zarr Converter +outputs: + outDir: + doc: Output collection + label: Output collection + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/ome-converter-plugin:0.3.2-dev2 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl-adapters/precompute_slide.cwl b/cwl_adapters/precompute_slide.cwl similarity index 64% rename from cwl-adapters/precompute_slide.cwl rename to cwl_adapters/precompute_slide.cwl index 2419170..35ab194 100644 --- a/cwl-adapters/precompute_slide.cwl +++ b/cwl_adapters/precompute_slide.cwl @@ -1,77 +1,61 @@ -#!/usr/bin/env cwl-runner +$namespaces: + edam: https://edamontology.org/ +$schemas: +- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl class: CommandLineTool cwlVersion: v1.0 +doc: 'This plugin generates image pyramids in multiple viewing formats. -label: Precompute Slide - -doc: |- - This plugin generates image pyramids in multiple viewing formats. 
- https://github.com/PolusAI/polus-plugins/tree/master/visualization/polus-precompute-slide-plugin - -requirements: - DockerRequirement: - dockerPull: polusai/precompute-slide-plugin:1.7.0-dev0 - # See https://www.commonwl.org/v1.0/CommandLineTool.html#InitialWorkDirRequirement - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true # Output directories must be writable - InlineJavascriptRequirement: {} - + https://github.com/PolusAI/polus-plugins/tree/master/visualization/polus-precompute-slide-plugin' inputs: - inpDir: - label: Input generic data collection to be processed by this plugin - doc: |- - Input generic data collection to be processed by this plugin - type: Directory - inputBinding: - prefix: --inpDir - - pyramidType: - label: Build a DeepZoom, Neuroglancer, Zarr pyramid - doc: |- - Build a DeepZoom, Neuroglancer, Zarr pyramid - type: string # enum: DeepZoom, Neuroglancer, Zarr + filePattern: + doc: Filename pattern used to parse data inputBinding: - prefix: --pyramidType - + prefix: --filePattern + label: Filename pattern used to parse data + type: string? imageType: - label: Image is either Segmentation or Image - doc: |- - Image is either Segmentation or Image + doc: Image is either Segmentation or Image inputBinding: prefix: --imageType + label: Image is either Segmentation or Image type: string - - filePattern: - label: Filename pattern used to parse data - doc: |- - Filename pattern used to parse data - type: string? + inpDir: + doc: Input generic data collection to be processed by this plugin inputBinding: - prefix: --filePattern - + prefix: --inpDir + label: Input generic data collection to be processed by this plugin + type: Directory outDir: + doc: Output collection + inputBinding: + prefix: --outDir label: Output collection - doc: |- - Output collection type: Directory + pyramidType: + doc: Build a DeepZoom, Neuroglancer, Zarr pyramid inputBinding: - prefix: --outDir - + prefix: --pyramidType + label: Build a DeepZoom, Neuroglancer, Zarr pyramid + type: string +label: Precompute Slide outputs: outDir: + doc: Output collection label: Output collection - doc: |- - Output collection - type: Directory outputBinding: glob: $(inputs.outDir.basename) - -$namespaces: - edam: https://edamontology.org/ - -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl - -# \ No newline at end of file + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/precompute-slide-plugin:1.7.0-dev0 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/pyproject.toml b/pyproject.toml index 85287fa..9ae0fcf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,20 +1,26 @@ [tool.poetry] -name = "polus-image-workflows" +name = "image-workflows" version = "0.1.1-dev1" description = "Build and execute pipelines of polus plugins on Compute." 
authors = ["Hamdah Shafqat Abbasi "] readme = "README.md" -packages = [{include = "polus", from = "src"}] +packages = [{include = "image", from = "src"}] [tool.poetry.dependencies] python = ">=3.9,<3.12" typer = "^0.9.0" pyyaml = "^6.0.1" pydantic = "^2.6.1" -cwl-utils="0.31" -toil="^5.12" -polus-plugins = {path = "../image-tools", develop = true} -workflow-inference-compiler = {path = "../workflow-inference-compiler", develop = true} +sophios = "^0.1.4" +cwlref-runner = "1.0" +cwltool = "^3.1.20240909164951" +graphviz = "^0.20.3" +polus-tools = { git = "https://github.com/PolusAI/tools.git" } + + + + + [tool.poetry.group.dev.dependencies] jupyter = "^1.0.0" diff --git a/src/image/workflows/__init__.py b/src/image/workflows/__init__.py new file mode 100644 index 0000000..15527f6 --- /dev/null +++ b/src/image/workflows/__init__.py @@ -0,0 +1,2 @@ +from .cwl_analysis import CWLAnalysisWorkflow +from .cwl_nuclear_segmentation import CWLSegmentationWorkflow \ No newline at end of file diff --git a/src/polus/image/workflows/__main__.py b/src/image/workflows/__main__.py similarity index 67% rename from src/polus/image/workflows/__main__.py rename to src/image/workflows/__main__.py index 863f1ef..7935dcd 100644 --- a/src/polus/image/workflows/__main__.py +++ b/src/image/workflows/__main__.py @@ -2,10 +2,11 @@ import logging import typer from pathlib import Path -from polus.image.workflows.utils import LoadYaml -from workflows.cwl_analysis import CWLAnalysisWorkflow -from workflows.cwl_nuclear_segmentation import CWLSegmentationWorkflow -from pathlib import Path +from utils import LoadYaml +# from cwl_analysis import CWLAnalysisWorkflow +from cwl_nuclear_segmentation import CWLSegmentationWorkflow + + app = typer.Typer() @@ -40,17 +41,17 @@ def main( logger.info(f"name = {name}") logger.info(f"workflow = {workflow}") - config_path = Path(__file__).parent.parent.parent.parent.parent.joinpath(f"configuration/{workflow}/{name}.yml") - print(config_path) + config_path = Path(__file__).parent.parent.parent.parent.joinpath(f"configuration/{workflow}/{name}.yml") + model = LoadYaml(workflow=workflow, config_path=config_path) params = model.parse_yaml() - if workflow == "analysis": - logger.info(f"Executing {workflow}!!!") - model = CWLAnalysisWorkflow(**params) - model.workflow() + # if workflow == "analysis": + # logger.info(f"Executing {workflow}!!!") + # model = CWLAnalysisWorkflow(**params) + # model.workflow() if workflow == "segmentation": logger.info(f"Executing {workflow}!!!") @@ -58,7 +59,7 @@ def main( model.workflow() - logger.info("Completed CWL workflow!!!") + # logger.info("Completed CWL workflow!!!") if __name__ == "__main__": diff --git a/src/image/workflows/__pycache__/cwl_nuclear_segmentation.cpython-310.pyc b/src/image/workflows/__pycache__/cwl_nuclear_segmentation.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6fa22e4e95533c355b0ad0d339bda07158bfee24 GIT binary patch literal 7048 zcma)B&6C{5bq5+44CZr}%O$C$C<>IEgrj6&PfE;qb$>p4Uw-d`w%=}(szIMsAGh66xboYDx z@ZRh9wQ;Rh*6@pWf7N~CPc`j7>1Fk2;N`n`l7B_RHLi!+KzH>4bD2)>EHnlt({yc6 za4j`sgvEjF+OyS?TT;8qZdvsex1##0TUC9{t*O55*3p~c+F;#X*ELa|o)a6#x_e%n zpKgls{)J;**Y0S%z^$(|Zarh}Mb7SNjp8psm)2mJc`p^i%zhBdN8Ki`MZVHnZ@K|_? 
zcx*f+QP{`(md4AAQDrfzs!RiFt76~n5px(`BnZVMr#;3{2E5<{0;Er8`-4~AAEXG z^adhI{WOT9IamBUkZ0M!?}mdd&fcgK3ST;_d!1o06k!mFW;x%PrZQ-cQjzSacY==m zfpB)5J?i4F6Ab)b5cM4Hr+y+*nz|`tpF1~$nAU`IQ$(Hqz?YAl592Tt9U{uP-cQqE zveRm{+wD%XO%Oqpq(O?qnz8J)6s!k~Nu#+qyBmbU8~SM~WCR)@V>Rl=(i!?PAv&ag z;LtWAGg(5$qtrVMEk##IDYygpfuB;h1YMk&6Y_@-4Wd*;Nst~Yc8Dbw$M1qdM-n+i z-9cYO&PWkD@P{4`B=`}_!^LTd=)K|tM~QIKSbd{#uw^0K)8{SloWKrNId}cxkSKtB zx^c)s6me*=zuTQ3yW`yNFTVh1<~$aIVHh8RS)d~kBu1wj`e`>1A$L0QU>HZxxWwto zc;KXjp}QN%r2zQyfd4Ok4xRZU_!so|WIP&r?ZX}C4vpwbpZGHH+o1qWGr`XEWdYRo zJC6u3;$A0)CMeb1ac(^o9jKwRT*e2^mlbfP-OfznUshdJs%-tVnq{>U>XTJg5h${UM&@`)DTmMDw%>L#s~=v}{sBYfLP( z=A?+WFtO2E6BBK5Qb21@%4kbzIjua?_pABew8r&g!Uk&%OT+fuYUd#IeC`jS<4I#p zmT;aVpCXA2d5)SoHEYzYQ&Xj;M$HB_CYsFl@~i1(7H%=LD6{wDAcE%gv)UXzsb{}Y z$xJ9;_w-3itf!7sB+yAe)8uRDe%ot(k$~8i-){R!(0{MB8+S%@os!lkal3WnDGUr* z%-sHLAIw}z(n@3}pKC4W=<}o8jKWeaZ)J0MNFNnMLF##LRjh+q8}SmcC1Ye&bV>Rj&yEJ&56$8 zt)_)z=mYuDe`Z@+j~(meMaOS_7)MWpOr2pkf-TQ2Jg#{M3Lc_OM00Fi_a@toKhJbW z+Bi+>Xnsk}Uc<_m6q%8X+F1bxJQP_`iecz?L}rp4v%+XN6tcnOC4y8+Lb6Z-ky{vL zmZ#480ON#I;xfCckFTD|7`6EXJ4odTP_g418072#GuRQEFu4xKuqiP_|B3b-!2>t( z>4ov-J8)UPlfccvV&ujev<5N9CqHanZ@$wQ8_jo`@(tirU*0g}Rg5z$f%EM2vx2&; zn5B=)^ru+?uB6k?KTdt7`oc{wKf_aJc=;JO0Fn?ptIX8bnaRd~appI_0{jFA3P3}s z6bE)O;QA3GE=*Xi6h{U(j?4*^gPX@t0)!{lCB>%+0-MFWy`>!+m$V~eVt_-X@%uM8 zSC%jd{iILUzb7Igeb~pP`EvXSQOLr=k4x7dUT;jUKWsF>#fJ~yefZv(J>(hlI!bZI zMX&RN&JI7yb%}U8W2<}<yV7XbUF3jX=Y!8QQ{sM zOGUKT7VrQRIFNDai$o-!0B5p^#x&~SeU&kty|~EO@7W8pF#h(LQut$dGh-yf6LO(T zx{ym7FR7O5Q@W=5Gm?-2`RH33#s=nLf~N(N7^DDmeaxmX)BD9K4D@pTCmJ`NT28dDA!!;SLb`pO$~6BLx%`S56E& zz?6{dsk5qnQNOI4FKolsO?~|KSyEnsmZW?qpSzmn4xc$>2*`^7kuWs7gUwr;);vhD@6Qm!F+A3<9iD%A;WPG=k_>V+7lw!~t z#^EU0&bK5gkoI_d5QT`PTcb1x^SSN$+-zqn2WajmaWwvZtM3nz{-eHs;LDlMBfcgr zifiU~j>hv4wv`9V`G)QJ>^1?N!2Iw`%y7J-6C*o8o*}(>uzXqI#^g4(kW&)!RdhdS zMM2W{AAj>N-#;x%oKC!)sR=1(ih{`Apd7117XOay+n0hLjOIoS) zql6fd6JKET6kFhgtO zJ9m{yRy6{6`yfK@GmMqP7txbI#*xZC!%6z@d){^3clUi-s-Ar;iz5F&CHKJyL)R~YxSXCxxMoP_uQ@M$>00( z^Cf(yAMB04b4$uts`OO~=fF=8l{lkFJcV0J;na}pBM&_2!1R)+)X~r`ie@d$7;+Dp*}ZPp7Y=KgUaGH*jVd(B{gS z6$MvNa0TzYf-88N6BBp}-UYm0M1KkWYv{Ltp$ZHI55+#Tmlx;P6dpRSE`K^H0NU~@ zQBDs=3a{l^fUg7ITA^S~K^7+#V2WOaucLjPpF?{EyEm}A=$Yvo)2sYET}ySszaxET zbp4T*Ta#k1cw|S#*&MwWj_mYJO5gVxqL0nz>=`>MO-fUTVnX!Ss8={u5tU!W6~8q8 z)}jJ(LtXP(4tuh)v@8Xp=%;@2ThNqoMOEyewkYq&zoMC%@&YK^<#Uvcj_oFHY%>kI z*se+}vxtCBF$WW?|yYmtoFbsOY;Fi(P4sr{shNnt6ZmHXypHI~sl&$CQ zsA;42d;f4KP!Nivh{|YBPz!Z0w1WsWxUmYg?)MRn^-+109|2r`Obx}-S&@o|WD*TI ziW+4V|GOqB8D)A@WRs*(k~Bt=R!EY*B+(*KM1-GY_MF3~jp*mt^dmgUKcQJK*f5~$ zi+BKIvyO@k&01`og?j;i+>Gn{GJ)?e^Jf&A68($V;1|ZU1VR|Hv5fTV*hQI*?-xU z@~=0x?1Ej|*xIO+YxVOAXXBE4A&H0mAWoY8FhCV8?St-NkdsH+;+|KrYE$JX%~|T> z7SwB2oW09;u@i477=}Sx)fnZjnj)7Bmdq{$u7<5T6q zk)u5IYml}OY^(V=Wq^$qj? E06%%!y#N3J literal 0 HcmV?d00001 diff --git a/src/image/workflows/__pycache__/utils.cpython-310.pyc b/src/image/workflows/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..23397ca67a45946c94b3e449e6753569a209fb74 GIT binary patch literal 2779 zcma)8-H+Qu5Z|?(#Bq||QQB(@mGJS=DiS1Z`4B>jAwuIu_;@vfWY-j zh$A%3`wG*LtWv=yoN0*y&}uKiVECA(OJuqR^eB9JHIpE7(AoW}4+99x|kt>73_t-tWnBG>o^$rZr&0m}gQXx|HL{PX*H>DO7aKendQi zcoalzPHwYg2h9o|C#6IK5Vr$aft|`Kk{S{pNgW9$NH&l(k+hH;1Cqd6xX|tjEY$%b zNJKe_oZCdW!n3Oa_iE~#A+sfFoUTDG0cqFNuQJU(E4j#x$AR5w`^JxB#4e6a5XXa( zj{w2gG~@XGh~?8a(uI#1S7|A+R*bh@mbqDqV^$QUX1F;O$Ipm71#d0+bE|tBa-_O! 
zdz-1Of2n)5OhyC9m+F36ZgyttMm7~ zHs@2%7PYre_i5CyHqH?n4|F}y!7`ufG(pnBl)ySROG)bRCT~6Vq9uL|(z0ayn^a)mb@u63RJ*%Wd;5+&J=NO;Z3xtv zoLCGG`&D%&?U0v3g-{Bic$6u<8s5qU#0}yw>5F71%yh_lEGya-iD8+l)l((_e@QE%abvo@8lK+tj($zcOdAzC@y z^~|;H_18dEVUzftH^u3d=W$xFNGNE}E^I!uc44VsfdEM4o~@#N=lj;TL_1I1Z^%=+ zNAJMo@rXzET}lf7B2lLvQcCc57nb(ECMm_Xd+hJhCzYqYl5s zyDnBGe51x|JC3{w3G&YnUO`@?W&J7G>0rl|H+lUZ76E99BNpO_MFT7ZZyd3(C(`8N zEheEFd$#eyX|s*wv@LPghiNH8!IFN6QxrP1W2oF%013fCpk73fu!eg8(wD&(&K_*< z8#q#7d$7$PoclPM>4r@mRFKoTU}$XUgJ;yxbed3G?!j6Py#=S;@bV?}i6wiMOwJ!> zZU`}p*p7;|yi8cGE_dcOTa&f*JP&nWgrErJ3P+?0nH08fxk$4@@c-^y`38K>9!Zn; z|JmI~{b83|bPg+bR!6I=@*Mb>to<+Zg?gX6z>~flibB2&u8fD9jp~Y}BFzn z_mRv8gynEA@(d7LrudnFV93j~={WQpZPp#SLSL;}opwup02-zSf3W&_wtX_%a zGX0Q^@d7AC!SzqJKEC~7ytRH^z6%2R9!l#ULbupIAok?8ZvQPN None: - """Create directories for CWL outputs""" - cwl_path = self.PATH.joinpath("cwl_adapters") - cwl_path.mkdir(parents=True, exist_ok=True) - workflow_path = self.PATH.joinpath("outputs").resolve() - workflow_path.mkdir(exist_ok=True) - return cwl_path, workflow_path - - def _clean(self) -> None: - """Cleaning of redundant directories generating on running CWL""" - logger.info("Cleaning directories!!!") - destination_path = self.workflow_path.joinpath("experiment") - dir_names = ("autogenerated", "cachedir", "RUNS", "provenance", "cwl_adapters") - dir_list = [w for w in self.wic_path.iterdir() if w.is_dir() if w.name in dir_names] - for d in dir_list: - shutil.rmtree(d) - for d in destination_path.iterdir(): - if d.name.endswith("cwl_adapters"): - shutil.rmtree(d) - for d in self.PATH.iterdir(): - if d.name.endswith("cwl_adapters"): - shutil.move(d, destination_path) - - return - - def _move_outputs(self) -> None: - """Transfer outputs from the WIC directory to the workflow path""" - logger.info("Move outputs to workflow path!!!") - for d in self.wic_path.iterdir(): - if d.name.endswith("outdir"): - shutil.move(d, self.workflow_path) - return + self.adapters_path = Path(__file__).parent.parent.parent.parent.joinpath("cwl_adapters") def _camel(self, name: str) -> str: """Convert plugin name to camel case.""" @@ -119,14 +85,26 @@ def _add_backslash_before_parentheses(self, x): ) return result - def create_step(self, url: str) -> api.Step: + # def create_step(self, url: str) -> Step: + # """Generate the plugin class name from the plugin name specified in the manifest""" + # manifest = dict(pp.submit_plugin(url)) + # plugin_version = str(manifest.version) + # cwl_tool = pp.get_plugin(self._camel(manifest.name), plugin_version).save_cwl( + # self.cwl_path.joinpath(f"{self._camel(manifest.name)}.cwl") + # ) + # self.modify_cwl() + # step = Step(cwl_tool) + # return step + + def create_step(self, url: str) -> Step: """Generate the plugin class name from the plugin name specified in the manifest""" - manifest = pp.submit_plugin(url) - plugin_version = str(manifest.version) - cwl_tool = pp.get_plugin(self._camel(manifest.name), plugin_version).save_cwl( - self.cwl_path.joinpath(f"{self._camel(manifest.name)}.cwl") + manifest = dict(pp.submit_plugin(url)) + plugin_version = str(manifest['version']) + cwl_tool = pp.get_plugin(self._camel(manifest['name']), plugin_version).save_cwl( + self.adapters_path.joinpath(f"{self._camel(manifest['name'])}.cwl") ) - step = api.Step(cwl_tool) + self.modify_cwl() + step = Step(cwl_tool) return step def manifest_urls(self, x: str) -> str: @@ -144,7 +122,7 @@ def manifest_urls(self, x: str) -> str: def modify_cwl(self) -> None: """Modify CWL to incorporate environmental variables and permission access""" 
- for f in list(self.cwl_path.rglob("*.cwl")): + for f in list(self.adapters_path.rglob("*.cwl")): if "cwl" in f.name: try: with Path.open(f, "r") as file: @@ -220,7 +198,7 @@ def workflow(self) -> None: ## FTL Label Plugin ftl_plugin = self.create_step(self.manifest_urls("ftl_plugin")) ftl_plugin.inpDir = kaggle_nuclei_segmentation.outDir - ftl_plugin.connectivity = 1 + ftl_plugin.connectivity = "1" ftl_plugin.binarizationThreshold = 0.5 ftl_plugin.outDir = Path("ftl_plugin.outDir") @@ -241,22 +219,14 @@ def workflow(self) -> None: rename, ome_converter, kaggle_nuclei_segmentation, - ftl_plugin] + ftl_plugin + ] - workflow = api.Workflow(steps, "experiment", self.workflow_path) - # # Saving CLT for plugins - workflow._save_all_cwl(overwrite=True) - # # Adding environmental variables for bbbc_download and ome_converter plugin - self.modify_cwl() - # # # Save yaml to run CWL tool - workflow._save_yaml() + workflow = Workflow(steps, f"{self.name}_workflow") # Compile and run using WIC python API - workflow.compile(run_local=True, overwrite=False) - # # print(workflow.yml_path) - # # clean autognerated directories - self._clean() - self._move_outputs() + workflow.compile() + workflow.run() logger.info("Completed CWL nuclear segmentation workflow.") return \ No newline at end of file diff --git a/src/polus/image/workflows/utils.py b/src/image/workflows/utils.py similarity index 100% rename from src/polus/image/workflows/utils.py rename to src/image/workflows/utils.py diff --git a/src/polus/image/workflows/__init__.py b/src/polus/image/workflows/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/workflows/__init__.py b/workflows/__init__.py deleted file mode 100644 index e69de29..0000000 From 18f538458d452506077fb98bdefaba2d600dbfed Mon Sep 17 00:00:00 2001 From: Hamdah Shafqat Abbasi Date: Fri, 27 Sep 2024 15:15:40 -0400 Subject: [PATCH 05/11] updated visualization workflow --- .../check_membership/package-lock.json | 321 ------------------ .../my_actions/check_membership/package.json | 17 - .../repository_dispatch/package-lock.json | 321 ------------------ .../repository_dispatch/package.json | 17 - configuration/segmentation/BBBC001.yml | 8 +- configuration/visualization/BBBC001.yml | 14 + configuration/visualization/BBBC039.yml | 11 + configuration/visualization/__init__.py | 0 configuration/visualization/sample.yml | 12 + cwl_adapters/FileRenaming.cwl | 41 --- cwl_adapters/FtlLabel.cwl | 37 -- cwl_adapters/KaggleNucleiSegmentation.cwl | 37 -- cwl_adapters/OmeConverter.cwl | 37 -- cwl_adapters/basic-flatfield-estimation.cwl | 83 ----- cwl_adapters/bbbcdownload.cwl | 29 -- cwl_adapters/file-renaming.cwl | 66 ---- cwl_adapters/image_assembler.cwl | 77 ----- cwl_adapters/montage.cwl | 94 ----- cwl_adapters/ome-converter.cwl | 57 ---- cwl_adapters/precompute_slide.cwl | 61 ---- pyproject.toml | 3 - src/image/workflows/__main__.py | 35 +- .../cwl_nuclear_segmentation.cpython-310.pyc | Bin 7048 -> 0 bytes .../__pycache__/utils.cpython-310.pyc | Bin 2779 -> 0 bytes src/image/workflows/cwl_analysis.py | 316 +++++++---------- .../workflows/cwl_nuclear_segmentation.py | 261 ++++++-------- src/image/workflows/cwl_visualization.py | 219 ++++++++++++ src/image/workflows/utils.py | 104 ++++-- 28 files changed, 586 insertions(+), 1692 deletions(-) delete mode 100644 .github/my_actions/check_membership/package-lock.json delete mode 100644 .github/my_actions/check_membership/package.json delete mode 100644 .github/my_actions/repository_dispatch/package-lock.json delete mode 100644 
.github/my_actions/repository_dispatch/package.json create mode 100644 configuration/visualization/BBBC001.yml create mode 100644 configuration/visualization/BBBC039.yml create mode 100644 configuration/visualization/__init__.py create mode 100644 configuration/visualization/sample.yml delete mode 100644 cwl_adapters/FileRenaming.cwl delete mode 100644 cwl_adapters/FtlLabel.cwl delete mode 100644 cwl_adapters/KaggleNucleiSegmentation.cwl delete mode 100644 cwl_adapters/OmeConverter.cwl delete mode 100644 cwl_adapters/basic-flatfield-estimation.cwl delete mode 100644 cwl_adapters/bbbcdownload.cwl delete mode 100644 cwl_adapters/file-renaming.cwl delete mode 100644 cwl_adapters/image_assembler.cwl delete mode 100644 cwl_adapters/montage.cwl delete mode 100644 cwl_adapters/ome-converter.cwl delete mode 100644 cwl_adapters/precompute_slide.cwl delete mode 100644 src/image/workflows/__pycache__/cwl_nuclear_segmentation.cpython-310.pyc delete mode 100644 src/image/workflows/__pycache__/utils.cpython-310.pyc create mode 100644 src/image/workflows/cwl_visualization.py diff --git a/.github/my_actions/check_membership/package-lock.json b/.github/my_actions/check_membership/package-lock.json deleted file mode 100644 index 6dac377..0000000 --- a/.github/my_actions/check_membership/package-lock.json +++ /dev/null @@ -1,321 +0,0 @@ -{ - "name": "my_actions", - "version": "1.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "my_actions", - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "@actions/core": "^1.10.0", - "@actions/github": "^5.1.1", - "node-fetch": "^3.3.2" - } - }, - "node_modules/@actions/core": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", - "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", - "dependencies": { - "@actions/http-client": "^2.0.1", - "uuid": "^8.3.2" - } - }, - "node_modules/@actions/github": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.1.1.tgz", - "integrity": "sha512-Nk59rMDoJaV+mHCOJPXuvB1zIbomlKS0dmSIqPGxd0enAXBnOfn4VWF+CGtRCwXZG9Epa54tZA7VIRlJDS8A6g==", - "dependencies": { - "@actions/http-client": "^2.0.1", - "@octokit/core": "^3.6.0", - "@octokit/plugin-paginate-rest": "^2.17.0", - "@octokit/plugin-rest-endpoint-methods": "^5.13.0" - } - }, - "node_modules/@actions/http-client": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.1.tgz", - "integrity": "sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw==", - "dependencies": { - "tunnel": "^0.0.6" - } - }, - "node_modules/@octokit/auth-token": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", - "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", - "dependencies": { - "@octokit/types": "^6.0.3" - } - }, - "node_modules/@octokit/core": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", - "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", - "dependencies": { - "@octokit/auth-token": "^2.4.4", - "@octokit/graphql": "^4.5.8", - "@octokit/request": "^5.6.3", - "@octokit/request-error": "^2.0.5", - "@octokit/types": "^6.0.3", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - } 
- }, - "node_modules/@octokit/endpoint": { - "version": "6.0.12", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", - "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", - "dependencies": { - "@octokit/types": "^6.0.3", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/graphql": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", - "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", - "dependencies": { - "@octokit/request": "^5.6.0", - "@octokit/types": "^6.0.3", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/openapi-types": { - "version": "12.11.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", - "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" - }, - "node_modules/@octokit/plugin-paginate-rest": { - "version": "2.21.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", - "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", - "dependencies": { - "@octokit/types": "^6.40.0" - }, - "peerDependencies": { - "@octokit/core": ">=2" - } - }, - "node_modules/@octokit/plugin-rest-endpoint-methods": { - "version": "5.16.2", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", - "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", - "dependencies": { - "@octokit/types": "^6.39.0", - "deprecation": "^2.3.1" - }, - "peerDependencies": { - "@octokit/core": ">=3" - } - }, - "node_modules/@octokit/request": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", - "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", - "dependencies": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.1.0", - "@octokit/types": "^6.16.1", - "is-plain-object": "^5.0.0", - "node-fetch": "^2.6.7", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/request-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "dependencies": { - "@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "node_modules/@octokit/request/node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/@octokit/types": { - "version": "6.41.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", - "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", - "dependencies": { - 
"@octokit/openapi-types": "^12.11.0" - } - }, - "node_modules/before-after-hook": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", - "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" - }, - "node_modules/data-uri-to-buffer": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", - "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", - "engines": { - "node": ">= 12" - } - }, - "node_modules/deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "node_modules/fetch-blob": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", - "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "paypal", - "url": "https://paypal.me/jimmywarting" - } - ], - "dependencies": { - "node-domexception": "^1.0.0", - "web-streams-polyfill": "^3.0.3" - }, - "engines": { - "node": "^12.20 || >= 14.13" - } - }, - "node_modules/formdata-polyfill": { - "version": "4.0.10", - "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", - "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", - "dependencies": { - "fetch-blob": "^3.1.2" - }, - "engines": { - "node": ">=12.20.0" - } - }, - "node_modules/is-plain-object": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/node-domexception": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", - "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "github", - "url": "https://paypal.me/jimmywarting" - } - ], - "engines": { - "node": ">=10.5.0" - } - }, - "node_modules/node-fetch": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", - "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", - "dependencies": { - "data-uri-to-buffer": "^4.0.0", - "fetch-blob": "^3.1.4", - "formdata-polyfill": "^4.0.10" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/node-fetch" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": 
"sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, - "node_modules/tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "engines": { - "node": ">=0.6.11 <=0.7.0 || >=0.7.3" - } - }, - "node_modules/universal-user-agent": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", - "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" - }, - "node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/web-streams-polyfill": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz", - "integrity": "sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==", - "engines": { - "node": ">= 8" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - } - } -} diff --git a/.github/my_actions/check_membership/package.json b/.github/my_actions/check_membership/package.json deleted file mode 100644 index 7c7a14c..0000000 --- a/.github/my_actions/check_membership/package.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "name": "my_actions", - "version": "1.0.0", - "main": "index.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "keywords": [], - "author": "", - "license": "MIT", - "dependencies": { - "@actions/core": "^1.10.0", - "@actions/github": "^5.1.1", - "node-fetch": "^3.3.2" - }, - "description": "" -} diff --git a/.github/my_actions/repository_dispatch/package-lock.json b/.github/my_actions/repository_dispatch/package-lock.json deleted file mode 100644 index 6dac377..0000000 --- a/.github/my_actions/repository_dispatch/package-lock.json +++ /dev/null @@ -1,321 +0,0 @@ -{ - "name": "my_actions", - "version": "1.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "my_actions", - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "@actions/core": "^1.10.0", - "@actions/github": "^5.1.1", - "node-fetch": "^3.3.2" - } - }, - "node_modules/@actions/core": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", - "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", - "dependencies": { - 
"@actions/http-client": "^2.0.1", - "uuid": "^8.3.2" - } - }, - "node_modules/@actions/github": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.1.1.tgz", - "integrity": "sha512-Nk59rMDoJaV+mHCOJPXuvB1zIbomlKS0dmSIqPGxd0enAXBnOfn4VWF+CGtRCwXZG9Epa54tZA7VIRlJDS8A6g==", - "dependencies": { - "@actions/http-client": "^2.0.1", - "@octokit/core": "^3.6.0", - "@octokit/plugin-paginate-rest": "^2.17.0", - "@octokit/plugin-rest-endpoint-methods": "^5.13.0" - } - }, - "node_modules/@actions/http-client": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.1.tgz", - "integrity": "sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw==", - "dependencies": { - "tunnel": "^0.0.6" - } - }, - "node_modules/@octokit/auth-token": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", - "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", - "dependencies": { - "@octokit/types": "^6.0.3" - } - }, - "node_modules/@octokit/core": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz", - "integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==", - "dependencies": { - "@octokit/auth-token": "^2.4.4", - "@octokit/graphql": "^4.5.8", - "@octokit/request": "^5.6.3", - "@octokit/request-error": "^2.0.5", - "@octokit/types": "^6.0.3", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/endpoint": { - "version": "6.0.12", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", - "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", - "dependencies": { - "@octokit/types": "^6.0.3", - "is-plain-object": "^5.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/graphql": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz", - "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==", - "dependencies": { - "@octokit/request": "^5.6.0", - "@octokit/types": "^6.0.3", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/openapi-types": { - "version": "12.11.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz", - "integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==" - }, - "node_modules/@octokit/plugin-paginate-rest": { - "version": "2.21.3", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz", - "integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==", - "dependencies": { - "@octokit/types": "^6.40.0" - }, - "peerDependencies": { - "@octokit/core": ">=2" - } - }, - "node_modules/@octokit/plugin-rest-endpoint-methods": { - "version": "5.16.2", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz", - "integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==", - "dependencies": { - "@octokit/types": "^6.39.0", - "deprecation": "^2.3.1" - }, - "peerDependencies": { - "@octokit/core": ">=3" 
- } - }, - "node_modules/@octokit/request": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz", - "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==", - "dependencies": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.1.0", - "@octokit/types": "^6.16.1", - "is-plain-object": "^5.0.0", - "node-fetch": "^2.6.7", - "universal-user-agent": "^6.0.0" - } - }, - "node_modules/@octokit/request-error": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", - "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", - "dependencies": { - "@octokit/types": "^6.0.3", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "node_modules/@octokit/request/node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/@octokit/types": { - "version": "6.41.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz", - "integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==", - "dependencies": { - "@octokit/openapi-types": "^12.11.0" - } - }, - "node_modules/before-after-hook": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", - "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" - }, - "node_modules/data-uri-to-buffer": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", - "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", - "engines": { - "node": ">= 12" - } - }, - "node_modules/deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "node_modules/fetch-blob": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", - "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "paypal", - "url": "https://paypal.me/jimmywarting" - } - ], - "dependencies": { - "node-domexception": "^1.0.0", - "web-streams-polyfill": "^3.0.3" - }, - "engines": { - "node": "^12.20 || >= 14.13" - } - }, - "node_modules/formdata-polyfill": { - "version": "4.0.10", - "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", - "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", - "dependencies": { - "fetch-blob": "^3.1.2" - }, - "engines": { - "node": ">=12.20.0" - } - }, - "node_modules/is-plain-object": { - "version": "5.0.0", - "resolved": 
"https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/node-domexception": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", - "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "github", - "url": "https://paypal.me/jimmywarting" - } - ], - "engines": { - "node": ">=10.5.0" - } - }, - "node_modules/node-fetch": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", - "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", - "dependencies": { - "data-uri-to-buffer": "^4.0.0", - "fetch-blob": "^3.1.4", - "formdata-polyfill": "^4.0.10" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/node-fetch" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, - "node_modules/tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "engines": { - "node": ">=0.6.11 <=0.7.0 || >=0.7.3" - } - }, - "node_modules/universal-user-agent": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", - "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" - }, - "node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/web-streams-polyfill": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz", - "integrity": "sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==", - "engines": { - "node": ">= 8" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/wrappy": { - "version": 
"1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - } - } -} diff --git a/.github/my_actions/repository_dispatch/package.json b/.github/my_actions/repository_dispatch/package.json deleted file mode 100644 index 7c7a14c..0000000 --- a/.github/my_actions/repository_dispatch/package.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "name": "my_actions", - "version": "1.0.0", - "main": "index.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "keywords": [], - "author": "", - "license": "MIT", - "dependencies": { - "@actions/core": "^1.10.0", - "@actions/github": "^5.1.1", - "node-fetch": "^3.3.2" - }, - "description": "" -} diff --git a/configuration/segmentation/BBBC001.yml b/configuration/segmentation/BBBC001.yml index 4ed7653..a4cbe36 100644 --- a/configuration/segmentation/BBBC001.yml +++ b/configuration/segmentation/BBBC001.yml @@ -1,11 +1,11 @@ --- -name : BBBC001 -file_pattern : /.*/.*/.*/Images/.*/.*_{row:c}{col:dd}f{f:dd}d{channel:d}.tif -out_file_pattern : x{row:dd}_y{col:dd}_p{f:dd}_c{channel:d}.tif +name: BBBC001 +file_pattern: /.*/.*/.*/Images/.*/.*_{row:c}{col:dd}f{f:dd}d{channel:d}.tif +out_file_pattern: x{row:dd}_y{col:dd}_p{f:dd}_c{channel:d}.tif image_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c{c:d}.ome.tif seg_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c0.ome.tif ff_pattern: "x00_y03_p0\\(0-5\\)_c{c:d}_flatfield.ome.tif" df_pattern: "x00_y03_p0\\(0-5\\)_c{c:d}_darkfield.ome.tif" group_by: c map_directory: false -background_correction: false \ No newline at end of file +background_correction: false diff --git a/configuration/visualization/BBBC001.yml b/configuration/visualization/BBBC001.yml new file mode 100644 index 0000000..7f5cb25 --- /dev/null +++ b/configuration/visualization/BBBC001.yml @@ -0,0 +1,14 @@ +--- +name: BBBC001 +file_pattern: /.*/.*/.*/Images/.*/.*_{row:c}{col:dd}f{f:dd}d{channel:d}.tif +out_file_pattern: x{row:dd}_y{col:dd}_p{f:dd}_c{channel:d}.tif +image_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c{c:d}.ome.tif +seg_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c0.ome.tif +ff_pattern: "x00_y03_p0\\(0-5\\)_c{c:d}_flatfield.ome.tif" +df_pattern: "x00_y03_p0\\(0-5\\)_c{c:d}_darkfield.ome.tif" +group_by: c +map_directory: false +background_correction: false +layout: p +pyramid_type: Zarr +image_type: Intensity diff --git a/configuration/visualization/BBBC039.yml b/configuration/visualization/BBBC039.yml new file mode 100644 index 0000000..1884878 --- /dev/null +++ b/configuration/visualization/BBBC039.yml @@ -0,0 +1,11 @@ +--- +name : BBBC039 +file_pattern : /.*/.*/.*/Images/.*/.*_{row:c}{col:dd}_s{s:d}_w{channel:d}.*.tif +out_file_pattern : x{row:dd}_y{col:dd}_p{s:dd}_c{channel:d}.tif +image_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c{c:d}.ome.tif +seg_pattern: x{x:dd}_y{y:dd}_p{p:dd}_c1.ome.tif +ff_pattern: "x\\(00-15\\)_y\\(01-24\\)_p0\\(1-9\\)_c{c:d}_flatfield.ome.tif" +df_pattern: "x\\(00-15\\)_y\\(01-24\\)_p0\\(1-9\\)_c{c:d}_darkfield.ome.tif" +group_by: c +map_directory: false +background_correction: false \ No newline at end of file diff --git a/configuration/visualization/__init__.py b/configuration/visualization/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/configuration/visualization/sample.yml b/configuration/visualization/sample.yml new file mode 100644 index 0000000..ecc82e1 --- /dev/null +++ b/configuration/visualization/sample.yml @@ -0,0 +1,12 @@ +--- +name : +file_pattern : +out_file_pattern : 
+image_pattern: +seg_pattern: +ff_pattern: +df_pattern: +group_by: +map_directory: +features: +file_extension: \ No newline at end of file diff --git a/cwl_adapters/FileRenaming.cwl b/cwl_adapters/FileRenaming.cwl deleted file mode 100644 index 1e154ed..0000000 --- a/cwl_adapters/FileRenaming.cwl +++ /dev/null @@ -1,41 +0,0 @@ -class: CommandLineTool -cwlVersion: v1.2 -inputs: - filePattern: - inputBinding: - prefix: --filePattern - type: string - inpDir: - inputBinding: - prefix: --inpDir - type: Directory - mapDirectory: - inputBinding: - prefix: --mapDirectory - type: boolean? - outDir: - inputBinding: - prefix: --outDir - type: Directory - outFilePattern: - inputBinding: - prefix: --outFilePattern - type: string -outputs: - outDir: - outputBinding: - glob: $(inputs.outDir.basename) - type: Directory -requirements: - DockerRequirement: - dockerPull: polusai/file-renaming-tool:0.2.4-dev1 - EnvVarRequirement: - envDef: - HOME: /home/polusai - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true - InlineJavascriptRequirement: {} - NetworkAccess: - networkAccess: true diff --git a/cwl_adapters/FtlLabel.cwl b/cwl_adapters/FtlLabel.cwl deleted file mode 100644 index 227be2a..0000000 --- a/cwl_adapters/FtlLabel.cwl +++ /dev/null @@ -1,37 +0,0 @@ -class: CommandLineTool -cwlVersion: v1.2 -inputs: - binarizationThreshold: - inputBinding: - prefix: --binarizationThreshold - type: double - connectivity: - inputBinding: - prefix: --connectivity - type: string - inpDir: - inputBinding: - prefix: --inpDir - type: Directory - outDir: - inputBinding: - prefix: --outDir - type: Directory -outputs: - outDir: - outputBinding: - glob: $(inputs.outDir.basename) - type: Directory -requirements: - DockerRequirement: - dockerPull: polusai/ftl-label-plugin:0.3.12-dev5 - EnvVarRequirement: - envDef: - HOME: /home/polusai - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true - InlineJavascriptRequirement: {} - NetworkAccess: - networkAccess: true diff --git a/cwl_adapters/KaggleNucleiSegmentation.cwl b/cwl_adapters/KaggleNucleiSegmentation.cwl deleted file mode 100644 index 66ec970..0000000 --- a/cwl_adapters/KaggleNucleiSegmentation.cwl +++ /dev/null @@ -1,37 +0,0 @@ -class: CommandLineTool -cwlVersion: v1.2 -inputs: - filePattern: - inputBinding: - prefix: --filePattern - type: string? - inpDir: - inputBinding: - prefix: --inpDir - type: Directory - outDir: - inputBinding: - prefix: --outDir - type: Directory - preview: - inputBinding: - prefix: --preview - type: boolean? 
-outputs: - outDir: - outputBinding: - glob: $(inputs.outDir.basename) - type: Directory -requirements: - DockerRequirement: - dockerPull: polusai/kaggle-nuclei-segmentation-tool:0.1.5-dev1 - EnvVarRequirement: - envDef: - HOME: /home/polusai - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true - InlineJavascriptRequirement: {} - NetworkAccess: - networkAccess: true diff --git a/cwl_adapters/OmeConverter.cwl b/cwl_adapters/OmeConverter.cwl deleted file mode 100644 index 66cefde..0000000 --- a/cwl_adapters/OmeConverter.cwl +++ /dev/null @@ -1,37 +0,0 @@ -class: CommandLineTool -cwlVersion: v1.2 -inputs: - fileExtension: - inputBinding: - prefix: --fileExtension - type: string - filePattern: - inputBinding: - prefix: --filePattern - type: string - inpDir: - inputBinding: - prefix: --inpDir - type: Directory - outDir: - inputBinding: - prefix: --outDir - type: Directory -outputs: - outDir: - outputBinding: - glob: $(inputs.outDir.basename) - type: Directory -requirements: - DockerRequirement: - dockerPull: polusai/ome-converter-plugin:0.3.2-dev2 - EnvVarRequirement: - envDef: - HOME: /home/polusai - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true - InlineJavascriptRequirement: {} - NetworkAccess: - networkAccess: true diff --git a/cwl_adapters/basic-flatfield-estimation.cwl b/cwl_adapters/basic-flatfield-estimation.cwl deleted file mode 100644 index a56bafc..0000000 --- a/cwl_adapters/basic-flatfield-estimation.cwl +++ /dev/null @@ -1,83 +0,0 @@ -$namespaces: - cwltool: http://commonwl.org/cwltool# - edam: https://edamontology.org/ -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl -class: CommandLineTool -cwlVersion: v1.0 -doc: 'This WIPP plugin will take a collection of images and use the BaSiC flatfield - correction algorithm to generate a flatfield image, a darkfield image, and a photobleach - offset. - - https://github.com/PolusAI/polus-plugins/tree/master/regression/basic-flatfield-estimation-plugin' -hints: - cwltool:CUDARequirement: - cudaComputeCapabilityMin: '3.0' - cudaDeviceCountMax: 1 - cudaDeviceCountMin: 1 - cudaVersionMin: '11.4' -inputs: - filePattern: - doc: File pattern to subset data - inputBinding: - prefix: --filePattern - label: File pattern to subset data - type: string? - getDarkfield: - doc: If 'true', will calculate darkfield image - inputBinding: - prefix: --getDarkfield - label: If 'true', will calculate darkfield image - type: boolean? - groupBy: - doc: Variables to group together - inputBinding: - prefix: --groupBy - label: Variables to group together - type: string? - inpDir: - doc: Path to input images - inputBinding: - prefix: --inpDir - label: Path to input images - type: Directory - outDir: - doc: Output image collection - inputBinding: - prefix: --outDir - label: Output image collection - type: Directory - preview: - doc: Generate a JSON file describing what the outputs should be - inputBinding: - prefix: --preview - label: Generate a JSON file describing what the outputs should be - type: boolean? -label: BaSiC Flatfield Estimation -outputs: - outDir: - doc: Output image collection - label: Output image collection - outputBinding: - glob: $(inputs.outDir.basename) - type: Directory - preview_json: - doc: JSON file describing what the outputs should be - format: edam:format_3464 - label: JSON file describing what the outputs should be - outputBinding: - glob: preview.json - type: File? 
-requirements: - DockerRequirement: - dockerPull: polusai/basic-flatfield-estimation-plugin:2.1.1 - EnvVarRequirement: - envDef: - HOME: /home/polusai - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true - InlineJavascriptRequirement: {} - NetworkAccess: - networkAccess: true diff --git a/cwl_adapters/bbbcdownload.cwl b/cwl_adapters/bbbcdownload.cwl deleted file mode 100644 index 9eeba93..0000000 --- a/cwl_adapters/bbbcdownload.cwl +++ /dev/null @@ -1,29 +0,0 @@ -class: CommandLineTool -cwlVersion: v1.2 -inputs: - name: - inputBinding: - prefix: --name - type: string - outDir: - inputBinding: - prefix: --outDir - type: Directory -outputs: - outDir: - outputBinding: - glob: $(inputs.outDir.basename) - type: Directory -requirements: - DockerRequirement: - dockerPull: polusai/bbbc-download-plugin:0.1.0-dev1 - EnvVarRequirement: - envDef: - HOME: /home/polusai - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true - InlineJavascriptRequirement: {} - NetworkAccess: - networkAccess: true diff --git a/cwl_adapters/file-renaming.cwl b/cwl_adapters/file-renaming.cwl deleted file mode 100644 index 8ce2769..0000000 --- a/cwl_adapters/file-renaming.cwl +++ /dev/null @@ -1,66 +0,0 @@ -$namespaces: - edam: https://edamontology.org/ -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl -class: CommandLineTool -cwlVersion: v1.0 -doc: 'Rename and store image collection files in a new image collection - - https://github.com/PolusAI/polus-plugins/tree/master/formats/file-renaming-plugin' -inputs: - filePattern: - inputBinding: - prefix: --filePattern - type: string - inpDir: - inputBinding: - prefix: --inpDir - type: Directory - mapDirectory: - inputBinding: - prefix: --mapDirectory - type: string? - outDir: - doc: Output collection - inputBinding: - prefix: --outDir - label: Output collection - type: Directory - outFilePattern: - inputBinding: - prefix: --outFilePattern - type: string - preview: - doc: Generate a JSON file describing what the outputs should be - inputBinding: - prefix: --preview - label: Generate a JSON file describing what the outputs should be - type: boolean? -label: File Renaming -outputs: - outDir: - doc: Output collection - label: Output collection - outputBinding: - glob: $(inputs.outDir.basename) - type: Directory - preview_json: - doc: JSON file describing what the outputs should be - format: edam:format_3464 - label: JSON file describing what the outputs should be - outputBinding: - glob: preview.json - type: File? -requirements: - DockerRequirement: - dockerPull: polusai/file-renaming-plugin:0.2.1-dev0 - EnvVarRequirement: - envDef: - HOME: /home/polusai - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true - InlineJavascriptRequirement: {} - NetworkAccess: - networkAccess: true diff --git a/cwl_adapters/image_assembler.cwl b/cwl_adapters/image_assembler.cwl deleted file mode 100644 index 77466cc..0000000 --- a/cwl_adapters/image_assembler.cwl +++ /dev/null @@ -1,77 +0,0 @@ -$namespaces: - edam: https://edamontology.org/ -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl -class: CommandLineTool -cwlVersion: v1.0 -doc: 'This plugin assembles images into a stitched image using an image stitching - vector. 
- - https://github.com/PolusAI/polus-plugins/tree/master/transforms/images/image-assembler-plugin' -inputs: - imgPath: - doc: Path to input image collection - inputBinding: - prefix: --imgPath - label: Path to input image collection - type: Directory - outDir: - doc: Output collection - inputBinding: - prefix: --outDir - label: Output collection - type: Directory - preview: - doc: Generate a JSON file describing what the outputs should be - inputBinding: - prefix: --preview - label: Generate a JSON file describing what the outputs should be - type: boolean? - stitchPath: - doc: Path to directory containing "stitching vector" file img-global-positions-0.txt - inputBinding: - prefix: --stitchPath - label: Path to directory containing "stitching vector" file img-global-positions-0.txt - type: Directory - timesliceNaming: - doc: Label images by timeslice rather than analyzing input image names - inputBinding: - prefix: --timesliceNaming - label: Label images by timeslice rather than analyzing input image names - type: boolean? -label: Image Assembler -outputs: - assembled_image: - doc: JSON file with outputs - format: edam:format_3727 - label: The assembled montage image - outputBinding: - glob: '*.ome.tif' - type: File? - outDir: - doc: Output collection - label: Output collection - outputBinding: - glob: $(inputs.outDir.basename) - type: Directory - preview_json: - doc: JSON file with outputs - format: edam:format_3464 - label: JSON file with outputs - outputBinding: - glob: preview.json - type: File? -requirements: - DockerRequirement: - dockerPull: polusai/image-assembler-plugin:1.4.0-dev0 - EnvVarRequirement: - envDef: - HOME: /home/polusai - InitialWorkDirRequirement: - listing: - - $(inputs.stitchPath) - - entry: $(inputs.outDir) - writable: true - InlineJavascriptRequirement: {} - NetworkAccess: - networkAccess: true diff --git a/cwl_adapters/montage.cwl b/cwl_adapters/montage.cwl deleted file mode 100644 index dbd7a79..0000000 --- a/cwl_adapters/montage.cwl +++ /dev/null @@ -1,94 +0,0 @@ -$namespaces: - edam: https://edamontology.org/ -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl -class: CommandLineTool -cwlVersion: v1.0 -doc: 'This plugin generates a stitching vector that will montage images together. - - https://github.com/PolusAI/polus-plugins/tree/master/transforms/images/montage-plugin' -inputs: - filePattern: - doc: Filename pattern used to parse data - inputBinding: - prefix: --filePattern - label: Filename pattern used to parse data - type: string - flipAxis: - doc: Axes to flip when laying out images - inputBinding: - prefix: --flipAxis - label: Axes to flip when laying out images - type: string? - gridSpacing: - doc: Specify spacing between images in the lowest grid - inputBinding: - prefix: --gridSpacing - label: Specify spacing between images in the lowest grid - type: int? - imageSpacing: - doc: Specify spacing multiplier between grids - inputBinding: - prefix: --imageSpacing - label: Specify spacing multiplier between grids - type: int? - inpDir: - doc: Input image collection to be processed by this plugin - inputBinding: - prefix: --inpDir - label: Input image collection to be processed by this plugin - type: Directory - layout: - doc: Specify montage organization - inputBinding: - prefix: --layout - label: Specify montage organization - type: string? 
- outDir: - doc: Output collection - inputBinding: - prefix: --outDir - label: Output collection - type: Directory - preview: - doc: Generate a JSON file describing what the outputs should be - inputBinding: - prefix: --preview - label: Generate a JSON file describing what the outputs should be - type: boolean? -label: Montage -outputs: - global_positions: - doc: The "stitching vector", i.e. the positions of the individual images in the - montage - label: The "stitching vector", i.e. the positions of the individual images in - the montage - outputBinding: - glob: $(inputs.outDir.basename)/img-global-positions-0.txt - type: File? - outDir: - doc: Output collection - label: Output collection - outputBinding: - glob: $(inputs.outDir.basename) - type: Directory - preview_json: - doc: JSON file describing what the outputs should be - format: edam:format_3464 - label: JSON file describing what the outputs should be - outputBinding: - glob: preview.json - type: File? -requirements: - DockerRequirement: - dockerPull: polusai/montage-plugin:0.5.0 - EnvVarRequirement: - envDef: - HOME: /home/polusai - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true - InlineJavascriptRequirement: {} - NetworkAccess: - networkAccess: true diff --git a/cwl_adapters/ome-converter.cwl b/cwl_adapters/ome-converter.cwl deleted file mode 100644 index 43171e9..0000000 --- a/cwl_adapters/ome-converter.cwl +++ /dev/null @@ -1,57 +0,0 @@ -$namespaces: - edam: https://edamontology.org/ -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl -class: CommandLineTool -cwlVersion: v1.0 -doc: 'This WIPP plugin converts BioFormats supported data types to the OME Zarr file - format. - - https://github.com/PolusAI/polus-plugins/tree/master/formats/ome-converter-plugin' -inputs: - fileExtension: - default: default - doc: The file extension - inputBinding: - prefix: --fileExtension - label: The file extension - type: string - filePattern: - doc: A filepattern, used to select data for conversion - inputBinding: - prefix: --filePattern - label: A filepattern, used to select data for conversion - type: string - inpDir: - doc: Input generic data collection to be processed by this plugin - inputBinding: - prefix: --inpDir - label: Input generic data collection to be processed by this plugin - type: Directory - outDir: - doc: Output collection - inputBinding: - prefix: --outDir - label: Output collection - type: Directory -label: OME Zarr Converter -outputs: - outDir: - doc: Output collection - label: Output collection - outputBinding: - glob: $(inputs.outDir.basename) - type: Directory -requirements: - DockerRequirement: - dockerPull: polusai/ome-converter-plugin:0.3.2-dev2 - EnvVarRequirement: - envDef: - HOME: /home/polusai - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true - InlineJavascriptRequirement: {} - NetworkAccess: - networkAccess: true diff --git a/cwl_adapters/precompute_slide.cwl b/cwl_adapters/precompute_slide.cwl deleted file mode 100644 index 35ab194..0000000 --- a/cwl_adapters/precompute_slide.cwl +++ /dev/null @@ -1,61 +0,0 @@ -$namespaces: - edam: https://edamontology.org/ -$schemas: -- https://raw.githubusercontent.com/edamontology/edamontology/master/EDAM_dev.owl -class: CommandLineTool -cwlVersion: v1.0 -doc: 'This plugin generates image pyramids in multiple viewing formats. 
- - https://github.com/PolusAI/polus-plugins/tree/master/visualization/polus-precompute-slide-plugin' -inputs: - filePattern: - doc: Filename pattern used to parse data - inputBinding: - prefix: --filePattern - label: Filename pattern used to parse data - type: string? - imageType: - doc: Image is either Segmentation or Image - inputBinding: - prefix: --imageType - label: Image is either Segmentation or Image - type: string - inpDir: - doc: Input generic data collection to be processed by this plugin - inputBinding: - prefix: --inpDir - label: Input generic data collection to be processed by this plugin - type: Directory - outDir: - doc: Output collection - inputBinding: - prefix: --outDir - label: Output collection - type: Directory - pyramidType: - doc: Build a DeepZoom, Neuroglancer, Zarr pyramid - inputBinding: - prefix: --pyramidType - label: Build a DeepZoom, Neuroglancer, Zarr pyramid - type: string -label: Precompute Slide -outputs: - outDir: - doc: Output collection - label: Output collection - outputBinding: - glob: $(inputs.outDir.basename) - type: Directory -requirements: - DockerRequirement: - dockerPull: polusai/precompute-slide-plugin:1.7.0-dev0 - EnvVarRequirement: - envDef: - HOME: /home/polusai - InitialWorkDirRequirement: - listing: - - entry: $(inputs.outDir) - writable: true - InlineJavascriptRequirement: {} - NetworkAccess: - networkAccess: true diff --git a/pyproject.toml b/pyproject.toml index 9ae0fcf..b0d1cb8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,9 +19,6 @@ polus-tools = { git = "https://github.com/PolusAI/tools.git" } - - - [tool.poetry.group.dev.dependencies] jupyter = "^1.0.0" nbconvert = "^7.11.0" diff --git a/src/image/workflows/__main__.py b/src/image/workflows/__main__.py index 7935dcd..7e25851 100644 --- a/src/image/workflows/__main__.py +++ b/src/image/workflows/__main__.py @@ -2,9 +2,11 @@ import logging import typer from pathlib import Path -from utils import LoadYaml -# from cwl_analysis import CWLAnalysisWorkflow -from cwl_nuclear_segmentation import CWLSegmentationWorkflow +from typing import Optional +from image.workflows.utils import LoadYaml +from image.workflows.cwl_analysis import CWLAnalysisWorkflow +from image.workflows.cwl_nuclear_segmentation import CWLSegmentationWorkflow +from image.workflows.cwl_visualization import CWLVisualizationWorkflow @@ -33,6 +35,12 @@ def main( "--workflow", "-w", help="Name of cwl workflow" + ), + out_dir: Optional[Path] = typer.Option( + None, + "--outDir", + "-o", + help="Name of cwl workflow" ) ) -> None: @@ -40,26 +48,37 @@ def main( logger.info(f"name = {name}") logger.info(f"workflow = {workflow}") + logger.info(f"outDir = {out_dir}") - config_path = Path(__file__).parent.parent.parent.parent.joinpath(f"configuration/{workflow}/{name}.yml") - + config_path = Path(__file__).resolve().parents[3].joinpath(f"configuration/{workflow}/{name}.yml") + model = LoadYaml(workflow=workflow, config_path=config_path) params = model.parse_yaml() + if out_dir == None: + out_dir = Path(__file__).parent.parent.parent.parent + params["out_dir"] = out_dir + + # if workflow == "analysis": # logger.info(f"Executing {workflow}!!!") # model = CWLAnalysisWorkflow(**params) # model.workflow() - if workflow == "segmentation": + # if workflow == "segmentation": + # logger.info(f"Executing {workflow}!!!") + # model = CWLSegmentationWorkflow(**params) + # model.workflow() + + if workflow == "visualization": logger.info(f"Executing {workflow}!!!") - model = CWLSegmentationWorkflow(**params) + model = 
CWLVisualizationWorkflow(**params) model.workflow() - # logger.info("Completed CWL workflow!!!") + logger.info("Completed CWL workflow!!!") if __name__ == "__main__": diff --git a/src/image/workflows/__pycache__/cwl_nuclear_segmentation.cpython-310.pyc b/src/image/workflows/__pycache__/cwl_nuclear_segmentation.cpython-310.pyc deleted file mode 100644 index 6fa22e4e95533c355b0ad0d339bda07158bfee24..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 [... GIT binary patch data omitted ...]
diff --git a/src/image/workflows/__pycache__/utils.cpython-310.pyc b/src/image/workflows/__pycache__/utils.cpython-310.pyc deleted file mode 100644 index 23397ca67a45946c94b3e449e6753569a209fb74..0000000000000000000000000000000000000000 - """Create directories for CWL outputs""" - cwl_path = self.PATH.joinpath("cwl_adapters") - cwl_path.mkdir(parents=True, exist_ok=True) - workflow_path = self.PATH.joinpath("outputs").resolve() - workflow_path.mkdir(exist_ok=True) - return cwl_path, workflow_path - - def _clean(self) -> None: - """Cleaning of redundant directories generating on running CWL""" - logger.info("Cleaning directories!!!") - destination_path = self.workflow_path.joinpath("experiment") - dir_names = ("autogenerated", "cachedir", "RUNS", "provenance") - for i, d in zip(self.wic_path.iterdir(), self.PATH.iterdir()): - if i.name.endswith(dir_names): - shutil.rmtree(d) - if d.name.endswith(dir_names): - shutil.rmtree(d) - - for d in destination_path.iterdir(): - if d.name.endswith("cwl_adapters"): - shutil.rmtree(d) - for d in self.PATH.iterdir(): - if d.name.endswith("cwl_adapters"): - shutil.move(d, destination_path) - - return + self.out_dir = out_dir + self.adapters_path = Path(__file__).parent.parent.parent.parent.joinpath("cwl_adapters") + self.work_dir = Path.cwd() def _move_outputs(self) -> None: - """Transfer outputs from the WIC directory to the workflow path""" - logger.info("Move outputs to workflow path!!!") - for d in self.wic_path.iterdir(): - if d.name.endswith("outdir"): - shutil.move(d, self.workflow_path) - return - - def _camel(self, name: str) -> str: - """Convert plugin name to camel case.""" - name = re.sub(r"(_|-)+", " ", name).title().replace(" ", "") - return "".join([name[0].upper(), name[1:]]) - - def _string_after_period(self, x): - """Get a string after period.""" - match = re.search(r"\.(.*)", x) - if match: - # Get the part after the period - return f".*.{match.group(1)}" - else: - return "" - - def _add_backslash_before_parentheses(self, x): - """Add backslash to generate ff_pattern and df_pattern""" - # Define the regular expression pattern to match parenthesis - pattern_1 = r"(\()|(\))" - # Use re.sub() to add a backslash before starting and finishing parenthesis - result = re.sub(pattern_1, r"\\\1\2", x) - pattern_2 = r"\d" - result = ( - result.split("_c")[0] - +
"_c{c:d}" - + re.sub(pattern_2, "", result.split("_c")[1]) - ) - return result - - def create_step(self, url: str) -> api.Step: - """Generate the plugin class name from the plugin name specified in the manifest""" - manifest = pp.submit_plugin(url) - plugin_version = str(manifest.version) - cwl_tool = pp.get_plugin(self._camel(manifest.name), plugin_version).save_cwl( - self.cwl_path.joinpath(f"{self._camel(manifest.name)}.cwl") + """Move output files and directories to the specified output directory.""" + logger.info("Moving directories and JSON files to output directory.") + dir_names = {"autogenerated", "cachedir", "provenance", "outdir"} + + # Move specified directories + for directory in self.work_dir.iterdir(): + if directory.name in dir_names: + shutil.move(directory, self.out_dir) + + # Move JSON files + for json_file in self.work_dir.rglob("*.json"): + shutil.move(json_file, self.out_dir) + + @staticmethod + def _to_camel_case(name: str) -> str: + """Convert a plugin name to camel case.""" + return re.sub(r"(_|-)+", " ", name).title().replace(" ", "") + + @staticmethod + def _extract_file_extension(pattern: str) -> str: + """Extract and return the string after a period in the pattern.""" + match = re.search(r"\.(.*)", pattern) + return f".*.{match.group(1)}" if match else "" + + @staticmethod + def _escape_parentheses(pattern: str) -> str: + """Escape parentheses and generate patterns for flatfield and darkfield correction.""" + pattern = re.sub(r"(\()|(\))", r"\\\1\2", pattern) + return pattern.split("_c")[0] + "_c{c:d}" + re.sub(r"\d", "", pattern.split("_c")[1]) + + def create_step(self, plugin_url: str) -> Step: + """Create a step for the workflow based on the plugin manifest.""" + manifest = dict(pp.submit_plugin(plugin_url)) + plugin_version = str(manifest['version']) + cwl_tool = pp.get_plugin(self._to_camel_case(manifest['name']), plugin_version).save_cwl( + self.adapters_path.joinpath(f"{self._to_camel_case(manifest['name'])}.cwl") ) - step = api.Step(cwl_tool) - return step + self._modify_cwl() + return Step(cwl_tool) - def manifest_urls(self, x: str) -> str: - """URLs on GitHub for plugin manifests""" + def _get_manifest_url(self, plugin_name: str) -> str: + """Retrieve the URL for the plugin manifest from GitHub.""" + return MANIFEST_URLS.get(plugin_name, "") - urls = { - "bbbc_download": f"{GITHUB_TAG}/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json", - "file_renaming": f"{GITHUB_TAG}/hamshkhawar/image-tools/filepattern_filerenaming/formats/file-renaming-tool/plugin.json", - "ome_converter": f"{GITHUB_TAG}/hamshkhawar/image-tools/basecontainer_omecontainer/formats/ome-converter-plugin/plugin.json", - "estimate_flatfield": f"{GITHUB_TAG}/nishaq503/image-tools/fix/basic/regression/basic-flatfield-estimation-tool/plugin.json", - "apply_flatfield": f"{GITHUB_TAG}/hamshkhawar/image-tools/cast_images/transforms/images/apply-flatfield-tool/plugin.json", - "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/kaggle-nucleiseg/segmentation/kaggle-nuclei-segmentation-tool/plugin.json", - "ftl_plugin": f"{GITHUB_TAG}/nishaq503/image-tools/fix/ftl-label/transforms/images/polus-ftl-label-plugin/plugin.json", - "nyxus_plugin": f"{GITHUB_TAG}/hamshkhawar/image-tools/nyxus_manifest/features/nyxus-plugin/plugin.json", - } - return urls[x] - - def modify_cwl(self) -> None: - """Modify CWL to incorporate environmental variables and permission access""" - for f in list(self.cwl_path.rglob("*.cwl")): - if "cwl" in f.name: + def _modify_cwl(self) 
-> None: + """Modify CWL files to include environmental variables and permissions.""" + for cwl_file in self.adapters_path.rglob("*.cwl"): + if "cwl" in cwl_file.name: try: - with Path.open(f, "r") as file: + with cwl_file.open("r") as file: config = yaml.safe_load(file) - config["requirements"]["NetworkAccess"] = { - "networkAccess": True - } - config["requirements"]["EnvVarRequirement"] = { - "envDef": {"HOME": "/home/polusai"} - } - with open(f, "w") as out_file: - yaml.dump(config, out_file) + config.setdefault("requirements", {}) + config["requirements"]["NetworkAccess"] = {"networkAccess": True} + config["requirements"]["EnvVarRequirement"] = {"envDef": {"HOME": "/home/polusai"}} + with cwl_file.open("w") as out_file: + yaml.dump(config, out_file) except FileNotFoundError: - logger.info("Error: There was an unexpected error while processing the file.") - return + logger.error(f"Error processing file: {cwl_file}") def workflow(self) -> None: - """ - A CWL feature extraction pipeline. - """ - # BBBCDownload - bbbc = self.create_step(self.manifest_urls("bbbc_download")) + """Execute the CWL nuclear segmentation pipeline.""" + logger.info("Starting CWL nuclear segmentation workflow.") + + # Step: BBBC Download + bbbc = self.create_step(self._get_manifest_url("bbbc_download")) bbbc.name = self.name bbbc.outDir = Path("bbbc.outDir") - # Renaming plugin - rename = self.create_step(self.manifest_urls("file_renaming")) + # Step: File Renaming + rename = self.create_step(self._get_manifest_url("file_renaming")) rename.filePattern = self.file_pattern rename.outFilePattern = self.out_file_pattern rename.mapDirectory = self.map_directory rename.inpDir = bbbc.outDir rename.outDir = Path("rename.outDir") - # OMEConverter - ome_converter = self.create_step(self.manifest_urls("ome_converter")) - ome_converter.filePattern = self._string_after_period(self.out_file_pattern) + # Step: OME Converter + ome_converter = self.create_step(self._get_manifest_url("ome_converter")) + ome_converter.filePattern = self._extract_file_extension(self.out_file_pattern) ome_converter.fileExtension = ".ome.tif" ome_converter.inpDir = rename.outDir ome_converter.outDir = Path("ome_converter.outDir") + # Optional: Background correction if self.background_correction: - # Estimate Flatfield - estimate_flatfield = self.create_step(self.manifest_urls("estimate_flatfield")) + estimate_flatfield = self.create_step(self._get_manifest_url("estimate_flatfield")) estimate_flatfield.inpDir = ome_converter.outDir estimate_flatfield.filePattern = self.image_pattern estimate_flatfield.groupBy = self.group_by estimate_flatfield.getDarkfield = True estimate_flatfield.outDir = Path("estimate_flatfield.outDir") - # # Apply Flatfield - apply_flatfield = self.create_step(self.manifest_urls("apply_flatfield")) + apply_flatfield = self.create_step(self._get_manifest_url("apply_flatfield")) apply_flatfield.imgDir = ome_converter.outDir apply_flatfield.imgPattern = self.image_pattern apply_flatfield.ffDir = estimate_flatfield.outDir apply_flatfield.ffPattern = self.ff_pattern apply_flatfield.dfPattern = self.df_pattern apply_flatfield.outDir = Path("apply_flatfield.outDir") - apply_flatfield.dataType = True - ## Kaggle Nuclei Segmentation - kaggle_nuclei_segmentation = self.create_step( - self.manifest_urls("kaggle_nuclei_segmentation") - ) - if self.background_correction: - kaggle_nuclei_segmentation.inpDir = apply_flatfield.outDir - else: - kaggle_nuclei_segmentation.inpDir = ome_converter.outDir - kaggle_nuclei_segmentation.filePattern = 
self.image_pattern - kaggle_nuclei_segmentation.outDir = Path("kaggle_nuclei_segmentation.outDir") + # Step: Kaggle Nuclei Segmentation + kaggle_segmentation = self.create_step(self._get_manifest_url("kaggle_nuclei_segmentation")) + kaggle_segmentation.inpDir = apply_flatfield.outDir if self.background_correction else ome_converter.outDir + kaggle_segmentation.filePattern = self.image_pattern + kaggle_segmentation.outDir = Path("kaggle_nuclei_segmentation.outDir") - ## FTL Label Plugin - ftl_plugin = self.create_step(self.manifest_urls("ftl_plugin")) - ftl_plugin.inpDir = kaggle_nuclei_segmentation.outDir - ftl_plugin.connectivity = 1 + # Step: FTL Label Plugin + ftl_plugin = self.create_step(self._get_manifest_url("ftl_plugin")) + ftl_plugin.inpDir = kaggle_segmentation.outDir + ftl_plugin.connectivity = "1" ftl_plugin.binarizationThreshold = 0.5 ftl_plugin.outDir = Path("ftl_plugin.outDir") + # # ## Nyxus Plugin - nyxus_plugin = self.create_step(self.manifest_urls("nyxus_plugin")) - if self.background_correction: - nyxus_plugin.inpDir = apply_flatfield.outDir - else: - nyxus_plugin.inpDir = ome_converter.outDir + nyxus_plugin = self.create_step(self._get_manifest_url("nyxus_plugin")) + # nyxus_plugin = Step(clt_path='/Users/abbasih2/Documents/Job/Axle_Work/image-workflows/cwl_adapters/NyxusPlugin.cwl') + nyxus_plugin.inpDir = apply_flatfield.outDir if self.background_correction else ome_converter.outDir nyxus_plugin.segDir = ftl_plugin.outDir nyxus_plugin.intPattern = self.image_pattern nyxus_plugin.segPattern = self.seg_pattern @@ -249,41 +194,20 @@ def workflow(self) -> None: nyxus_plugin.pixelPerMicron = 1.0 nyxus_plugin.outDir = Path("nyxus_plugin.outDir") - logger.info("Initiating CWL Feature Extraction Workflow!!!") - if self.background_correction: - steps = [ - bbbc, - rename, - ome_converter, - estimate_flatfield, - apply_flatfield, - kaggle_nuclei_segmentation, - ftl_plugin, - nyxus_plugin - ] - else: - steps = [ - bbbc, - rename, - ome_converter, - kaggle_nuclei_segmentation, - ftl_plugin, - nyxus_plugin - ] - - workflow = api.Workflow(steps, "experiment", self.workflow_path) - # # Saving CLT for plugins - workflow._save_all_cwl(overwrite=True) - # # Adding environmental variables for bbbc_download and ome_converter plugin - self.modify_cwl() - # # # Save yaml to run CWL tool - workflow._save_yaml() + # Run the workflow + steps = [ + bbbc, rename, ome_converter, + estimate_flatfield if self.background_correction else None, + apply_flatfield if self.background_correction else None, + kaggle_segmentation, + ftl_plugin, + nyxus_plugin + ] + + workflow = Workflow(steps, f"{self.name}_workflow") # Compile and run using WIC python API - workflow.compile(run_local=True, overwrite=False) - # # print(workflow.yml_path) - # # clean autognerated directories - self._clean() + workflow.compile() + workflow.run() self._move_outputs() - logger.info("Completed CWL Feature Extraction /Analysis Workflow.") + logger.info("Completed CWL nuclear segmentation workflow.") return - \ No newline at end of file diff --git a/src/image/workflows/cwl_nuclear_segmentation.py b/src/image/workflows/cwl_nuclear_segmentation.py index d8c67fd..cb75a3c 100644 --- a/src/image/workflows/cwl_nuclear_segmentation.py +++ b/src/image/workflows/cwl_nuclear_segmentation.py @@ -1,36 +1,36 @@ -from sophios.api.pythonapi import Step, Workflow -import polus.tools.plugins as pp -from pathlib import Path -import yaml import logging import re import shutil +from pathlib import Path import typing -import sys -# 
sys.path.append('../') -from utils import GITHUB_TAG +import yaml + +from sophios.api.pythonapi import Step, Workflow +import polus.tools.plugins as pp +from image.workflows.utils import OUT_PATH,MANIFEST_URLS + # Initialize the logger logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) - class CWLSegmentationWorkflow: """ - A CWL Nuclear Segmentation pipeline. - + A CWL Nuclear Segmentation pipeline to process imaging datasets. + Attributes: - name : Name of imaging dataset of Broad Bioimage Benchmark Collection (https://bbbc.broadinstitute.org/image_sets). - file_pattern : Pattern for parsing raw filenames. - out_file_pattern : Preferred format for filenames - image_pattern : Pattern for parsing intensity image filenames after renaming when using map_directory - seg_pattern : Pattern use to parse segmentation image filenames - map_directory : Mapping of folder name - ff_pattern: The filename pattern employed to select flatfield components from the ffDir. - df_pattern:The filename pattern employed to select darkfield components from the ffDir - group_by: Grouping variables for filePattern - background_correction: Execute background correction + name: Name of the imaging dataset. + file_pattern: Pattern for parsing raw filenames. + out_file_pattern: Desired format for output filenames. + image_pattern: Pattern for parsing intensity image filenames. + seg_pattern: Pattern to parse segmentation image filenames. + ff_pattern: Filename pattern for selecting flatfield components. + df_pattern: Filename pattern for selecting darkfield components. + group_by: Variables used for grouping the file pattern. + background_correction: Flag to enable background correction. + map_directory: Enable mapping of folder names. + out_dir: Directory for saving outputs. 
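+
+    Example:
+        A minimal usage sketch. The dataset name matches a configuration
+        shipped in this repository, but the patterns below are illustrative
+        placeholders rather than values taken from that configuration file:
+
+            model = CWLSegmentationWorkflow(
+                name="BBBC001",
+                file_pattern=".*.tif",
+                out_file_pattern="x{x:dd}_y{y:dd}_p{p:d}_c{c:d}.tif",
+                image_pattern="x{x:dd}_y{y:dd}_p{p:d}_c{c:d}.ome.tif",
+                seg_pattern="x{x:dd}_y{y:dd}_p{p:d}_c{c:d}.ome.tif",
+                ff_pattern="",
+                df_pattern="",
+                background_correction=False,
+            )
+            model.workflow()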
""" def __init__( self, @@ -44,189 +44,150 @@ def __init__( group_by: typing.Optional[str] = '', map_directory: typing.Optional[bool] = False, background_correction: typing.Optional[bool] = False, + out_dir: typing.Optional[Path] = OUT_PATH ): self.name = name self.file_pattern = file_pattern self.out_file_pattern = out_file_pattern - self.map_directory = map_directory + self.image_pattern = image_pattern + self.seg_pattern = seg_pattern self.ff_pattern = ff_pattern self.df_pattern = df_pattern self.group_by = group_by - self.image_pattern = image_pattern - self.seg_pattern = seg_pattern + self.map_directory = map_directory self.background_correction = background_correction + self.out_dir = out_dir self.adapters_path = Path(__file__).parent.parent.parent.parent.joinpath("cwl_adapters") + self.work_dir = Path.cwd() - def _camel(self, name: str) -> str: - """Convert plugin name to camel case.""" - name = re.sub(r"(_|-)+", " ", name).title().replace(" ", "") - return "".join([name[0].upper(), name[1:]]) - - def _string_after_period(self, x): - """Get a string after period.""" - match = re.search(r"\.(.*)", x) - if match: - # Get the part after the period - return f".*.{match.group(1)}" - else: - return "" - - def _add_backslash_before_parentheses(self, x): - """Add backslash to generate ff_pattern and df_pattern""" - # Define the regular expression pattern to match parenthesis - pattern_1 = r"(\()|(\))" - # Use re.sub() to add a backslash before starting and finishing parenthesis - result = re.sub(pattern_1, r"\\\1\2", x) - pattern_2 = r"\d" - result = ( - result.split("_c")[0] - + "_c{c:d}" - + re.sub(pattern_2, "", result.split("_c")[1]) - ) - return result - - # def create_step(self, url: str) -> Step: - # """Generate the plugin class name from the plugin name specified in the manifest""" - # manifest = dict(pp.submit_plugin(url)) - # plugin_version = str(manifest.version) - # cwl_tool = pp.get_plugin(self._camel(manifest.name), plugin_version).save_cwl( - # self.cwl_path.joinpath(f"{self._camel(manifest.name)}.cwl") - # ) - # self.modify_cwl() - # step = Step(cwl_tool) - # return step - - def create_step(self, url: str) -> Step: - """Generate the plugin class name from the plugin name specified in the manifest""" - manifest = dict(pp.submit_plugin(url)) + def _move_outputs(self) -> None: + """Move output files and directories to the specified output directory.""" + logger.info("Moving directories and JSON files to output directory.") + dir_names = {"autogenerated", "cachedir", "provenance", "outdir"} + + # Move specified directories + for directory in self.work_dir.iterdir(): + if directory.name in dir_names: + shutil.move(directory, self.out_dir) + + # Move JSON files + for json_file in self.work_dir.rglob("*.json"): + shutil.move(json_file, self.out_dir) + + @staticmethod + def _to_camel_case(name: str) -> str: + """Convert a plugin name to camel case.""" + return re.sub(r"(_|-)+", " ", name).title().replace(" ", "") + + @staticmethod + def _extract_file_extension(pattern: str) -> str: + """Extract and return the string after a period in the pattern.""" + match = re.search(r"\.(.*)", pattern) + return f".*.{match.group(1)}" if match else "" + + @staticmethod + def _escape_parentheses(pattern: str) -> str: + """Escape parentheses and generate patterns for flatfield and darkfield correction.""" + pattern = re.sub(r"(\()|(\))", r"\\\1\2", pattern) + return pattern.split("_c")[0] + "_c{c:d}" + re.sub(r"\d", "", pattern.split("_c")[1]) + + def create_step(self, plugin_url: str) -> Step: + 
"""Create a step for the workflow based on the plugin manifest.""" + manifest = dict(pp.submit_plugin(plugin_url)) plugin_version = str(manifest['version']) - cwl_tool = pp.get_plugin(self._camel(manifest['name']), plugin_version).save_cwl( - self.adapters_path.joinpath(f"{self._camel(manifest['name'])}.cwl") + cwl_tool = pp.get_plugin(self._to_camel_case(manifest['name']), plugin_version).save_cwl( + self.adapters_path.joinpath(f"{self._to_camel_case(manifest['name'])}.cwl") ) - self.modify_cwl() - step = Step(cwl_tool) - return step - - def manifest_urls(self, x: str) -> str: - """URLs on GitHub for plugin manifests""" - urls = { - "bbbc_download": f"{GITHUB_TAG}/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json", - "file_renaming": f"{GITHUB_TAG}/hamshkhawar/image-tools/filepattern_filerenaming/formats/file-renaming-tool/plugin.json", - "ome_converter": f"{GITHUB_TAG}/hamshkhawar/image-tools/basecontainer_omecontainer/formats/ome-converter-plugin/plugin.json", - "estimate_flatfield": f"{GITHUB_TAG}/nishaq503/image-tools/fix/basic/regression/basic-flatfield-estimation-tool/plugin.json", - "apply_flatfield": f"{GITHUB_TAG}/hamshkhawar/image-tools/cast_images/transforms/images/apply-flatfield-tool/plugin.json", - "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/kaggle-nucleiseg/segmentation/kaggle-nuclei-segmentation-tool/plugin.json", - "ftl_plugin": f"{GITHUB_TAG}/nishaq503/image-tools/fix/ftl-label/transforms/images/polus-ftl-label-plugin/plugin.json" - } - return urls[x] - - def modify_cwl(self) -> None: - """Modify CWL to incorporate environmental variables and permission access""" - for f in list(self.adapters_path.rglob("*.cwl")): - if "cwl" in f.name: + self._modify_cwl() + return Step(cwl_tool) + + def _get_manifest_url(self, plugin_name: str) -> str: + """Retrieve the URL for the plugin manifest from GitHub.""" + return MANIFEST_URLS.get(plugin_name, "") + + def _modify_cwl(self) -> None: + """Modify CWL files to include environmental variables and permissions.""" + for cwl_file in self.adapters_path.rglob("*.cwl"): + if "cwl" in cwl_file.name: try: - with Path.open(f, "r") as file: + with cwl_file.open("r") as file: config = yaml.safe_load(file) - config["requirements"]["NetworkAccess"] = { - "networkAccess": True - } - config["requirements"]["EnvVarRequirement"] = { - "envDef": {"HOME": "/home/polusai"} - } - with open(f, "w") as out_file: - yaml.dump(config, out_file) + config.setdefault("requirements", {}) + config["requirements"]["NetworkAccess"] = {"networkAccess": True} + config["requirements"]["EnvVarRequirement"] = {"envDef": {"HOME": "/home/polusai"}} + with cwl_file.open("w") as out_file: + yaml.dump(config, out_file) except FileNotFoundError: - logger.info("Error: There was an unexpected error while processing the file.") - return + logger.error(f"Error processing file: {cwl_file}") def workflow(self) -> None: - """ - A CWL nuclear segmentation pipeline. 
- """ - # BBBCDownload - bbbc = self.create_step(self.manifest_urls("bbbc_download")) + """Execute the CWL nuclear segmentation pipeline.""" + logger.info("Starting CWL nuclear segmentation workflow.") + + # Step: BBBC Download + bbbc = self.create_step(self._get_manifest_url("bbbc_download")) bbbc.name = self.name bbbc.outDir = Path("bbbc.outDir") - # Renaming plugin - rename = self.create_step(self.manifest_urls("file_renaming")) + # Step: File Renaming + rename = self.create_step(self._get_manifest_url("file_renaming")) rename.filePattern = self.file_pattern rename.outFilePattern = self.out_file_pattern rename.mapDirectory = self.map_directory rename.inpDir = bbbc.outDir rename.outDir = Path("rename.outDir") - - # OMEConverter - ome_converter = self.create_step(self.manifest_urls("ome_converter")) - ome_converter.filePattern = self._string_after_period(self.out_file_pattern) + # Step: OME Converter + ome_converter = self.create_step(self._get_manifest_url("ome_converter")) + ome_converter.filePattern = self._extract_file_extension(self.out_file_pattern) ome_converter.fileExtension = ".ome.tif" ome_converter.inpDir = rename.outDir ome_converter.outDir = Path("ome_converter.outDir") + # Optional: Background correction if self.background_correction: - # Estimate Flatfield - estimate_flatfield = self.create_step(self.manifest_urls("estimate_flatfield")) + estimate_flatfield = self.create_step(self._get_manifest_url("estimate_flatfield")) estimate_flatfield.inpDir = ome_converter.outDir estimate_flatfield.filePattern = self.image_pattern estimate_flatfield.groupBy = self.group_by estimate_flatfield.getDarkfield = True estimate_flatfield.outDir = Path("estimate_flatfield.outDir") - # # Apply Flatfield - apply_flatfield = self.create_step(self.manifest_urls("apply_flatfield")) + apply_flatfield = self.create_step(self._get_manifest_url("apply_flatfield")) apply_flatfield.imgDir = ome_converter.outDir apply_flatfield.imgPattern = self.image_pattern apply_flatfield.ffDir = estimate_flatfield.outDir apply_flatfield.ffPattern = self.ff_pattern apply_flatfield.dfPattern = self.df_pattern apply_flatfield.outDir = Path("apply_flatfield.outDir") - apply_flatfield.dataType = True - - ## Kaggle Nuclei Segmentation - kaggle_nuclei_segmentation = self.create_step( - self.manifest_urls("kaggle_nuclei_segmentation") - ) - if self.background_correction: - kaggle_nuclei_segmentation.inpDir = apply_flatfield.outDir - else: - kaggle_nuclei_segmentation.inpDir = ome_converter.outDir - kaggle_nuclei_segmentation.filePattern = self.image_pattern - kaggle_nuclei_segmentation.outDir = Path("kaggle_nuclei_segmentation.outDir") + # Step: Kaggle Nuclei Segmentation + kaggle_segmentation = self.create_step(self._get_manifest_url("kaggle_nuclei_segmentation")) + kaggle_segmentation.inpDir = apply_flatfield.outDir if self.background_correction else ome_converter.outDir + kaggle_segmentation.filePattern = self.image_pattern + kaggle_segmentation.outDir = Path("kaggle_nuclei_segmentation.outDir") - ## FTL Label Plugin - ftl_plugin = self.create_step(self.manifest_urls("ftl_plugin")) - ftl_plugin.inpDir = kaggle_nuclei_segmentation.outDir + # Step: FTL Label Plugin + ftl_plugin = self.create_step(self._get_manifest_url("ftl_plugin")) + ftl_plugin.inpDir = kaggle_segmentation.outDir ftl_plugin.connectivity = "1" ftl_plugin.binarizationThreshold = 0.5 ftl_plugin.outDir = Path("ftl_plugin.outDir") - logger.info("Initiating CWL Nuclear Segmentation Workflow!!!") - if self.background_correction: - steps = [ - bbbc, - rename, - 
ome_converter, - estimate_flatfield, - apply_flatfield, - kaggle_nuclei_segmentation, - ftl_plugin - ] - else: - steps = [ - bbbc, - rename, - ome_converter, - kaggle_nuclei_segmentation, - ftl_plugin - ] - + # Run the workflow + steps = [ + bbbc, rename, ome_converter, + estimate_flatfield if self.background_correction else None, + apply_flatfield if self.background_correction else None, + kaggle_segmentation, + ftl_plugin + ] workflow = Workflow(steps, f"{self.name}_workflow") # Compile and run using WIC python API workflow.compile() workflow.run() + self._move_outputs() logger.info("Completed CWL nuclear segmentation workflow.") - return \ No newline at end of file + return diff --git a/src/image/workflows/cwl_visualization.py b/src/image/workflows/cwl_visualization.py new file mode 100644 index 0000000..f3bf589 --- /dev/null +++ b/src/image/workflows/cwl_visualization.py @@ -0,0 +1,219 @@ +import logging +import re +import shutil +from pathlib import Path +import typing +import yaml + +from sophios.api.pythonapi import Step, Workflow +import polus.tools.plugins as pp +from image.workflows.utils import OUT_PATH,MANIFEST_URLS + + +# Initialize the logger +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + + +class CWLVisualizationWorkflow: + """ + A CWL visualization pipeline to process imaging datasets. + + Attributes: + name: Name of the imaging dataset. + file_pattern: Pattern for parsing raw filenames. + out_file_pattern: Desired format for output filenames. + image_pattern: Pattern for parsing intensity image filenames. + seg_pattern: Pattern to parse segmentation image filenames. + layout : A list indicating the grid layout. + pyramid_type : A list indicating the grid layout. + ff_pattern: Filename pattern for selecting flatfield components. + df_pattern: Filename pattern for selecting darkfield components. + group_by: Variables used for grouping the file pattern. + background_correction: Flag to enable background correction. + map_directory: Enable mapping of folder names. + out_dir: Directory for saving outputs. 
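+
+    Example:
+        A minimal usage sketch. The pyramid and image types follow the options
+        documented in the precompute-slide adapter; the dataset name, patterns,
+        and layout below are illustrative placeholders, not values taken from a
+        shipped configuration file:
+
+            model = CWLVisualizationWorkflow(
+                name="BBBC001",
+                file_pattern=".*.tif",
+                out_file_pattern="x{x:dd}_y{y:dd}_p{p:d}_c{c:d}.tif",
+                image_pattern="x{x:dd}_y{y:dd}_p{p:d}_c{c:d}.ome.tif",
+                seg_pattern="x{x:dd}_y{y:dd}_p{p:d}_c{c:d}.ome.tif",
+                layout="p,xy",
+                pyramid_type="Neuroglancer",
+                image_type="Image",
+                background_correction=False,
+            )
+            model.workflow()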
+ """ + def __init__( + self, + name: str, + file_pattern: str, + out_file_pattern: str, + image_pattern: str, + seg_pattern: str, + layout: str, + pyramid_type: str, + image_type: str, + ff_pattern: typing.Optional[str] = '', + df_pattern: typing.Optional[str] = '', + group_by: typing.Optional[str] = '', + map_directory: typing.Optional[bool] = False, + background_correction: typing.Optional[bool] = False, + out_dir: typing.Optional[Path] = OUT_PATH + ): + self.name = name + self.file_pattern = file_pattern + self.out_file_pattern = out_file_pattern + self.image_pattern = image_pattern + self.seg_pattern = seg_pattern + self.layout = layout + self.pyramid_type = pyramid_type + self.image_type = image_type + self.ff_pattern = ff_pattern + self.df_pattern = df_pattern + self.group_by = group_by + self.map_directory = map_directory + self.background_correction = background_correction + self.out_dir = out_dir + self.adapters_path = Path(__file__).resolve().parents[4].joinpath("cwl_adapters") + self.work_dir = Path.cwd() + + def _move_outputs(self) -> None: + """Move output files and directories to the specified output directory.""" + logger.info("Moving directories and JSON files to output directory.") + dir_names = {"autogenerated", "cachedir", "provenance", "outdir"} + + # Move specified directories + for directory in self.work_dir.iterdir(): + if directory.name in dir_names: + shutil.move(directory, self.out_dir) + + # Move JSON files + for json_file in self.work_dir.rglob("*.json"): + shutil.move(json_file, self.out_dir) + + @staticmethod + def _to_camel_case(name: str) -> str: + """Convert a plugin name to camel case.""" + return re.sub(r"(_|-)+", " ", name).title().replace(" ", "") + + @staticmethod + def _extract_file_extension(pattern: str) -> str: + """Extract and return the string after a period in the pattern.""" + match = re.search(r"\.(.*)", pattern) + return f".*.{match.group(1)}" if match else "" + + @staticmethod + def _escape_parentheses(pattern: str) -> str: + """Escape parentheses and generate patterns for flatfield and darkfield correction.""" + pattern = re.sub(r"(\()|(\))", r"\\\1\2", pattern) + return pattern.split("_c")[0] + "_c{c:d}" + re.sub(r"\d", "", pattern.split("_c")[1]) + + def create_step(self, plugin_url: str) -> Step: + """Create a step for the workflow based on the plugin manifest.""" + manifest = dict(pp.submit_plugin(plugin_url)) + plugin_version = str(manifest['version']) + cwl_tool = pp.get_plugin(self._to_camel_case(manifest['name']), plugin_version).save_cwl( + self.adapters_path.joinpath(f"{self._to_camel_case(manifest['name'])}.cwl") + ) + self._modify_cwl() + return Step(cwl_tool) + + def _get_manifest_url(self, plugin_name: str) -> str: + """Retrieve the URL for the plugin manifest from GitHub.""" + + return MANIFEST_URLS.get(plugin_name, "") + + def _modify_cwl(self) -> None: + """Modify CWL files to include environmental variables and permissions.""" + for cwl_file in self.adapters_path.rglob("*.cwl"): + if "cwl" in cwl_file.name: + try: + with cwl_file.open("r") as file: + config = yaml.safe_load(file) + config.setdefault("requirements", {}) + config["requirements"]["NetworkAccess"] = {"networkAccess": True} + config["requirements"]["EnvVarRequirement"] = {"envDef": {"HOME": "/home/polusai"}} + with cwl_file.open("w") as out_file: + yaml.dump(config, out_file) + except FileNotFoundError: + logger.error(f"Error processing file: {cwl_file}") + + def workflow(self) -> None: + """ + A CWL visualization pipeline. 
+ """ + # Step: BBBC Download + bbbc = self.create_step(self._get_manifest_url("bbbc_download")) + bbbc.name = self.name + bbbc.outDir = Path("bbbc.outDir") + + # Step: File Renaming + rename = self.create_step(self._get_manifest_url("file_renaming")) + rename.filePattern = self.file_pattern + rename.outFilePattern = self.out_file_pattern + rename.mapDirectory = self.map_directory + rename.inpDir = bbbc.outDir + rename.outDir = Path("rename.outDir") + + # Step: OME Converter + ome_converter = self.create_step(self._get_manifest_url("ome_converter")) + ome_converter.filePattern = self._extract_file_extension(self.out_file_pattern) + ome_converter.fileExtension = ".ome.tif" + ome_converter.inpDir = rename.outDir + ome_converter.outDir = Path("ome_converter.outDir") + + # Optional: Background correction + if self.background_correction: + estimate_flatfield = self.create_step(self._get_manifest_url("estimate_flatfield")) + estimate_flatfield.inpDir = ome_converter.outDir + estimate_flatfield.filePattern = self.image_pattern + estimate_flatfield.groupBy = self.group_by + estimate_flatfield.getDarkfield = True + estimate_flatfield.outDir = Path("estimate_flatfield.outDir") + + # Apply Flatfield + apply_flatfield = self.create_step(self._get_manifest_url("apply_flatfield")) + apply_flatfield.imgDir = ome_converter.outDir + apply_flatfield.imgPattern = self.image_pattern + apply_flatfield.ffDir = estimate_flatfield.outDir + apply_flatfield.ffPattern = self.ff_pattern + apply_flatfield.dfPattern = self.df_pattern + apply_flatfield.outDir = Path("apply_flatfield.outDir") + + + # Montage + montage = self.create_step(self._get_manifest_url("montage_url")) + montage.inpDir = apply_flatfield.outDir if self.background_correction else ome_converter.outDir + montage.filePattern = self.image_pattern + montage.layout = self.layout + montage.outDir = Path("montage.outDir") + + # # Image Assembler + image_assembler = self.create_step( + self._get_manifest_url("image_assembler_url") + ) + image_assembler.imgPath = apply_flatfield.outDir if self.background_correction else ome_converter.outDir + image_assembler.stitchPath = montage.outDir + image_assembler.outDir = Path("image_assembler.outDir") + + + # Precompute Slide + precompute_slide = self.create_step( + self._get_manifest_url("precompute_slide_url") + ) + precompute_slide.pyramidType = self.pyramid_type + precompute_slide.imageType = self.image_type + precompute_slide.inpDir = image_assembler.outDir + precompute_slide.outDir = Path("precompute_slide.outDir") + + logger.info("Initiating CWL Visualization Workflow!!!") + steps = [ + bbbc, + rename, + ome_converter, + estimate_flatfield if self.background_correction else None, + apply_flatfield if self.background_correction else None, + montage, + image_assembler, + precompute_slide + ] + workflow = Workflow(steps, f"{self.name}_workflow") + # Compile and run using WIC python API + + workflow.compile() + workflow.run() + self._move_outputs() + logger.info("Completed CWL nuclear segmentation workflow.") + + return \ No newline at end of file diff --git a/src/image/workflows/utils.py b/src/image/workflows/utils.py index 7daa9b7..9ab3b70 100644 --- a/src/image/workflows/utils.py +++ b/src/image/workflows/utils.py @@ -7,9 +7,49 @@ GITHUB_TAG = "https://raw.githubusercontent.com" +OUT_PATH = Path(__file__).resolve().parents[3] -ANALYSIS_KEYS = ["name", "file_pattern", "out_file_pattern", "image_pattern", "seg_pattern", "ff_pattern", "df_pattern", "group_by", "map_directory", "features", "file_extension", 
"background_correction"] -SEG_KEYS = ["name", "file_pattern", "out_file_pattern", "image_pattern", "seg_pattern", "ff_pattern", "df_pattern", "group_by", "map_directory", "background_correction"] + + +MANIFEST_URLS = { + "bbbc_download": f"{GITHUB_TAG}/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json", + "file_renaming": f"{GITHUB_TAG}/hamshkhawar/image-tools/filepattern_filerenaming/formats/file-renaming-tool/plugin.json", + "ome_converter": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/formats/ome-converter-tool/plugin.json", + "estimate_flatfield": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/regression/basic-flatfield-estimation-tool/plugin.json", + "apply_flatfield": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/transforms/images/apply-flatfield-tool/plugin.json", + "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/kaggle-nucleiseg/segmentation/kaggle-nuclei-segmentation-tool/plugin.json", + "ftl_plugin": f"{GITHUB_TAG}/nishaq503/image-tools/fix/ftl-label/transforms/images/polus-ftl-label-plugin/plugin.json", + "nyxus_plugin": f"{GITHUB_TAG}/hamshkhawar/image-tools/refs/heads/nyxus_fix_entrypoint/features/nyxus-tool/plugin.json", + "montage_url" :f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/transforms/images/montage-tool/plugin.json", + "image_assembler_url": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/transforms/images/image-assembler-tool/plugin.json", + "precompute_slide_url": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/visualization/precompute-slide-tool/plugin.json" + } + + +# Define keys as frozensets for immutability +ANALYSIS_KEYS = frozenset([ + "name", "file_pattern", "out_file_pattern", "image_pattern", "seg_pattern", + "ff_pattern", "df_pattern", "group_by", "map_directory", "features", + "file_extension", "background_correction" +]) + +SEG_KEYS = frozenset([ + "name", "file_pattern", "out_file_pattern", "image_pattern", "seg_pattern", + "ff_pattern", "df_pattern", "group_by", "map_directory", "background_correction" +]) + +VIZ_KEYS = frozenset([ + "name", "file_pattern", "out_file_pattern", "image_pattern", "seg_pattern", + "layout", "pyramid_type", "image_type", "ff_pattern", "df_pattern", "group_by", + "map_directory", "background_correction" +]) + +# Mapping workflows to their respective keys +WORKFLOW_KEYS = { + "analysis": ANALYSIS_KEYS, + "segmentation": SEG_KEYS, + "visualization": VIZ_KEYS, +} class DataModel(pydantic.BaseModel): @@ -17,52 +57,44 @@ class DataModel(pydantic.BaseModel): class LoadYaml(pydantic.BaseModel): - """Validation of Dataset yaml.""" - workflow:str + """Validation of Dataset YAML.""" + workflow: str config_path: Union[str, Path] @pydantic.validator("config_path", pre=True) @classmethod - def validate_path(cls, value: Union[str, Path]) -> Union[str, Path]: - """Validation of Paths.""" - if not Path(value).exists(): - msg = f"{value} does not exist! Please do check it again" - raise ValueError(msg) - if isinstance(value, str): - return Path(value) - return value - + def validate_path(cls, value: Union[str, Path]) -> Path: + """Validate the configuration file path.""" + path = Path(value) + if not path.exists(): + raise ValueError(f"{value} does not exist! 
Please check the path again.") + return path + @pydantic.validator("workflow", pre=True) @classmethod def validate_workflow_name(cls, value: str) -> str: - """Validation of workflow name.""" - if not value in ["analysis", "segmentation", "visualization"]: - msg = f"Please choose a valid workflow name i-e analysis segmentation visualization" - raise ValueError(msg) + """Validate workflow name.""" + valid_workflows = WORKFLOW_KEYS.keys() + if value not in valid_workflows: + raise ValueError(f"Invalid workflow: {value}. Please choose one of {', '.join(valid_workflows)}.") return value def parse_yaml(self) -> Dict[str, Union[str, bool]]: - """Parsing yaml configuration file for each dataset.""" - - with open(f'{self.config_path}','r') as f: + """Parse the YAML configuration file for each dataset.""" + with open(self.config_path, 'r') as f: data = yaml.safe_load(f) - check_values = any([v for _, v in data.items() if f is None]) + # Check missing values in the YAML + if any(v is None for v in data.values()): + raise ValueError("All parameters are not defined! Please check the YAML file.") - if check_values is True: - msg = f"All the parameters are not defined! Please do check it again" - raise ValueError(msg) - + # Validate keys against the workflow's expected keys + self._validate_workflow_keys(data) - if self.workflow == "analysis": - if data['background_correction'] == True: - if list(data.keys()) != ANALYSIS_KEYS: - msg = f"Please do check parameters again for analysis workflow!!" - raise ValueError(msg) - - if self.workflow == "segmentation": - if data['background_correction'] == True: - if list(data.keys()) != SEG_KEYS: - msg = f"Please do check parameters again for segmentation workflow!!" - raise ValueError(msg) return data + + def _validate_workflow_keys(self, data: Dict[str, Union[str, bool]]) -> None: + """Validate that the keys in the YAML match the expected keys for the selected workflow.""" + expected_keys = WORKFLOW_KEYS[self.workflow] + if data.get("background_correction", False) and set(data.keys()) != expected_keys: + raise ValueError(f"Invalid parameters for {self.workflow} workflow. Expected keys: {expected_keys}") \ No newline at end of file From 5dc35ceeeb517d5a848f2e94a2d5f8c92ececa77 Mon Sep 17 00:00:00 2001 From: Hamdah Shafqat Abbasi Date: Mon, 30 Sep 2024 18:09:58 -0400 Subject: [PATCH 06/11] fix path and updated documentation --- README.md | 78 +++++++++++++------ src/image/workflows/cwl_analysis.py | 10 ++- .../workflows/cwl_nuclear_segmentation.py | 8 +- src/image/workflows/cwl_visualization.py | 10 ++- src/image/workflows/utils.py | 4 +- 5 files changed, 75 insertions(+), 35 deletions(-) diff --git a/README.md b/README.md index a9bef17..592f585 100644 --- a/README.md +++ b/README.md @@ -1,31 +1,56 @@ -# Common Workflow Language (CWL) Workflows +# Sophios Workflows for Imaging Datasets CWL feature extraction workflow for imaging dataset -## Workflow Steps: +## Workflow Stepup: -create a [Conda](https://conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#activating-an-environment) environment using python = ">=3.9,<3.12" +Create a [Conda](https://conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#activating-an-environment) environment with Python between versions 3.9 and 3.11 -#### 1. Install polus-plugins. 
+``` +conda create -n image-workflow-env python=">=3.9,<3.12" +conda activate image-workflow-env +``` -- clone a image-tools repository -`git clone https://github.com/camilovelezr/image-tools.git ../` -- cd `image-tools` -- create a new branch -`git checkout -b hd2 remotes/origin/hd2` +#### 1. Install image-workflows. + +- clone the image-workflows repository +```bash +git clone https://github.com/PolusAI/image-workflows.git +``` +- Navigate to the cloned directory `image-workflows` +```bash +cd image-workflows +``` +- Switch to the sophios_workflow branch: +```bash +git checkout -b sophios_workflow remotes/origin/sophios_workflow +``` - `pip install .` -#### 2. Install workflow-inference-compiler. -- clone a workflow-inference-compiler repository -`git clone https://github.com/camilovelezr/workflow-inference-compiler.git ../` -- cd `workflow-inference-compiler` -- create a new branch -`git checkout -b hd2 remotes/origin/hd2` -- `pip install -e ".[all]"` +#### 3. Generate and Configure the JSON Configuration File. +Execute the following command to generate the configuration file in your home directory `/home/user/wic/` -#### 3. Install image-workflow. -- cd `image-workflows` -- poetry install +```bash +sophios --generate_config +``` + +Directory and file structure after running the command: +``` +├── cwl_adapters +├── examples +└── global_config.json + +``` +Edit the `global_config.json` file to update the `search_paths_cwl`: + +``` + "search_paths_cwl": { + "global": [ + "/home/user/image-workflows/cwl_adapters" + ], + "gpu": [] + }, +``` #### Note: Ensure that the [docker-desktop](https://www.docker.com/products/docker-desktop/) is running in the background. To verify that it's operational, you can use the following command: @@ -33,7 +58,7 @@ Ensure that the [docker-desktop](https://www.docker.com/products/docker-desktop/ This command will launch the `docker/getting-started container` in detached mode (-d flag), exposing port 80 on your local machine (-p 80:80). It's a simple way to test if Docker Desktop is functioning correctly. ## Details -This workflow integrates eight distinct plugins, starting from data retrieval from [Broad Bioimage Benchmark Collection](https://bbbc.broadinstitute.org/), renaming files, correcting uneven illumination, segmenting nuclear objects, and culminating in the extraction of features from identified objects +This workflow integrates eleven distinct plugins, starting from data retrieval from [Broad Bioimage Benchmark Collection](https://bbbc.broadinstitute.org/), renaming files, correcting uneven illumination, segmenting nuclear objects, and culminating in the extraction of features from identified objects Below are the specifics of the plugins employed in the workflow 1. [bbbc-download-plugin](https://github.com/saketprem/polus-plugins/tree/bbbc_download/utils/bbbc-download-plugin) @@ -43,21 +68,26 @@ Below are the specifics of the plugins employed in the workflow 5. [apply-flatfield-tool](https://github.com/PolusAI/image-tools/tree/master/transforms/images/apply-flatfield-tool) 6. [kaggle-nuclei-segmentation](https://github.com/hamshkhawar/image-tools/tree/kaggle-nuclei_seg/segmentation/kaggle-nuclei-segmentation) 7. [polus-ftl-label-plugin](https://github.com/hamshkhawar/image-tools/tree/kaggle-nuclei_seg/transforms/images/polus-ftl-label-plugin) -8. [nyxus-plugin](https://github.com/PolusAI/image-tools/tree/kaggle-nuclei_seg/features/nyxus-plugin) +8. [nyxus-tool](https://github.com/PolusAI/image-tools/tree/master/features/nyxus-tool) +9. 
[montage-tool](https://github.com/PolusAI/image-tools/tree/master/transforms/images/montage-tool) +10. [image-assembler-tool](https://github.com/PolusAI/image-tools/tree/master/transforms/images/image-assembler-tool) +11. [precompute-slide-tool](https://github.com/PolusAI/image-tools/tree/master/visualization/precompute-slide-tool) + ## Execute CWL workflows Three different CWL workflows can be executed for specific datasets 1. segmentation 2. analysis +3. visualization During the execution of the segmentation workflow, `1 to 7` plugins will be utilized. However, for executing the analysis workflow, `1 to 8` plugins will be employed. If a user wishes to execute a workflow for a new dataset, they can utilize a sample YAML file to input parameter values. This YAML file can be saved in the desired subdirectory of the `configuration` folder with the name `dataset.yml` If a user opts to run a workflow without background correction, they can set `background_correction` to false. In this case, the workflow will skip steps `4 and 5` -`python -m polus.image.workflows --name="BBBC001" --workflow=analysis` +`python -m image.workflows --name="BBBC001" --workflow=analysis --outDir=path/to/outputs` -A directory named `outputs` is generated, encompassing CLTs for each plugin, YAML files, and all outputs are stored within the `outdir` directory. +All outputs are stored within the `outDir` directory. ``` outputs ├── experiment @@ -102,4 +132,4 @@ outputs ``` #### Note: -Step 7 and step 8 are executed only in the case of the `analysis` workflow. \ No newline at end of file +Step 7 and step 8 are executed only in the case of the `analysis` workflow. `9-11` plugins will be used in `visualization` workflow \ No newline at end of file diff --git a/src/image/workflows/cwl_analysis.py b/src/image/workflows/cwl_analysis.py index b41b6e3..57bb9b4 100644 --- a/src/image/workflows/cwl_analysis.py +++ b/src/image/workflows/cwl_analysis.py @@ -36,6 +36,7 @@ class CWLAnalysisWorkflow: """ def __init__( self, + work_dir: Path, name: str, file_pattern: str, out_file_pattern: str, @@ -63,8 +64,10 @@ def __init__( self.file_extension=file_extension self.background_correction = background_correction self.out_dir = out_dir - self.adapters_path = Path(__file__).parent.parent.parent.parent.joinpath("cwl_adapters") - self.work_dir = Path.cwd() + self.work_dir = work_dir + self.adapters_path = self.work_dir.joinpath("cwl_adapters") + if not self.adapters_path.exists(): + self.adapters_path.mkdir(exist_ok=True, parents=True) def _move_outputs(self) -> None: """Move output files and directories to the specified output directory.""" @@ -146,7 +149,7 @@ def workflow(self) -> None: # Step: OME Converter ome_converter = self.create_step(self._get_manifest_url("ome_converter")) ome_converter.filePattern = self._extract_file_extension(self.out_file_pattern) - ome_converter.fileExtension = ".ome.tif" + # ome_converter.fileExtension = ".ome.tif" ome_converter.inpDir = rename.outDir ome_converter.outDir = Path("ome_converter.outDir") @@ -183,7 +186,6 @@ def workflow(self) -> None: # # ## Nyxus Plugin nyxus_plugin = self.create_step(self._get_manifest_url("nyxus_plugin")) - # nyxus_plugin = Step(clt_path='/Users/abbasih2/Documents/Job/Axle_Work/image-workflows/cwl_adapters/NyxusPlugin.cwl') nyxus_plugin.inpDir = apply_flatfield.outDir if self.background_correction else ome_converter.outDir nyxus_plugin.segDir = ftl_plugin.outDir nyxus_plugin.intPattern = self.image_pattern diff --git a/src/image/workflows/cwl_nuclear_segmentation.py 
b/src/image/workflows/cwl_nuclear_segmentation.py index cb75a3c..12fac7b 100644 --- a/src/image/workflows/cwl_nuclear_segmentation.py +++ b/src/image/workflows/cwl_nuclear_segmentation.py @@ -20,6 +20,7 @@ class CWLSegmentationWorkflow: A CWL Nuclear Segmentation pipeline to process imaging datasets. Attributes: + work_dir: Path to working directory. name: Name of the imaging dataset. file_pattern: Pattern for parsing raw filenames. out_file_pattern: Desired format for output filenames. @@ -34,6 +35,7 @@ class CWLSegmentationWorkflow: """ def __init__( self, + work_dir: Path, name: str, file_pattern: str, out_file_pattern: str, @@ -57,8 +59,10 @@ def __init__( self.map_directory = map_directory self.background_correction = background_correction self.out_dir = out_dir - self.adapters_path = Path(__file__).parent.parent.parent.parent.joinpath("cwl_adapters") - self.work_dir = Path.cwd() + self.work_dir = work_dir + self.adapters_path = self.work_dir.joinpath("cwl_adapters") + if not self.adapters_path.exists(): + self.adapters_path.mkdir(exist_ok=True, parents=True) def _move_outputs(self) -> None: """Move output files and directories to the specified output directory.""" diff --git a/src/image/workflows/cwl_visualization.py b/src/image/workflows/cwl_visualization.py index f3bf589..11fb8a3 100644 --- a/src/image/workflows/cwl_visualization.py +++ b/src/image/workflows/cwl_visualization.py @@ -20,6 +20,7 @@ class CWLVisualizationWorkflow: A CWL visualization pipeline to process imaging datasets. Attributes: + work_dir: Path to working directory. name: Name of the imaging dataset. file_pattern: Pattern for parsing raw filenames. out_file_pattern: Desired format for output filenames. @@ -36,6 +37,7 @@ class CWLVisualizationWorkflow: """ def __init__( self, + work_dir: Path, name: str, file_pattern: str, out_file_pattern: str, @@ -65,8 +67,10 @@ def __init__( self.map_directory = map_directory self.background_correction = background_correction self.out_dir = out_dir - self.adapters_path = Path(__file__).resolve().parents[4].joinpath("cwl_adapters") - self.work_dir = Path.cwd() + self.work_dir = work_dir + self.adapters_path = self.work_dir.joinpath("cwl_adapters") + if not self.adapters_path.exists(): + self.adapters_path.mkdir(exist_ok=True, parents=True) def _move_outputs(self) -> None: """Move output files and directories to the specified output directory.""" @@ -149,7 +153,7 @@ def workflow(self) -> None: # Step: OME Converter ome_converter = self.create_step(self._get_manifest_url("ome_converter")) ome_converter.filePattern = self._extract_file_extension(self.out_file_pattern) - ome_converter.fileExtension = ".ome.tif" + # ome_converter.fileExtension = ".ome.tif" ome_converter.inpDir = rename.outDir ome_converter.outDir = Path("ome_converter.outDir") diff --git a/src/image/workflows/utils.py b/src/image/workflows/utils.py index 9ab3b70..465fda0 100644 --- a/src/image/workflows/utils.py +++ b/src/image/workflows/utils.py @@ -7,14 +7,14 @@ GITHUB_TAG = "https://raw.githubusercontent.com" -OUT_PATH = Path(__file__).resolve().parents[3] +OUT_PATH = Path.cwd() MANIFEST_URLS = { "bbbc_download": f"{GITHUB_TAG}/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json", "file_renaming": f"{GITHUB_TAG}/hamshkhawar/image-tools/filepattern_filerenaming/formats/file-renaming-tool/plugin.json", - "ome_converter": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/formats/ome-converter-tool/plugin.json", + "ome_converter": 
f"{GITHUB_TAG}/hamshkhawar/image-tools/refs/heads/fix_endian_bug_omeconverter/formats/ome-converter-tool/plugin.json", "estimate_flatfield": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/regression/basic-flatfield-estimation-tool/plugin.json", "apply_flatfield": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/transforms/images/apply-flatfield-tool/plugin.json", "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/kaggle-nucleiseg/segmentation/kaggle-nuclei-segmentation-tool/plugin.json", From 24f7f83c6dcb4fecdbd3e0d3832a496cd6625e3c Mon Sep 17 00:00:00 2001 From: Hamdah Shafqat Abbasi Date: Tue, 1 Oct 2024 15:41:52 -0400 Subject: [PATCH 07/11] updating plugin manifest --- src/image/workflows/__main__.py | 27 ++++++++++--------- src/image/workflows/cwl_analysis.py | 2 +- .../workflows/cwl_nuclear_segmentation.py | 4 ++- src/image/workflows/utils.py | 4 +-- 4 files changed, 20 insertions(+), 17 deletions(-) diff --git a/src/image/workflows/__main__.py b/src/image/workflows/__main__.py index 7e25851..9f8f0e3 100644 --- a/src/image/workflows/__main__.py +++ b/src/image/workflows/__main__.py @@ -50,33 +50,34 @@ def main( logger.info(f"workflow = {workflow}") logger.info(f"outDir = {out_dir}") - config_path = Path(__file__).resolve().parents[3].joinpath(f"configuration/{workflow}/{name}.yml") + config_path = Path.cwd().joinpath(f"configuration/{workflow}/{name}.yml") + work_dir = Path.cwd() model = LoadYaml(workflow=workflow, config_path=config_path) params = model.parse_yaml() if out_dir == None: - out_dir = Path(__file__).parent.parent.parent.parent + out_dir = Path.cwd() params["out_dir"] = out_dir + params["work_dir"] = work_dir + if workflow == "analysis": + logger.info(f"Executing {workflow}!!!") + model = CWLAnalysisWorkflow(**params) + model.workflow() - # if workflow == "analysis": - # logger.info(f"Executing {workflow}!!!") - # model = CWLAnalysisWorkflow(**params) - # model.workflow() + if workflow == "segmentation": + logger.info(f"Executing {workflow}!!!") + model = CWLSegmentationWorkflow(**params) + model.workflow() - # if workflow == "segmentation": + # if workflow == "visualization": # logger.info(f"Executing {workflow}!!!") - # model = CWLSegmentationWorkflow(**params) + # model = CWLVisualizationWorkflow(**params) # model.workflow() - if workflow == "visualization": - logger.info(f"Executing {workflow}!!!") - model = CWLVisualizationWorkflow(**params) - model.workflow() - logger.info("Completed CWL workflow!!!") diff --git a/src/image/workflows/cwl_analysis.py b/src/image/workflows/cwl_analysis.py index 57bb9b4..167871f 100644 --- a/src/image/workflows/cwl_analysis.py +++ b/src/image/workflows/cwl_analysis.py @@ -149,7 +149,7 @@ def workflow(self) -> None: # Step: OME Converter ome_converter = self.create_step(self._get_manifest_url("ome_converter")) ome_converter.filePattern = self._extract_file_extension(self.out_file_pattern) - # ome_converter.fileExtension = ".ome.tif" + ome_converter.fileExtension = ".ome.tif" ome_converter.inpDir = rename.outDir ome_converter.outDir = Path("ome_converter.outDir") diff --git a/src/image/workflows/cwl_nuclear_segmentation.py b/src/image/workflows/cwl_nuclear_segmentation.py index 12fac7b..961bbd3 100644 --- a/src/image/workflows/cwl_nuclear_segmentation.py +++ b/src/image/workflows/cwl_nuclear_segmentation.py @@ -139,6 +139,7 @@ def workflow(self) -> None: rename.outFilePattern = self.out_file_pattern rename.mapDirectory = self.map_directory rename.inpDir = bbbc.outDir + # rename.inpDir = 
Path("/Users/abbasih2/Desktop/inp") rename.outDir = Path("rename.outDir") # Step: OME Converter @@ -180,7 +181,8 @@ def workflow(self) -> None: # Run the workflow steps = [ - bbbc, rename, ome_converter, + bbbc, + rename, ome_converter, estimate_flatfield if self.background_correction else None, apply_flatfield if self.background_correction else None, kaggle_segmentation, diff --git a/src/image/workflows/utils.py b/src/image/workflows/utils.py index 465fda0..52b9b77 100644 --- a/src/image/workflows/utils.py +++ b/src/image/workflows/utils.py @@ -14,10 +14,10 @@ MANIFEST_URLS = { "bbbc_download": f"{GITHUB_TAG}/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json", "file_renaming": f"{GITHUB_TAG}/hamshkhawar/image-tools/filepattern_filerenaming/formats/file-renaming-tool/plugin.json", - "ome_converter": f"{GITHUB_TAG}/hamshkhawar/image-tools/refs/heads/fix_endian_bug_omeconverter/formats/ome-converter-tool/plugin.json", + "ome_converter": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/formats/ome-converter-tool/plugin.json", "estimate_flatfield": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/regression/basic-flatfield-estimation-tool/plugin.json", "apply_flatfield": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/transforms/images/apply-flatfield-tool/plugin.json", - "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/kaggle-nucleiseg/segmentation/kaggle-nuclei-segmentation-tool/plugin.json", + "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/refs/heads/kaggle_update_dependencies/segmentation/kaggle-nuclei-segmentation-tool/plugin.json", "ftl_plugin": f"{GITHUB_TAG}/nishaq503/image-tools/fix/ftl-label/transforms/images/polus-ftl-label-plugin/plugin.json", "nyxus_plugin": f"{GITHUB_TAG}/hamshkhawar/image-tools/refs/heads/nyxus_fix_entrypoint/features/nyxus-tool/plugin.json", "montage_url" :f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/transforms/images/montage-tool/plugin.json", From 0d7aa6680e79c88d9c6cb8a3dc35c7fd83a0d80d Mon Sep 17 00:00:00 2001 From: Hamdah Shafqat Abbasi Date: Wed, 2 Oct 2024 13:11:53 -0400 Subject: [PATCH 08/11] refactored code --- cwl_adapters/BbbcDownload.cwl | 29 +++++++ cwl_adapters/FileRenaming.cwl | 41 +++++++++ cwl_adapters/FtlLabel.cwl | 37 ++++++++ cwl_adapters/ImageAssembler.cwl | 41 +++++++++ cwl_adapters/KaggleNucleiSegmentation.cwl | 37 ++++++++ cwl_adapters/Montage.cwl | 49 +++++++++++ cwl_adapters/NyxusPlugin.cwl | 61 +++++++++++++ cwl_adapters/OmeConverter.cwl | 37 ++++++++ cwl_adapters/PrecomputeSlide.cwl | 41 +++++++++ pyproject.toml | 7 +- src/image/workflows/__init__.py | 3 +- src/image/workflows/__main__.py | 86 +++++++++++++------ src/image/workflows/cwl_analysis.py | 3 +- .../workflows/cwl_nuclear_segmentation.py | 1 - src/image/workflows/cwl_visualization.py | 6 +- 15 files changed, 445 insertions(+), 34 deletions(-) create mode 100644 cwl_adapters/BbbcDownload.cwl create mode 100644 cwl_adapters/FileRenaming.cwl create mode 100644 cwl_adapters/FtlLabel.cwl create mode 100644 cwl_adapters/ImageAssembler.cwl create mode 100644 cwl_adapters/KaggleNucleiSegmentation.cwl create mode 100644 cwl_adapters/Montage.cwl create mode 100644 cwl_adapters/NyxusPlugin.cwl create mode 100644 cwl_adapters/OmeConverter.cwl create mode 100644 cwl_adapters/PrecomputeSlide.cwl diff --git a/cwl_adapters/BbbcDownload.cwl b/cwl_adapters/BbbcDownload.cwl new file mode 100644 index 0000000..9eeba93 --- /dev/null +++ b/cwl_adapters/BbbcDownload.cwl @@ -0,0 +1,29 @@ +class: 
CommandLineTool +cwlVersion: v1.2 +inputs: + name: + inputBinding: + prefix: --name + type: string + outDir: + inputBinding: + prefix: --outDir + type: Directory +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/bbbc-download-plugin:0.1.0-dev1 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/FileRenaming.cwl b/cwl_adapters/FileRenaming.cwl new file mode 100644 index 0000000..1e154ed --- /dev/null +++ b/cwl_adapters/FileRenaming.cwl @@ -0,0 +1,41 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + filePattern: + inputBinding: + prefix: --filePattern + type: string + inpDir: + inputBinding: + prefix: --inpDir + type: Directory + mapDirectory: + inputBinding: + prefix: --mapDirectory + type: boolean? + outDir: + inputBinding: + prefix: --outDir + type: Directory + outFilePattern: + inputBinding: + prefix: --outFilePattern + type: string +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/file-renaming-tool:0.2.4-dev1 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/FtlLabel.cwl b/cwl_adapters/FtlLabel.cwl new file mode 100644 index 0000000..227be2a --- /dev/null +++ b/cwl_adapters/FtlLabel.cwl @@ -0,0 +1,37 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + binarizationThreshold: + inputBinding: + prefix: --binarizationThreshold + type: double + connectivity: + inputBinding: + prefix: --connectivity + type: string + inpDir: + inputBinding: + prefix: --inpDir + type: Directory + outDir: + inputBinding: + prefix: --outDir + type: Directory +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/ftl-label-plugin:0.3.12-dev5 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/ImageAssembler.cwl b/cwl_adapters/ImageAssembler.cwl new file mode 100644 index 0000000..8b06233 --- /dev/null +++ b/cwl_adapters/ImageAssembler.cwl @@ -0,0 +1,41 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + imgPath: + inputBinding: + prefix: --imgPath + type: Directory + outDir: + inputBinding: + prefix: --outDir + type: Directory + preview: + inputBinding: + prefix: --preview + type: boolean? + stitchPath: + inputBinding: + prefix: --stitchPath + type: Directory + timesliceNaming: + inputBinding: + prefix: --timesliceNaming + type: boolean? 
+outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/image-assembler-tool:1.4.2 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/KaggleNucleiSegmentation.cwl b/cwl_adapters/KaggleNucleiSegmentation.cwl new file mode 100644 index 0000000..66ec970 --- /dev/null +++ b/cwl_adapters/KaggleNucleiSegmentation.cwl @@ -0,0 +1,37 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + filePattern: + inputBinding: + prefix: --filePattern + type: string? + inpDir: + inputBinding: + prefix: --inpDir + type: Directory + outDir: + inputBinding: + prefix: --outDir + type: Directory + preview: + inputBinding: + prefix: --preview + type: boolean? +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/kaggle-nuclei-segmentation-tool:0.1.5-dev1 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/Montage.cwl b/cwl_adapters/Montage.cwl new file mode 100644 index 0000000..e3ee6ea --- /dev/null +++ b/cwl_adapters/Montage.cwl @@ -0,0 +1,49 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + filePattern: + inputBinding: + prefix: --filePattern + type: string + flipAxis: + inputBinding: + prefix: --flipAxis + type: string? + gridSpacing: + inputBinding: + prefix: --gridSpacing + type: string? + imageSpacing: + inputBinding: + prefix: --imageSpacing + type: string? + inpDir: + inputBinding: + prefix: --inpDir + type: Directory + layout: + inputBinding: + prefix: --layout + type: string? + outDir: + inputBinding: + prefix: --outDir + type: Directory +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/montage-tool:0.5.1 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/NyxusPlugin.cwl b/cwl_adapters/NyxusPlugin.cwl new file mode 100644 index 0000000..4d28a53 --- /dev/null +++ b/cwl_adapters/NyxusPlugin.cwl @@ -0,0 +1,61 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + features: + inputBinding: + prefix: --features + type: string? + fileExtension: + inputBinding: + prefix: --fileExtension + type: string + inpDir: + inputBinding: + prefix: --inpDir + type: Directory + intPattern: + inputBinding: + prefix: --intPattern + type: string + neighborDist: + inputBinding: + prefix: --neighborDist + type: double? + outDir: + inputBinding: + prefix: --outDir + type: Directory + pixelPerMicron: + inputBinding: + prefix: --pixelPerMicron + type: double? + segDir: + inputBinding: + prefix: --segDir + type: Directory + segPattern: + inputBinding: + prefix: --segPattern + type: string + singleRoi: + inputBinding: + prefix: --singleRoi + type: boolean? 
+outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/nyxus-tool:0.1.8 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/OmeConverter.cwl b/cwl_adapters/OmeConverter.cwl new file mode 100644 index 0000000..66cefde --- /dev/null +++ b/cwl_adapters/OmeConverter.cwl @@ -0,0 +1,37 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + fileExtension: + inputBinding: + prefix: --fileExtension + type: string + filePattern: + inputBinding: + prefix: --filePattern + type: string + inpDir: + inputBinding: + prefix: --inpDir + type: Directory + outDir: + inputBinding: + prefix: --outDir + type: Directory +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/ome-converter-plugin:0.3.2-dev2 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/PrecomputeSlide.cwl b/cwl_adapters/PrecomputeSlide.cwl new file mode 100644 index 0000000..4dc1234 --- /dev/null +++ b/cwl_adapters/PrecomputeSlide.cwl @@ -0,0 +1,41 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + filePattern: + inputBinding: + prefix: --filePattern + type: string? + imageType: + inputBinding: + prefix: --imageType + type: string? + inpDir: + inputBinding: + prefix: --inpDir + type: Directory + outDir: + inputBinding: + prefix: --outDir + type: Directory + pyramidType: + inputBinding: + prefix: --pyramidType + type: string +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/precompute-slide-tool:1.7.2 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/pyproject.toml b/pyproject.toml index b0d1cb8..ba436a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "image-workflows" -version = "0.1.1-dev1" +version = "0.1.2-dev0" description = "Build and execute pipelines of polus plugins on Compute." 
authors = ["Hamdah Shafqat Abbasi "] readme = "README.md" @@ -11,10 +11,11 @@ python = ">=3.9,<3.12" typer = "^0.9.0" pyyaml = "^6.0.1" pydantic = "^2.6.1" +graphviz = "^0.20.3" +toil = "7.0.0" sophios = "^0.1.4" cwlref-runner = "1.0" -cwltool = "^3.1.20240909164951" -graphviz = "^0.20.3" +cwltool = "3.1.20240508115724" polus-tools = { git = "https://github.com/PolusAI/tools.git" } diff --git a/src/image/workflows/__init__.py b/src/image/workflows/__init__.py index 15527f6..d50ea40 100644 --- a/src/image/workflows/__init__.py +++ b/src/image/workflows/__init__.py @@ -1,2 +1,3 @@ from .cwl_analysis import CWLAnalysisWorkflow -from .cwl_nuclear_segmentation import CWLSegmentationWorkflow \ No newline at end of file +from .cwl_nuclear_segmentation import CWLSegmentationWorkflow +from .cwl_visualization import CWLVisualizationWorkflow \ No newline at end of file diff --git a/src/image/workflows/__main__.py b/src/image/workflows/__main__.py index 9f8f0e3..99013b2 100644 --- a/src/image/workflows/__main__.py +++ b/src/image/workflows/__main__.py @@ -22,6 +22,14 @@ logger.setLevel(logging.INFO) +# Mapping of workflow names to their corresponding classes +WORKFLOW_CLASSES = { + "analysis": CWLAnalysisWorkflow, + "segmentation": CWLSegmentationWorkflow, + "visualization": CWLVisualizationWorkflow +} + + @app.command() def main( name: str = typer.Option( @@ -44,7 +52,14 @@ def main( ) ) -> None: - """Execute CWL Workflow.""" + """ + Execute the specified CWL Workflow. + + Attributes: + name (str): The name of the imaging dataset. + workflow (str): The name of the CWL workflow to execute. + out_dir (Path): The output directory for workflow results. + """ logger.info(f"name = {name}") logger.info(f"workflow = {workflow}") @@ -53,33 +68,54 @@ def main( config_path = Path.cwd().joinpath(f"configuration/{workflow}/{name}.yml") work_dir = Path.cwd() - - - model = LoadYaml(workflow=workflow, config_path=config_path) - params = model.parse_yaml() - if out_dir == None: - out_dir = Path.cwd() - params["out_dir"] = out_dir - params["work_dir"] = work_dir - - - if workflow == "analysis": - logger.info(f"Executing {workflow}!!!") - model = CWLAnalysisWorkflow(**params) - model.workflow() - - if workflow == "segmentation": - logger.info(f"Executing {workflow}!!!") - model = CWLSegmentationWorkflow(**params) - model.workflow() - - # if workflow == "visualization": - # logger.info(f"Executing {workflow}!!!") - # model = CWLVisualizationWorkflow(**params) + try: + # Load the YAML configuration + model = LoadYaml(workflow=workflow, config_path=config_path) + params = model.parse_yaml() + + # Set output directory + out_dir = out_dir or Path.cwd() + params["out_dir"] = out_dir + params["work_dir"] = work_dir + + # Get the workflow class + workflow_class = WORKFLOW_CLASSES.get(workflow) + if not workflow_class: + logger.error(f"Workflow '{workflow}' is not recognized. 
Available workflows: {list(WORKFLOW_CLASSES.keys())}") + raise ValueError(f"Unknown workflow: {workflow}") + + logger.info(f"Executing {workflow} workflow.") + # Initialize and execute the workflow class + workflow_instance = workflow_class(**params) + workflow_instance.workflow() + + except FileNotFoundError as e: + logger.error(f"Configuration file not found: {e}") + except Exception as e: + logger.error(f"An error occurred while executing the workflow: {e}") + raise + + # model = LoadYaml(workflow=workflow, config_path=config_path) + # params = model.parse_yaml() + + # out_dir = out_dir or Path.cwd() + + # params["out_dir"] = out_dir + # params["work_dir"] = work_dir + + # # Validate workflow and execute corresponding class + # workflow_class = WORKFLOW_CLASSES.get(workflow) + # print(workflow_class) + # if workflow_class: + # logger.info(f"Executing {workflow} workflow.") + # model = workflow_class(**params) # model.workflow() + # else: + # logger.error(f"Invalid workflow: {workflow}. Available options: {', '.join(WORKFLOW_CLASSES.keys())}") + # raise ValueError(f"Workflow '{workflow}' is not recognized.") + # logger.info(f"Completed {workflow} workflow execution!") - logger.info("Completed CWL workflow!!!") if __name__ == "__main__": diff --git a/src/image/workflows/cwl_analysis.py b/src/image/workflows/cwl_analysis.py index 167871f..a6bc0b0 100644 --- a/src/image/workflows/cwl_analysis.py +++ b/src/image/workflows/cwl_analysis.py @@ -197,6 +197,7 @@ def workflow(self) -> None: nyxus_plugin.outDir = Path("nyxus_plugin.outDir") # Run the workflow + steps = [ bbbc, rename, ome_converter, estimate_flatfield if self.background_correction else None, @@ -211,5 +212,5 @@ def workflow(self) -> None: workflow.compile() workflow.run() self._move_outputs() - logger.info("Completed CWL nuclear segmentation workflow.") + logger.info("Completed CWL analysis workflow.") return diff --git a/src/image/workflows/cwl_nuclear_segmentation.py b/src/image/workflows/cwl_nuclear_segmentation.py index 961bbd3..3ba1087 100644 --- a/src/image/workflows/cwl_nuclear_segmentation.py +++ b/src/image/workflows/cwl_nuclear_segmentation.py @@ -139,7 +139,6 @@ def workflow(self) -> None: rename.outFilePattern = self.out_file_pattern rename.mapDirectory = self.map_directory rename.inpDir = bbbc.outDir - # rename.inpDir = Path("/Users/abbasih2/Desktop/inp") rename.outDir = Path("rename.outDir") # Step: OME Converter diff --git a/src/image/workflows/cwl_visualization.py b/src/image/workflows/cwl_visualization.py index 11fb8a3..cc401a2 100644 --- a/src/image/workflows/cwl_visualization.py +++ b/src/image/workflows/cwl_visualization.py @@ -153,7 +153,7 @@ def workflow(self) -> None: # Step: OME Converter ome_converter = self.create_step(self._get_manifest_url("ome_converter")) ome_converter.filePattern = self._extract_file_extension(self.out_file_pattern) - # ome_converter.fileExtension = ".ome.tif" + ome_converter.fileExtension = ".ome.tif" ome_converter.inpDir = rename.outDir ome_converter.outDir = Path("ome_converter.outDir") @@ -210,7 +210,7 @@ def workflow(self) -> None: apply_flatfield if self.background_correction else None, montage, image_assembler, - precompute_slide + precompute_slide # At the moment precompute slide is not working. 
PR will be open for the fix ] workflow = Workflow(steps, f"{self.name}_workflow") # Compile and run using WIC python API @@ -218,6 +218,6 @@ def workflow(self) -> None: workflow.compile() workflow.run() self._move_outputs() - logger.info("Completed CWL nuclear segmentation workflow.") + logger.info("Completed CWL visualization workflow.") return \ No newline at end of file From 533ed350104720a9625ec4c6cd8361742c0cd0d8 Mon Sep 17 00:00:00 2001 From: Hamdah Shafqat Abbasi Date: Wed, 2 Oct 2024 13:29:39 -0400 Subject: [PATCH 09/11] updated nyxus CLT --- cwl_adapters/NyxusPlugin.cwl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cwl_adapters/NyxusPlugin.cwl b/cwl_adapters/NyxusPlugin.cwl index 4d28a53..bfbe6bb 100644 --- a/cwl_adapters/NyxusPlugin.cwl +++ b/cwl_adapters/NyxusPlugin.cwl @@ -48,7 +48,7 @@ outputs: type: Directory requirements: DockerRequirement: - dockerPull: polusai/nyxus-tool:0.1.8 + dockerPull: polusai/nyxus-tool:0.1.8-dev0 EnvVarRequirement: envDef: HOME: /home/polusai From 24d21156a31701a719ef087a52cf981378c9b307 Mon Sep 17 00:00:00 2001 From: Hamdah Shafqat Abbasi Date: Thu, 3 Oct 2024 12:01:40 -0400 Subject: [PATCH 10/11] fix CLTs --- .vscode/settings.json | 16 -------- cwl_adapters/ApplyFlatfield.cwl | 49 +++++++++++++++++++++++ cwl_adapters/BasicFlatfieldEstimation.cwl | 41 +++++++++++++++++++ cwl_adapters/KaggleNucleiSegmentation.cwl | 2 +- cwl_adapters/OmeConverter.cwl | 2 +- 5 files changed, 92 insertions(+), 18 deletions(-) delete mode 100644 .vscode/settings.json create mode 100644 cwl_adapters/ApplyFlatfield.cwl create mode 100644 cwl_adapters/BasicFlatfieldEstimation.cwl diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index b76ca5e..0000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "files.associations": {"*.wic": "yaml"}, - "yaml.schemas": { - "autogenerated/schemas/wic.json": "*.wic" - }, - "yaml.customTags": [ - "!&", - "!*", - "!ii", - "!ii mapping", - "!ii sequence" - ], - "[yaml]": { - "editor.suggest.showWords": false - }, -} \ No newline at end of file diff --git a/cwl_adapters/ApplyFlatfield.cwl b/cwl_adapters/ApplyFlatfield.cwl new file mode 100644 index 0000000..b27cef3 --- /dev/null +++ b/cwl_adapters/ApplyFlatfield.cwl @@ -0,0 +1,49 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + dfPattern: + inputBinding: + prefix: --dfPattern + type: string? + ffDir: + inputBinding: + prefix: --ffDir + type: Directory + ffPattern: + inputBinding: + prefix: --ffPattern + type: string + imgDir: + inputBinding: + prefix: --imgDir + type: Directory + imgPattern: + inputBinding: + prefix: --imgPattern + type: string + outDir: + inputBinding: + prefix: --outDir + type: Directory + preview: + inputBinding: + prefix: --preview + type: boolean? 
+outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/apply-flatfield-tool:2.0.1 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/BasicFlatfieldEstimation.cwl b/cwl_adapters/BasicFlatfieldEstimation.cwl new file mode 100644 index 0000000..c695d3a --- /dev/null +++ b/cwl_adapters/BasicFlatfieldEstimation.cwl @@ -0,0 +1,41 @@ +class: CommandLineTool +cwlVersion: v1.2 +inputs: + filePattern: + inputBinding: + prefix: --filePattern + type: string + getDarkfield: + inputBinding: + prefix: --getDarkfield + type: boolean + groupBy: + inputBinding: + prefix: --groupBy + type: string? + inpDir: + inputBinding: + prefix: --inpDir + type: Directory + outDir: + inputBinding: + prefix: --outDir + type: Directory +outputs: + outDir: + outputBinding: + glob: $(inputs.outDir.basename) + type: Directory +requirements: + DockerRequirement: + dockerPull: polusai/basic-flatfield-estimation-tool:2.1.2 + EnvVarRequirement: + envDef: + HOME: /home/polusai + InitialWorkDirRequirement: + listing: + - entry: $(inputs.outDir) + writable: true + InlineJavascriptRequirement: {} + NetworkAccess: + networkAccess: true diff --git a/cwl_adapters/KaggleNucleiSegmentation.cwl b/cwl_adapters/KaggleNucleiSegmentation.cwl index 66ec970..f27f358 100644 --- a/cwl_adapters/KaggleNucleiSegmentation.cwl +++ b/cwl_adapters/KaggleNucleiSegmentation.cwl @@ -24,7 +24,7 @@ outputs: type: Directory requirements: DockerRequirement: - dockerPull: polusai/kaggle-nuclei-segmentation-tool:0.1.5-dev1 + dockerPull: polusai/kaggle-nuclei-segmentation-tool:0.1.5-dev2 EnvVarRequirement: envDef: HOME: /home/polusai diff --git a/cwl_adapters/OmeConverter.cwl b/cwl_adapters/OmeConverter.cwl index 66cefde..e91bf10 100644 --- a/cwl_adapters/OmeConverter.cwl +++ b/cwl_adapters/OmeConverter.cwl @@ -24,7 +24,7 @@ outputs: type: Directory requirements: DockerRequirement: - dockerPull: polusai/ome-converter-plugin:0.3.2-dev2 + dockerPull: polusai/ome-converter-tool:0.3.2 EnvVarRequirement: envDef: HOME: /home/polusai From 5ac4a709d847d92d31fc417cbf8514dc248d6e79 Mon Sep 17 00:00:00 2001 From: Hamdah Shafqat Abbasi Date: Thu, 3 Oct 2024 12:07:32 -0400 Subject: [PATCH 11/11] updated url for omeconverter --- cwl_adapters/OmeConverter.cwl | 6 +----- src/image/workflows/utils.py | 2 +- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/cwl_adapters/OmeConverter.cwl b/cwl_adapters/OmeConverter.cwl index e91bf10..c66d2cf 100644 --- a/cwl_adapters/OmeConverter.cwl +++ b/cwl_adapters/OmeConverter.cwl @@ -1,10 +1,6 @@ class: CommandLineTool cwlVersion: v1.2 inputs: - fileExtension: - inputBinding: - prefix: --fileExtension - type: string filePattern: inputBinding: prefix: --filePattern @@ -24,7 +20,7 @@ outputs: type: Directory requirements: DockerRequirement: - dockerPull: polusai/ome-converter-tool:0.3.2 + dockerPull: polusai/ome-converter-tool:0.3.3-dev1 EnvVarRequirement: envDef: HOME: /home/polusai diff --git a/src/image/workflows/utils.py b/src/image/workflows/utils.py index 52b9b77..aae6100 100644 --- a/src/image/workflows/utils.py +++ b/src/image/workflows/utils.py @@ -14,7 +14,7 @@ MANIFEST_URLS = { "bbbc_download": f"{GITHUB_TAG}/saketprem/polus-plugins/bbbc_download/utils/bbbc-download-plugin/plugin.json", "file_renaming": 
f"{GITHUB_TAG}/hamshkhawar/image-tools/filepattern_filerenaming/formats/file-renaming-tool/plugin.json", - "ome_converter": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/formats/ome-converter-tool/plugin.json", + "ome_converter": f"{GITHUB_TAG}/hamshkhawar/image-tools/refs/heads/fix_endian_bug_omeconverter/formats/ome-converter-tool/plugin.json", "estimate_flatfield": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/regression/basic-flatfield-estimation-tool/plugin.json", "apply_flatfield": f"{GITHUB_TAG}/PolusAI/image-tools/refs/heads/master/transforms/images/apply-flatfield-tool/plugin.json", "kaggle_nuclei_segmentation": f"{GITHUB_TAG}/hamshkhawar/image-tools/refs/heads/kaggle_update_dependencies/segmentation/kaggle-nuclei-segmentation-tool/plugin.json",