#!/usr/bin/env python3

"""Module containing the ClustFCC class and the command line interface."""

import argparse
import shutil
from pathlib import Path
from typing import Optional

from biobb_common.configuration import settings
from biobb_common.generic.biobb_object import BiobbObject
from biobb_common.tools import file_utils as fu
from biobb_common.tools.file_utils import launchlogger

from biobb_haddock.haddock.common import create_cfg, unzip_workflow_data


class ClustFCC(BiobbObject):
    """
    | biobb_haddock ClustFCC
    | Wrapper class for the Haddock3 ClustFCC module.
    | The `Haddock3 ClustFCC module <https://www.bonvinlab.org/haddock3/modules/analysis/haddock.modules.analysis.clustfcc.html>`_ computes clusters of structures using the Fraction of Common Contacts (FCC) metric.

    Args:
        input_haddock_wf_data_zip (str): Path to the input zipball containing all the current Haddock workflow data. File type: input. `Sample file <https://github.com/bioexcel/biobb_haddock/raw/master/biobb_haddock/test/data/haddock/haddock_wf_data_rigid.zip>`_. Accepted formats: zip (edam:format_3987).
        output_cluster_zip_path (str): Path to the output PDB file collection in zip format. File type: output. `Sample file <https://raw.githubusercontent.com/bioexcel/biobb_haddock/master/biobb_haddock/test/reference/haddock/ref_clustfcc.zip>`_. Accepted formats: zip (edam:format_3987).
        output_haddock_wf_data_zip (str) (Optional): Path to the output zipball containing all the current Haddock workflow data. File type: output. `Sample file <https://github.com/bioexcel/biobb_haddock/raw/master/biobb_haddock/test/data/haddock/haddock_wf_data_clustfcc.zip>`_. Accepted formats: zip (edam:format_3987).
        haddock_config_path (str) (Optional): Haddock configuration CFG file path. File type: input. `Sample file <https://raw.githubusercontent.com/bioexcel/biobb_haddock/master/biobb_haddock/test/data/haddock/run.cfg>`_. Accepted formats: cfg (edam:format_1476).
        properties (dict - Python dictionary object containing the tool parameters, not input/output files):
            * **cfg** (*dict*) - ({}) Haddock configuration options specification.
            * **global_cfg** (*dict*) - ({"postprocess": False}) `Global configuration options <https://www.bonvinlab.org/haddock3-user-manual/global_parameters.html>`_ specification.
            * **binary_path** (*str*) - ("haddock3") Path to the haddock3 executable binary.
            * **remove_tmp** (*bool*) - (True) [WF property] Remove temporal files.
            * **restart** (*bool*) - (False) [WF property] Do not execute if output files exist.
            * **sandbox_path** (*str*) - ("./") [WF property] Parent path to the sandbox directory.
            * **container_path** (*str*) - (None) Path to the binary executable of your container.
            * **container_image** (*str*) - (None) Container Image identifier.
            * **container_volume_path** (*str*) - ("/data") Path to an internal directory in the container.
            * **container_working_dir** (*str*) - (None) Path to the internal CWD in the container.
            * **container_user_id** (*str*) - (None) User number id to be mapped inside the container.
            * **container_shell_path** (*str*) - ("/bin/bash") Path to the binary executable of the container shell.

    Examples:
        This is a use example of how to use the building block from Python::

            from biobb_haddock.haddock.clust_fcc import clust_fcc
            prop = {'binary_path': 'haddock3'}
            clust_fcc(input_haddock_wf_data_zip='/path/to/myworkflowdata.zip',
                      output_cluster_zip_path='/path/to/myclusterfiles.zip',
                      properties=prop)
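
            # Illustrative variant: also save the updated workflow data and pass
            # step options through the 'cfg' property (valid option names are
            # those accepted by the haddock3 clustfcc module):
            prop = {'cfg': {'min_population': 4}}
            clust_fcc(input_haddock_wf_data_zip='/path/to/myworkflowdata.zip',
                      output_cluster_zip_path='/path/to/myclusterfiles.zip',
                      output_haddock_wf_data_zip='/path/to/myworkflowdata_out.zip',
                      properties=prop)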

    Info:
        * wrapped_software:
            * name: Haddock3
            * version: 2025.5
            * license: Apache-2.0
        * ontology:
            * name: EDAM
            * schema: http://edamontology.org/EDAM.owl
    """

    def __init__(
        self,
        input_haddock_wf_data_zip: str,
        output_cluster_zip_path: str,
        output_haddock_wf_data_zip: Optional[str] = None,
        haddock_config_path: Optional[str] = None,
        properties: Optional[dict] = None,
        **kwargs,
    ) -> None:
        properties = properties or {}

        # Call parent class constructor
        super().__init__(properties)

        # Input/Output files
        self.io_dict = {
            "in": {"haddock_config_path": haddock_config_path},
            "out": {
                "output_haddock_wf_data_zip": output_haddock_wf_data_zip,
                "output_cluster_zip_path": output_cluster_zip_path,
            },
        }
        # Should not be copied inside container
        self.input_haddock_wf_data_zip = input_haddock_wf_data_zip

        # Properties specific for BB
        self.haddock_step_name = "clustfcc"
        self.output_cfg_path = properties.get("output_cfg_path", "haddock.cfg")
        self.cfg = {k: v for k, v in properties.get("cfg", dict()).items()}
        self.global_cfg = properties.get("global_cfg", dict(postprocess=False))

        # Path to the executable binary
        self.binary_path = properties.get("binary_path", "haddock3")

        # Check the properties
        self.check_properties(properties)

    @launchlogger
    def launch(self) -> int:
        """Execute the :class:`ClustFCC <biobb_haddock.haddock.clust_fcc.ClustFCC>` object."""

        # Setup Biobb
        if self.check_restart():
            return 0
        self.stage_files()
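
        # stage_files() copies the declared input files into the sandbox
        # (unique_dir) so their paths can be remapped for container execution.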

        # Unzip workflow data to workflow_data_out
        run_dir = unzip_workflow_data(
            zip_file=self.input_haddock_wf_data_zip, out_log=self.out_log
        )
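        # run_dir points to the extracted haddock3 run directory, which already
        # contains the results of the previous workflow steps.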

        workflow_dict = {"haddock_step_name": self.haddock_step_name}
        workflow_dict.update(self.global_cfg)
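        # workflow_dict combines the step name with the global haddock3 options
        # (e.g. postprocess) and is rendered into the CFG file right below.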

        # Create data dir
        cfg_dir = fu.create_unique_dir()
        self.output_cfg_path = create_cfg(
            output_cfg_path=str(Path(cfg_dir).joinpath(self.output_cfg_path)),
            workflow_dict=workflow_dict,
            input_cfg_path=self.stage_io_dict["in"].get("haddock_config_path"),
            cfg_properties_dict=self.cfg,
        )
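        # create_cfg is expected to write a CFG whose clustfcc section holds the
        # user-supplied 'cfg' options, merged on top of any input haddock_config_path.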

        if self.container_path:
            fu.log("Container execution enabled", self.out_log)

            shutil.copy2(
                self.output_cfg_path, str(self.stage_io_dict.get("unique_dir", ""))
            )
            self.output_cfg_path = str(
                Path(self.container_volume_path).joinpath(
                    Path(self.output_cfg_path).name
                )
            )

            shutil.copytree(
                run_dir,
                str(
                    Path(self.stage_io_dict.get("unique_dir", "")).joinpath(
                        Path(run_dir).name
                    )
                ),
            )
            run_dir = str(
                Path(self.stage_io_dict.get("unique_dir", "")).joinpath(
                    Path(run_dir).name
                )
            )
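            # The CFG file and the run directory now live inside the staged
            # unique_dir, so the containerized binary can reach them through the
            # mounted volume.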

        self.cmd = [self.binary_path, self.output_cfg_path, "--extend-run", run_dir]
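        # haddock3 is called with --extend-run so the clustfcc step is appended to
        # the existing run directory instead of starting a new run.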

        # Run Biobb block
        self.run_biobb()

        # Copy files to host
        # self.copy_to_host()

        # Copy output
        haddock_output_list = [
            str(path)
            for path in Path(run_dir).iterdir()
            if path.is_dir() and str(path).endswith(workflow_dict["haddock_step_name"])
        ]
        haddock_output_list.sort(reverse=True)
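        # After the reverse sort, haddock_output_list[0] is the most recent step
        # directory whose name ends in "clustfcc" (e.g. "4_clustfcc").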
        output_file_list = [
            str(path)
            for path in Path(haddock_output_list[0]).iterdir()
            if path.is_file() and str(path.name) not in ["io.json", "params.cfg"]
        ]
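        # Only the result files of the step are zipped; the haddock3 bookkeeping
        # files io.json and params.cfg are skipped.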
        fu.zip_list(
            self.io_dict["out"]["output_cluster_zip_path"],
            output_file_list,
            self.out_log,
        )

        # Create zip output
        if self.io_dict["out"].get("output_haddock_wf_data_zip"):
            fu.log(
                f"Zipping {run_dir} to {str(Path(self.io_dict['out']['output_haddock_wf_data_zip']).with_suffix(''))} ",
                self.out_log,
                self.global_log,
            )
            shutil.make_archive(
                str(
                    Path(self.io_dict["out"]["output_haddock_wf_data_zip"]).with_suffix(
                        ""
                    )
                ),
                "zip",
                run_dir,
            )

        # Remove temporal files
        self.tmp_files.extend([
            run_dir,
            cfg_dir,
            self.stage_io_dict.get("unique_dir"),
        ])
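        # remove_tmp_files() only deletes these paths when the remove_tmp
        # property is True (the default).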
        self.remove_tmp_files()

        return self.return_code


def clust_fcc(
    input_haddock_wf_data_zip: str,
    output_cluster_zip_path: str,
    output_haddock_wf_data_zip: Optional[str] = None,
    haddock_config_path: Optional[str] = None,
    properties: Optional[dict] = None,
    **kwargs,
) -> int:
    """Create :class:`ClustFCC <biobb_haddock.haddock.clust_fcc.ClustFCC>` class and
    execute the :meth:`launch() <biobb_haddock.haddock.clust_fcc.ClustFCC.launch>` method."""
    return ClustFCC(
        input_haddock_wf_data_zip=input_haddock_wf_data_zip,
        output_cluster_zip_path=output_cluster_zip_path,
        output_haddock_wf_data_zip=output_haddock_wf_data_zip,
        haddock_config_path=haddock_config_path,
        properties=properties,
        **kwargs,
    ).launch()


def main():
    parser = argparse.ArgumentParser(
        description="Wrapper of the haddock ClustFCC module.",
        formatter_class=lambda prog: argparse.RawTextHelpFormatter(prog, width=99999),
    )
    parser.add_argument(
        "-c",
        "--config",
        required=False,
        help="This file can be a YAML file, JSON file or JSON string",
    )

    # Specific args of each building block
    required_args = parser.add_argument_group("required arguments")
    required_args.add_argument("--input_haddock_wf_data_zip", required=True)
    required_args.add_argument("--output_cluster_zip_path", required=True)
    parser.add_argument("--output_haddock_wf_data_zip", required=False)
    parser.add_argument("--haddock_config_path", required=False)

    args = parser.parse_args()
    config = args.config if args.config else None
    properties = settings.ConfReader(config=config).get_prop_dic()

    # Specific call of each building block
    clust_fcc(
        input_haddock_wf_data_zip=args.input_haddock_wf_data_zip,
        output_cluster_zip_path=args.output_cluster_zip_path,
        output_haddock_wf_data_zip=args.output_haddock_wf_data_zip,
        haddock_config_path=args.haddock_config_path,
        properties=properties,
    )


if __name__ == "__main__":
    main()