Coverage for biobb_ml/utils/map_variables.py: 77%
62 statements
#!/usr/bin/env python3

"""Module containing the MapVariables class and the command line interface."""
import argparse
import pandas as pd
from biobb_common.generic.biobb_object import BiobbObject
from biobb_common.configuration import settings
from biobb_common.tools import file_utils as fu
from biobb_common.tools.file_utils import launchlogger
from biobb_ml.utils.common import check_input_path, check_output_path, getHeader, getTargetsList, getIndependentVarsList


class MapVariables(BiobbObject):
    """
    | biobb_ml MapVariables
    | Maps the values of a given dataset.
    | Maps the values of a given dataset according to input correspondence, substituting each value in a series with another value, which may be derived from a function, a dictionary, or another series.

    Args:
        input_dataset_path (str): Path to the input dataset. File type: input. `Sample file <https://github.com/bioexcel/biobb_ml/raw/master/biobb_ml/test/data/utils/dataset_map_variables.csv>`_. Accepted formats: csv (edam:format_3752).
        output_dataset_path (str): Path to the output dataset. File type: output. `Sample file <https://github.com/bioexcel/biobb_ml/raw/master/biobb_ml/test/reference/utils/ref_output_dataset_map_variables.csv>`_. Accepted formats: csv (edam:format_3752).
        properties (dic):
            * **targets** (*dict*) - ({}) Independent variables or columns from your dataset you want to map. If None given, all the columns will be taken. You can specify either a list of column names from your input dataset, a list of column indexes or a range of column indexes. Formats: { "columns": ["column1", "column2"] } or { "indexes": [0, 2, 3, 10, 11, 17] } or { "range": [[0, 20], [50, 102]] }. In case of multiple formats, the first one will be picked.
            * **remove_tmp** (*bool*) - (True) [WF property] Remove temporal files.
            * **restart** (*bool*) - (False) [WF property] Do not execute if output files exist.
            * **sandbox_path** (*str*) - ("./") [WF property] Parent path to the sandbox directory.

    Examples:
        This is a use example of how to use the building block from Python::

            from biobb_ml.utils.map_variables import map_variables
            prop = {
                'targets': {
                    'columns': [ 'column1', 'column2', 'column3' ]
                }
            }
            map_variables(input_dataset_path='/path/to/myDataset.csv',
                          output_dataset_path='/path/to/newDataset.csv',
                          properties=prop)

    Info:
        * wrapped_software:
            * name: In house
            * license: Apache-2.0
        * ontology:
            * name: EDAM
            * schema: http://edamontology.org/EDAM.owl

    """

    def __init__(self, input_dataset_path, output_dataset_path,
                 properties=None, **kwargs) -> None:
        properties = properties or {}

        # Call parent class constructor
        super().__init__(properties)
        self.locals_var_dict = locals().copy()

        # Input/Output files
        self.io_dict = {
            "in": {"input_dataset_path": input_dataset_path},
            "out": {"output_dataset_path": output_dataset_path}
        }

        # Properties specific for BB
        self.targets = properties.get('targets', {})
        self.properties = properties

        # Check the properties
        self.check_properties(properties)
        self.check_arguments()

    def check_data_params(self, out_log, err_log):
        """ Checks all the input/output paths and parameters """
        self.io_dict["in"]["input_dataset_path"] = check_input_path(self.io_dict["in"]["input_dataset_path"], "input_dataset_path", out_log, self.__class__.__name__)
        self.io_dict["out"]["output_dataset_path"] = check_output_path(self.io_dict["out"]["output_dataset_path"], "output_dataset_path", False, out_log, self.__class__.__name__)

    @launchlogger
    def launch(self) -> int:
80 """Execute the :class:`MapVariables <utils.map_variables.MapVariables>` utils.map_variables.MapVariables object."""

        # check input/output paths and parameters
        self.check_data_params(self.out_log, self.err_log)

        # Setup Biobb
        if self.check_restart():
            return 0
        self.stage_files()

        # load dataset
        fu.log('Getting dataset from %s' % self.io_dict["in"]["input_dataset_path"], self.out_log, self.global_log)
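        # When the 'columns' target format is used, the header row is read
        # separately with getHeader() and skipped while loading, so pandas
        # keeps the original column names; otherwise the dataset is read
        # positionally without a header.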
        if 'columns' in self.targets:
            labels = getHeader(self.io_dict["in"]["input_dataset_path"])
            skiprows = 1
        else:
            labels = None
            skiprows = None
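        # The separator is a permissive regex (whitespace, ';', ':', ',' or tab),
        # so the same code accepts several common CSV dialects.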
        data = pd.read_csv(self.io_dict["in"]["input_dataset_path"], header=None, sep="\\s+|;|:|,|\t", engine="python", skiprows=skiprows, names=labels)

        # map variables
        fu.log('Mapping [%s] columns of the dataset' % getIndependentVarsList(self.targets), self.out_log, self.global_log)
        # if None given, map all the columns
        cols = getTargetsList(self.targets, 'dummy', self.out_log, self.__class__.__name__)
        if not cols:
            cols = list(data)
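        # Each selected column is label-encoded: its unique values are
        # enumerated in order of first appearance and replaced by their
        # integer code, e.g. ['A', 'B', 'A', 'C'] -> [0, 1, 0, 2].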
        for c in cols:
            lst = data[c].unique().tolist()
            dct = {lst[i]: i for i in range(0, len(lst))}
            data[c] = data[c].map(dct)

        # save to csv
        fu.log('Saving results to %s\n' % self.io_dict["out"]["output_dataset_path"], self.out_log, self.global_log)
        data.to_csv(self.io_dict["out"]["output_dataset_path"], index=False, header=True, float_format='%.3f')

        # Copy files to host
        self.copy_to_host()

        self.tmp_files.extend([
            self.stage_io_dict.get("unique_dir")
        ])
        self.remove_tmp_files()

        self.check_arguments(output_files_created=True, raise_exception=False)

        return 0


def map_variables(input_dataset_path: str, output_dataset_path: str, properties: dict = None, **kwargs) -> int:
    """Execute the :class:`MapVariables <utils.map_variables.MapVariables>` class and
    execute the :meth:`launch() <utils.map_variables.MapVariables.launch>` method."""

    return MapVariables(input_dataset_path=input_dataset_path,
                        output_dataset_path=output_dataset_path,
                        properties=properties, **kwargs).launch()


def main():
    """Command line execution of this building block. Please check the command line documentation."""
    parser = argparse.ArgumentParser(description="Maps the values of a given dataset.", formatter_class=lambda prog: argparse.RawTextHelpFormatter(prog, width=99999))
    parser.add_argument('--config', required=False, help='Configuration file')

    # Specific args of each building block
    required_args = parser.add_argument_group('required arguments')
    required_args.add_argument('--input_dataset_path', required=True, help='Path to the input dataset. Accepted formats: csv.')
    required_args.add_argument('--output_dataset_path', required=True, help='Path to the output dataset. Accepted formats: csv.')

    args = parser.parse_args()
    args.config = args.config or "{}"
    properties = settings.ConfReader(config=args.config).get_prop_dic()

    # Specific call of each building block
    map_variables(input_dataset_path=args.input_dataset_path,
                  output_dataset_path=args.output_dataset_path,
                  properties=properties)


if __name__ == '__main__':
    main()