diff --git a/.github/workflows/typing-check.yml b/.github/workflows/typing-check.yml index 4de561718..553404730 100644 --- a/.github/workflows/typing-check.yml +++ b/.github/workflows/typing-check.yml @@ -16,7 +16,7 @@ jobs: max-parallel: 3 matrix: # add packages to check typing - package-name: ["hdf5-geomechanics", "geos-posp", "geos-timehistory", "geos-utils", "geos-xml-tools", "hdf5-wrapper"] + package-name: ["geos-geomechanics", "geos-posp", "geos-timehistory", "geos-utils", "geos-xml-tools", "hdf5-wrapper"] steps: - uses: actions/checkout@v3 diff --git a/geos-timehistory/src/geos/timehistory/__init__.py b/geos-timehistory/src/geos/timehistory/__init__.py index b288976bb..9e6339cc0 100644 --- a/geos-timehistory/src/geos/timehistory/__init__.py +++ b/geos-timehistory/src/geos/timehistory/__init__.py @@ -1 +1 @@ -from .plot_time_history import getHistorySeries +from .plot_time_history import getHistorySeries #noqa: F401 diff --git a/geos-timehistory/src/geos/timehistory/plot_time_history.py b/geos-timehistory/src/geos/timehistory/plot_time_history.py index 9f2514e9c..ae9fae660 100644 --- a/geos-timehistory/src/geos/timehistory/plot_time_history.py +++ b/geos-timehistory/src/geos/timehistory/plot_time_history.py @@ -1,40 +1,41 @@ -import numpy as np +from typing import Any, Optional from geos.hdf5_wrapper import wrapper as h5w -import matplotlib as mpl import matplotlib.pyplot as plt import os -import sys import argparse import re -def isiterable( obj ): +def isiterable( obj: Any ) -> bool: + """Check if input is iterable.""" try: - it = iter( obj ) + it = iter( obj ) # noqa: F841 except TypeError: return False return True -def getHistorySeries( database, variable, setname, indices=None, components=None ): - """ - Retrieve a series of time history structures suitable for plotting in addition to the specific set index and component for the time series +def getHistorySeries( database: h5w, + variable: str, + setname: str, + indices: Optional[ int | list[ int ] ] = None, + components: Optional[ int | list[ int ] ] = None ) -> Optional[ list[ tuple[ Any, ...] ] ]: + """Retrieve a series of time history structures suitable for plotting in addition to the specific set index and component for the time series. Args: database (geos.hdf5_wrapper.hdf5_wrapper): database to retrieve time history data from variable (str): the name of the time history variable for which to retrieve time-series data setname (str): the name of the index set as specified in the geosx input xml for which to query time-series data - indices (int, list): the indices in the named set to query for, if None, defaults to all - components (int, list): the components in the flattened data types to retrieve, defaults to all - + indices (Optional[int | list[ int ]]): the indices in the named set to query for, if None, defaults to all + components (Optional[int | list[ int ]]): the components in the flattened data types to retrieve, defaults to all + Returns: - list: list of (time, data, idx, comp) timeseries tuples for each time history data component + Optional[list[ tuple[ Any, ...] ]]: list of (time, data, idx, comp) timeseries tuples for each time history data component """ - set_regex = re.compile( variable + '(.*?)', re.IGNORECASE ) if setname is not None: - set_regex = re.compile( variable + '\s*' + str( setname ), re.IGNORECASE ) + set_regex = re.compile( variable + r'\s*' + str( setname ), re.IGNORECASE ) time_regex = re.compile( 'Time', re.IGNORECASE ) # need to make this per-set, thought that was in already? 
set_match = list( filter( set_regex.match, database.keys() ) ) @@ -62,39 +63,48 @@ def getHistorySeries( database, variable, setname, indices=None, components=None print( f"Error: The length of the time-series {time_match} and data-series {set_match} do not match: {time_series.shape} and {data_series.shape} !" ) - + indices1: list[ int ] = [] if indices is not None: if type( indices ) is int: - indices = [ indices ] - if isiterable( indices ): - oob_idxs = list( filter( lambda idx: not 0 <= idx < data_series.shape[ 1 ], indices ) ) + indices1 = [ indices ] + elif isiterable( indices ): + oob_idxs: list[ int ] = list( + filter( + lambda idx: not 0 <= idx < data_series.shape[ 1 ], # type: ignore[arg-type] + indices ) ) # type: ignore[arg-type] if len( oob_idxs ) > 0: - print( f"Error: The specified indices: ({', '.join(oob_idxs)}) " + "\n\t" + - f" are out of the dataset index range: [0,{data_series.shape[1]})" ) - indices = list( set( indices ) - set( oob_idxs ) ) + print( f"Error: The specified indices: ({', '.join(map(str, oob_idxs))}) " + "\n\t" + + f" are out of the dataset index range: [0,{data_series.shape[1]})" ) # type: ignore[arg-type] + indices1 = list( set( indices ) - set( oob_idxs ) ) # type: ignore[arg-type] else: print( f"Error: unsupported indices type: {type(indices)}" ) else: - indices = range( data_series.shape[ 1 ] ) + indices1 = list( range( data_series.shape[ 1 ] ) ) + components1: list[ int ] = [] if components is not None: if type( components ) is int: - components = [ components ] - if isiterable( components ): - oob_comps = list( filter( lambda comp: not 0 <= comp < data_series.shape[ 2 ], components ) ) + components1 = [ components ] + elif isiterable( components ): + oob_comps: list[ int ] = list( + filter( + lambda comp: not 0 <= comp < data_series.shape[ 2 ], # type: ignore[arg-type] + components ) ) # type: ignore[arg-type] if len( oob_comps ) > 0: - print( f"Error: The specified components: ({', '.join(oob_comps)}) " + "\n\t" + + print( f"Error: The specified components: ({', '.join(map(str, oob_comps))}) " + "\n\t" + " is out of the dataset component range: [0,{data_series.shape[1]})" ) - components = list( set( components ) - set( oob_comps ) ) + components1 = list( set( components ) - set( oob_comps ) ) # type: ignore[arg-type] else: print( f"Error: unsupported components type: {type(components)}" ) else: - components = range( data_series.shape[ 2 ] ) + components1 = list( range( data_series.shape[ 2 ] ) ) - return [ ( time_series[ :, 0 ], data_series[ :, idx, comp ], idx, comp ) for idx in indices for comp in components ] + return [ ( time_series[ :, 0 ], data_series[ :, idx, comp ], idx, comp ) for idx in indices1 + for comp in components1 ] -def commandLinePlotGen(): +def commandLinePlotGen() -> int: + """Parse commande line.""" parser = argparse.ArgumentParser( description="A script that parses geosx HDF5 time-history files and produces time-history plots using matplotlib" ) diff --git a/geos-trame/pyproject.toml b/geos-trame/pyproject.toml index d483aa42d..1031aa23a 100644 --- a/geos-trame/pyproject.toml +++ b/geos-trame/pyproject.toml @@ -46,6 +46,7 @@ dependencies = [ "colorcet==3.1.0", "funcy==2.0", "typing_inspect==0.9.0", + "typing_extensions>=4.12", ] [project.optional-dependencies] diff --git a/geos-xml-tools/pyproject.toml b/geos-xml-tools/pyproject.toml index 72afb9d55..ba847c3fd 100644 --- a/geos-xml-tools/pyproject.toml +++ b/geos-xml-tools/pyproject.toml @@ -16,7 +16,8 @@ requires-python = ">=3.8" dependencies = [ "lxml>=4.5.0", 
"parameterized", - "numpy" + "numpy>=1.16.2", + "typing_extensions>=4.12" ] [project.scripts] @@ -25,3 +26,14 @@ dependencies = [ test_geosx_xml_tools = "geos.xml_tools.tests.test_manager:main" check_xml_attribute_coverage = "geos.xml_tools.attribute_coverage:main" check_xml_redundancy = "geos.xml_tools.xml_redundancy_check:main" + +[tool.pytest.ini_options] +addopts = "--import-mode=importlib" +console_output_style = "count" +pythonpath = [".", "src"] +python_classes = "Test" +python_files = "test*.py" +python_functions = "test*" +testpaths = ["tests"] +norecursedirs = "bin" +filterwarnings = [] \ No newline at end of file diff --git a/geos-xml-tools/src/geos/xml_tools/__init__.py b/geos-xml-tools/src/geos/xml_tools/__init__.py index c51fba5a2..8eaaf789c 100644 --- a/geos-xml-tools/src/geos/xml_tools/__init__.py +++ b/geos-xml-tools/src/geos/xml_tools/__init__.py @@ -1,3 +1 @@ -""" -A python module that enables advanced xml features for GEOSX. -""" +"""A python module that enables advanced xml features for GEOSX.""" diff --git a/geos-xml-tools/src/geos/xml_tools/attribute_coverage.py b/geos-xml-tools/src/geos/xml_tools/attribute_coverage.py index 806cf68e7..5687db368 100644 --- a/geos-xml-tools/src/geos/xml_tools/attribute_coverage.py +++ b/geos-xml-tools/src/geos/xml_tools/attribute_coverage.py @@ -12,7 +12,7 @@ def parse_schema_element( root: ElementTree.Element, xsd: str = '{http://www.w3.org/2001/XMLSchema}', recursive_types: Iterable[ str ] = [ 'PeriodicEvent', 'SoloEvent', 'HaltEvent' ], folders: Iterable[ str ] = [ 'src', 'examples' ] ) -> record_type: - """Parse the xml schema at the current level + """Parse the xml schema at the current level. Args: root (lxml.etree.Element): the root schema node @@ -24,7 +24,6 @@ def parse_schema_element( root: ElementTree.Element, Returns: dict: Dictionary of attributes and children for the current node """ - element_type = node.get( 'type' ) element_name = node.get( 'name' ) element_def = root.find( "%scomplexType[@name='%s']" % ( xsd, element_type ) ) @@ -49,7 +48,7 @@ def parse_schema_element( root: ElementTree.Element, def parse_schema( fname: str ) -> record_type: - """Parse the schema file into the xml attribute usage dict + """Parse the schema file into the xml attribute usage dict. Args: fname (str): schema name @@ -64,14 +63,14 @@ def parse_schema( fname: str ) -> record_type: def collect_xml_attributes_level( local_types: record_type, node: ElementTree.Element, folder: str ) -> None: - """Collect xml attribute usage at the current level + """Collect xml attribute usage at the current level. Args: local_types (dict): dictionary containing attribute usage node (lxml.etree.Element): current xml node folder (str): the source folder for the current file """ - for ka in node.attrib.keys(): + for ka in node.attrib: local_types[ 'attributes' ][ ka ][ folder ].append( node.get( ka ) ) for child in node: @@ -80,7 +79,7 @@ def collect_xml_attributes_level( local_types: record_type, node: ElementTree.El def collect_xml_attributes( xml_types: record_type, fname: str, folder: str ) -> None: - """Collect xml attribute usage in a file + """Collect xml attribute usage in a file. 
Args: xml_types (dict): dictionary containing attribute usage @@ -97,15 +96,15 @@ def collect_xml_attributes( xml_types: record_type, fname: str, folder: str ) -> def write_attribute_usage_xml_level( local_types: record_type, node: ElementTree.Element, folders: Iterable[ str ] = [ 'src', 'examples' ] ) -> None: - """Write xml attribute usage file at a given level + """Write xml attribute usage file at a given level. Args: local_types (dict): dict containing attribute usage at the current level node (lxml.etree.Element): current xml node + folders (Iterable[ str ]): folders. Defaults to [ 'src', 'examples' ]. """ - # Write attributes - for ka in local_types[ 'attributes' ].keys(): + for ka in local_types[ 'attributes' ]: attribute_node = ElementTree.Element( ka ) node.append( attribute_node ) @@ -129,7 +128,7 @@ def write_attribute_usage_xml_level( local_types: record_type, def write_attribute_usage_xml( xml_types: record_type, fname: str ) -> None: - """Write xml attribute usage file + """Write xml attribute usage file. Args: xml_types (dict): dictionary containing attribute usage by xml type @@ -143,13 +142,12 @@ def write_attribute_usage_xml( xml_types: record_type, fname: str ) -> None: def process_xml_files( geosx_root: str, output_name: str ) -> None: - """Test for xml attribute usage + """Test for xml attribute usage. Args: geosx_root (str): GEOSX root directory output_name (str): output file name """ - # Parse the schema geosx_root = os.path.expanduser( geosx_root ) schema = '%ssrc/coreComponents/schema/schema.xsd' % ( geosx_root ) @@ -168,13 +166,12 @@ def process_xml_files( geosx_root: str, output_name: str ) -> None: def main() -> None: - """Entry point for the xml attribute usage test script + """Entry point for the xml attribute usage test script. Args: -r/--root (str): GEOSX root directory -o/--output (str): output file name """ - # Parse the user arguments parser = command_line_parsers.build_attribute_coverage_input_parser() args = parser.parse_args() diff --git a/geos-xml-tools/src/geos/xml_tools/command_line_parsers.py b/geos-xml-tools/src/geos/xml_tools/command_line_parsers.py index 48b126f95..1d070d8b7 100644 --- a/geos-xml-tools/src/geos/xml_tools/command_line_parsers.py +++ b/geos-xml-tools/src/geos/xml_tools/command_line_parsers.py @@ -3,7 +3,7 @@ def build_preprocessor_input_parser() -> argparse.ArgumentParser: - """Build the argument parser + """Build the argument parser. Returns: argparse.ArgumentParser: The parser @@ -29,7 +29,7 @@ def build_preprocessor_input_parser() -> argparse.ArgumentParser: def parse_xml_preprocessor_arguments() -> Tuple[ argparse.Namespace, Iterable[ str ] ]: - """Parse user arguments + """Parse user arguments. Args: -i/--input (str): Input file name (multiple allowed) @@ -46,12 +46,11 @@ def parse_xml_preprocessor_arguments() -> Tuple[ argparse.Namespace, Iterable[ s def build_xml_formatter_input_parser() -> argparse.ArgumentParser: - """Build the argument parser + """Build the argument parser. Returns: argparse.ArgumentParser: the parser instance """ - parser = argparse.ArgumentParser() parser.add_argument( 'input', type=str, help='Input file name' ) parser.add_argument( '-i', '--indent', type=int, help='Indent size', default=2 ) @@ -64,12 +63,11 @@ def build_xml_formatter_input_parser() -> argparse.ArgumentParser: def build_attribute_coverage_input_parser() -> argparse.ArgumentParser: - """Build attribute coverage redundancy input parser + """Build attribute coverage redundancy input parser. 
Returns: argparse.ArgumentParser: parser instance """ - parser = argparse.ArgumentParser() parser.add_argument( '-r', '--root', type=str, help='GEOSX root', default='' ) parser.add_argument( '-o', '--output', type=str, help='Output file name', default='attribute_test.xml' ) @@ -77,12 +75,11 @@ def build_attribute_coverage_input_parser() -> argparse.ArgumentParser: def build_xml_redundancy_input_parser() -> argparse.ArgumentParser: - """Build xml redundancy input parser + """Build xml redundancy input parser. Returns: argparse.ArgumentParser: parser instance """ - parser = argparse.ArgumentParser() parser.add_argument( '-r', '--root', type=str, help='GEOSX root', default='' ) return parser diff --git a/geos-xml-tools/src/geos/xml_tools/main.py b/geos-xml-tools/src/geos/xml_tools/main.py index a5cef1bdd..6c4d3da88 100644 --- a/geos-xml-tools/src/geos/xml_tools/main.py +++ b/geos-xml-tools/src/geos/xml_tools/main.py @@ -1,15 +1,14 @@ -"""Command line tools for geosx_xml_tools""" - import sys -import argparse import os import time from geos.xml_tools import xml_processor, command_line_parsers -from typing import Callable, Any, Union, Tuple, Iterable +from typing import Callable, Any, Union, Iterable + +__doc__ = """Command line tools for geosx_xml_tools.""" def check_mpi_rank() -> int: - """Check the MPI rank + """Check the MPI rank. Returns: int: MPI rank @@ -28,7 +27,7 @@ def check_mpi_rank() -> int: def wait_for_file_write_rank_0( target_file_argument: Union[ int, str ] = 0, max_wait_time: float = 100, max_startup_delay: float = 1 ) -> Callable[ [ TFunc ], TFunc ]: - """Constructor for a function decorator that waits for a target file to be written on rank 0 + """Constructor for a function decorator that waits for a target file to be written on rank 0. Args: target_file_argument (int, str): Index or keyword of the filename argument in the decorated function @@ -40,15 +39,14 @@ def wait_for_file_write_rank_0( target_file_argument: Union[ int, str ] = 0, """ def wait_for_file_write_rank_0_inner( writer: TFunc ) -> TFunc: - """Intermediate constructor for the function decorator + """Intermediate constructor for the function decorator. Args: writer (typing.Callable): A function that writes to a file """ - def wait_for_file_write_rank_0_decorator( *args, **kwargs ) -> Any: - """Apply the writer on rank 0, and wait for completion on other ranks - """ + def wait_for_file_write_rank_0_decorator( *args, **kwargs ) -> Any: # noqa: ANN002, ANN003 + """Apply the writer on rank 0, and wait for completion on other ranks.""" # Check the target file status rank = check_mpi_rank() fname = '' @@ -87,9 +85,7 @@ def wait_for_file_write_rank_0_decorator( *args, **kwargs ) -> Any: def preprocess_serial() -> None: - """ - Entry point for the geosx_xml_tools console script - """ + """Entry point for the geosx_xml_tools console script.""" # Process the xml file args, unknown_args = command_line_parsers.parse_xml_preprocessor_arguments() @@ -121,9 +117,7 @@ def preprocess_serial() -> None: def preprocess_parallel() -> Iterable[ str ]: - """ - MPI aware xml preprocesing - """ + """MPI aware xml preprocesing.""" # Process the xml file from mpi4py import MPI # type: ignore[import] comm = MPI.COMM_WORLD @@ -142,7 +136,7 @@ def preprocess_parallel() -> Iterable[ str ]: def format_geosx_arguments( compiled_name: str, unknown_args: Iterable[ str ] ) -> Iterable[ str ]: - """Format GEOSX arguments + """Format GEOSX arguments. 
Args: compiled_name (str): Name of the compiled xml file diff --git a/geos-xml-tools/src/geos/xml_tools/regex_tools.py b/geos-xml-tools/src/geos/xml_tools/regex_tools.py index a7e586b41..cf4c27de6 100644 --- a/geos-xml-tools/src/geos/xml_tools/regex_tools.py +++ b/geos-xml-tools/src/geos/xml_tools/regex_tools.py @@ -1,8 +1,9 @@ -"""Tools for managing regular expressions in geosx_xml_tools""" - import re -from typing import Union, Dict -""" +from typing import Dict + +__doc__ = """ +Tools for managing regular expressions in geosx_xml_tools. + Define regex patterns used throughout the module: Pattern | Example targets | Notes @@ -56,6 +57,7 @@ class DictRegexHandler(): def __init__( self ) -> None: """Initialize the handler with an empty target list. + The key/value pairs of self.target indicate which values to look for and the values they will replace with. """ @@ -67,10 +69,9 @@ def __call__( self, match: re.Match ) -> str: Args: match (re.match): A matching string identified by the regex. """ - k = match.group( 1 ) if k: - if ( k not in self.target.keys() ): + if ( k not in self.target ): raise Exception( 'Error: Target (%s) is not defined in the regex handler' % k ) value = self.target[ k ] return str( value ) diff --git a/geos-xml-tools/src/geos/xml_tools/table_generator.py b/geos-xml-tools/src/geos/xml_tools/table_generator.py index 9fbe4d87a..d92bbea84 100644 --- a/geos-xml-tools/src/geos/xml_tools/table_generator.py +++ b/geos-xml-tools/src/geos/xml_tools/table_generator.py @@ -1,8 +1,8 @@ -"""Tools for reading/writing GEOSX ascii tables""" - import numpy as np from typing import Tuple, Iterable, Dict +__doc__ = """Tools for reading/writing GEOSX ascii tables.""" + def write_GEOS_table( axes_values: Iterable[ np.ndarray ], properties: Dict[ str, np.ndarray ], @@ -16,19 +16,18 @@ def write_GEOS_table( axes_values: Iterable[ np.ndarray ], axes_names (list): Names for each axis (default = ['x', 'y', 'z', 't']) string_format (str): Format for output values (default = %1.5e) """ - # Check to make sure the axes/property files have the correct shape axes_shape = tuple( [ len( x ) for x in axes_values ] ) - for k in properties.keys(): + for k in properties: if ( np.shape( properties[ k ] ) != axes_shape ): raise Exception( "Shape of parameter %s is incompatible with given axes" % ( k ) ) # Write axes files - for ka, x in zip( axes_names, axes_values ): + for ka, x in zip( axes_names, axes_values, strict=False ): np.savetxt( '%s.geos' % ( ka ), x, fmt=string_format, delimiter=',' ) # Write property files - for k in properties.keys(): + for k in properties: tmp = np.reshape( properties[ k ], ( -1 ), order='F' ) np.savetxt( '%s.geos' % ( k ), tmp, fmt=string_format, delimiter=',' ) @@ -60,7 +59,6 @@ def read_GEOS_table( axes_files: Iterable[ str ], def write_read_GEOS_table_example() -> None: """Table read / write example.""" - # Define table axes a = np.array( [ 0.0, 1.0 ] ) b = np.array( [ 0.0, 0.5, 1.0 ] ) diff --git a/geos-xml-tools/src/geos/xml_tools/tests/generate_test_xml.py b/geos-xml-tools/src/geos/xml_tools/tests/generate_test_xml.py index a1ab53c98..fbcc27f4d 100644 --- a/geos-xml-tools/src/geos/xml_tools/tests/generate_test_xml.py +++ b/geos-xml-tools/src/geos/xml_tools/tests/generate_test_xml.py @@ -1,17 +1,16 @@ """Tool for generating test xml files for processing.""" -from lxml import etree as ElementTree import os from geos.xml_tools import xml_formatter -def generate_test_xml_files( root_dir ): +def generate_test_xml_files( root_dir: str ) -> None: """Build example input/output 
xml files, which can be used to test the parser. + These are derived from a GEOSX integrated test xml. @param root_dir The folder to write the example xml files. """ - # Build segments of an xml file that can be compiled to form a test # File header/footer xml_header = """""" diff --git a/geos-xml-tools/src/geos/xml_tools/tests/test_manager.py b/geos-xml-tools/src/geos/xml_tools/tests/test_manager.py index 798af3c06..722ae5b7a 100644 --- a/geos-xml-tools/src/geos/xml_tools/tests/test_manager.py +++ b/geos-xml-tools/src/geos/xml_tools/tests/test_manager.py @@ -1,3 +1,4 @@ +from typing_extensions import Self import unittest import re import os @@ -15,10 +16,12 @@ class TestUnitManager( unittest.TestCase ): @classmethod - def setUpClass( cls ): + def setUpClass( cls ) -> None: + """Set tests up.""" cls.tol = 1e-6 # type: ignore[attr-defined] - def test_unit_dict( self ): + def test_unit_dict( self: Self ) -> None: + """Test unit dictionary.""" unitManager.buildUnits() self.assertTrue( bool( unitManager.units ) ) @@ -32,7 +35,15 @@ def test_unit_dict( self ): [ 'm**2', '1', 1.0 ], [ 'km**2', '1', 1.0e6 ], [ 'kilometer**2', '1', 1.0e6 ], [ '(km*mm)', '1', 1.0 ], [ '(km*mm)**2', '1', 1.0 ], [ 'km^2', '1', 1.0e6, True ], [ 'bbl/day', '1', 0.000001840130728333 ], [ 'cP', '1', 0.001 ] ] ) - def test_units( self, unit, scale, expected_value, expect_fail=False ): + def test_units( self: Self, unit: str, scale: int, expected_value: float, expect_fail: bool = False ) -> None: + """Test of units. + + Args: + unit (str): unit + scale (int): scale + expected_value (float): expected value + expect_fail (bool, optional): accepts failure if True. Defaults to False. + """ try: val = float( unitManager( [ scale, unit ] ) ) self.assertTrue( ( abs( val - expected_value ) < self.tol ) != expect_fail ) # type: ignore[attr-defined] @@ -44,7 +55,8 @@ def test_units( self, unit, scale, expected_value, expect_fail=False ): class TestParameterRegex( unittest.TestCase ): @classmethod - def setUpClass( cls ): + def setUpClass( cls ) -> None: + """Set the tests up.""" cls.regexHandler = regex_tools.DictRegexHandler() # type: ignore[attr-defined] cls.regexHandler.target[ 'foo' ] = '1.23' # type: ignore[attr-defined] cls.regexHandler.target[ 'bar' ] = '4.56e7' # type: ignore[attr-defined] @@ -55,7 +67,14 @@ def setUpClass( cls ): [ '$foo$*1.234/$bar$', '1.23*1.234/4.56e7' ], [ '$blah$*1.234/$bar$', '1.23*1.234/4.56e7', True ], [ '$foo$*1.234/$bar$', '4.56e7*1.234/4.56e7', True ] ] ) - def test_parameter_regex( self, parameterInput, expectedValue, expect_fail=False ): + def test_parameter_regex( self: Self, parameterInput: str, expectedValue: str, expect_fail: bool = False ) -> None: + """Test of parameter regex. + + Args: + parameterInput (str): input value + expectedValue (str): expected output value + expect_fail (bool, optional): accepts failure if True. Defaults to False.
+ """ try: result = re.sub( regex_tools.patterns[ 'parameters' ], @@ -70,7 +89,8 @@ def test_parameter_regex( self, parameterInput, expectedValue, expect_fail=False class TestUnitsRegex( unittest.TestCase ): @classmethod - def setUpClass( cls ): + def setUpClass( cls ) -> None: + """Set the test up.""" cls.tol = 1e-6 # type: ignore[attr-defined] @parameterized.expand( [ [ '1.234[m**2/s]', '1.234' ], [ '1.234 [m**2/s]', '1.234' ], @@ -78,7 +98,14 @@ def setUpClass( cls ): [ '1.234[m**2/s] + 5.678[mm/s]', '1.234 + 5.678e-3' ], [ '1.234 [m**2/s] + 5.678 [mm/s]', '1.234 + 5.678e-3' ], [ '(1.234[m**2/s])*5.678', '(1.234)*5.678' ] ] ) - def test_units_regex( self, unitInput, expectedValue, expect_fail=False ): + def test_units_regex( self: Self, unitInput: str, expectedValue: str, expect_fail: bool = False ) -> None: + """Test units regex. + + Args: + unitInput (str): input unit + expectedValue (str): expected output value + expect_fail (bool, optional): Accepts failure if True. Defaults to False. + """ try: result = re.sub( regex_tools.patterns[ 'units' ], unitManager.regexHandler, unitInput ) self.assertTrue( ( result == expectedValue ) != expect_fail ) @@ -90,7 +117,8 @@ def test_units_regex( self, unitInput, expectedValue, expect_fail=False ): class TestSymbolicRegex( unittest.TestCase ): @classmethod - def setUpClass( cls ): + def setUpClass( cls ) -> None: + """Set the tests up.""" cls.tol = 1e-6 # type: ignore[attr-defined] @parameterized.expand( [ [ '`1.234`', '1.234' ], [ '`1.234*2.0`', '2.468' ], [ '`10`', '1e1' ], [ '`10*2`', '2e1' ], @@ -98,7 +126,14 @@ def setUpClass( cls ): [ '`(1.0 + 2.0)**2`', '9' ], [ '`((1.0 + 2.0)**2)**(0.5)`', '3' ], [ '`(1.2e3)*2`', '2.4e3' ], [ '`1.2e3*2`', '2.4e3' ], [ '`2.0^2`', '4', True ], [ '`sqrt(4.0)`', '2', True ] ] ) - def test_symbolic_regex( self, symbolicInput, expectedValue, expect_fail=False ): + def test_symbolic_regex( self: Self, symbolicInput: str, expectedValue: str, expect_fail: bool = False ) -> None: + """Test of symbolic regex. + + Args: + symbolicInput (str): symbolic input + expectedValue (str): expected value + expect_fail (bool, optional): Accepts failure if True. Defaults to False. + """ try: result = re.sub( regex_tools.patterns[ 'symbolic' ], regex_tools.SymbolicMathRegexHandler, symbolicInput ) self.assertTrue( ( result == expectedValue ) != expect_fail ) @@ -110,14 +145,22 @@ def test_symbolic_regex( self, symbolicInput, expectedValue, expect_fail=False ) class TestXMLProcessor( unittest.TestCase ): @classmethod - def setUpClass( cls ): + def setUpClass( cls ) -> None: + """Set test up.""" generate_test_xml.generate_test_xml_files( '.' ) @parameterized.expand( [ [ 'no_advanced_features_input.xml', 'no_advanced_features_target.xml' ], [ 'parameters_input.xml', 'parameters_target.xml' ], [ 'included_input.xml', 'included_target.xml' ], [ 'symbolic_parameters_input.xml', 'symbolic_parameters_target.xml' ] ] ) - def test_xml_processor( self, input_file, target_file, expect_fail=False ): + def test_xml_processor( self: Self, input_file: str, target_file: str, expect_fail: bool = False ) -> None: + """Test of xml processor. + + Args: + input_file (str): input file name + target_file (str): target file name + expect_fail (bool, optional): Accept failure if True. Defaults to False. 
+ """ try: tmp = xml_processor.process( input_file, outputFile=input_file + '.processed', @@ -129,8 +172,13 @@ def test_xml_processor( self, input_file, target_file, expect_fail=False ): self.assertTrue( expect_fail ) -# Main entry point for the unit tests -def run_unit_tests( test_dir, verbose ): +def run_unit_tests( test_dir: str, verbose: int ) -> None: + """Main entry point for the unit tests. + + Args: + test_dir (str): test directory + verbose (int): verbosity + """ # Create and move to the test directory pwd = os.getcwd() os.makedirs( test_dir, exist_ok=True ) @@ -159,12 +207,11 @@ def run_unit_tests( test_dir, verbose ): os.chdir( pwd ) -def main(): - """Entry point for the geosx_xml_tools unit tests +def main() -> None: + """Entry point for the geosx_xml_tools unit tests. @arg -o/--output Output directory (default = ./test_results) """ - # Parse the user arguments parser = argparse.ArgumentParser() parser.add_argument( '-t', '--test_dir', type=str, help='Test output directory', default='./test_results' ) diff --git a/geos-xml-tools/src/geos/xml_tools/unit_manager.py b/geos-xml-tools/src/geos/xml_tools/unit_manager.py index 228c505e6..1747fc2d0 100644 --- a/geos-xml-tools/src/geos/xml_tools/unit_manager.py +++ b/geos-xml-tools/src/geos/xml_tools/unit_manager.py @@ -1,8 +1,8 @@ -"""Tools for managing units in GEOSX""" - import re from geos.xml_tools import regex_tools -from typing import List, Any, Dict, Union +from typing import List, Any, Dict + +__doc__ = """Tools for managing units in GEOSX.""" class UnitManager(): @@ -23,7 +23,6 @@ def __call__( self, unitStruct: List[ Any ] ) -> str: Returns: str: The string with evaluated unit definitions """ - # Replace all instances of units in the string with their scale defined in self.units symbolicUnits = re.sub( regex_tools.patterns[ 'units_b' ], self.unitMatcher, unitStruct[ 1 ] ) @@ -53,7 +52,6 @@ def regexHandler( self, match: re.Match ) -> str: def buildUnits( self ) -> None: """Build the unit definitions.""" - # yapf: disable # Long, short names for SI prefixes unit_dict_type = Dict[str, Dict[str, Any]] @@ -133,9 +131,9 @@ def buildUnits( self ) -> None: unit_defs[ alt ] = { 'value': unit_defs[ u ][ 'value' ], 'usePrefix': unit_defs[ u ][ 'usePrefix' ] } # Combine the results into the final dictionary - for u in unit_defs.keys(): + for u in unit_defs: if ( unit_defs[ u ][ 'usePrefix' ] ): - for p in prefixes.keys(): + for p in prefixes: self.units[ p + u ] = prefixes[ p ][ 'value' ] * unit_defs[ u ][ 'value' ] else: self.units[ u ] = unit_defs[ u ][ 'value' ] diff --git a/geos-xml-tools/src/geos/xml_tools/xml_formatter.py b/geos-xml-tools/src/geos/xml_tools/xml_formatter.py index 8edfa1e3f..fbb93e58e 100644 --- a/geos-xml-tools/src/geos/xml_tools/xml_formatter.py +++ b/geos-xml-tools/src/geos/xml_tools/xml_formatter.py @@ -6,7 +6,7 @@ def format_attribute( attribute_indent: str, ka: str, attribute_value: str ) -> str: - """Format xml attribute strings + """Format xml attribute strings. 
Args: attribute_indent (str): Attribute indent string @@ -31,7 +31,7 @@ def format_attribute( attribute_indent: str, ka: str, attribute_value: str ) -> split_positions: List[ Any ] = [ match.end() for match in re.finditer( r"}\s*,", attribute_value ) ] newline_indent = '\n%s' % ( ' ' * ( len( attribute_indent ) + len( ka ) + 4 ) ) new_values = [] - for a, b in zip( [ None ] + split_positions, split_positions + [ None ] ): + for a, b in zip( [ None ] + split_positions, split_positions + [ None ], strict=False ): new_values.append( attribute_value[ a:b ].strip() ) if new_values: attribute_value = newline_indent.join( new_values ) @@ -48,7 +48,7 @@ def format_xml_level( output: TextIO, sort_attributes: bool = False, close_tag_newline: bool = False, include_namespace: bool = False ) -> None: - """Iteratively format the xml file + """Iteratively format the xml file. Args: output (file): the output text file handle @@ -61,7 +61,6 @@ def format_xml_level( output: TextIO, close_tag_newline (bool): option to place close tag on a separate line include_namespace (bool): option to include the xml namespace in the output """ - # Handle comments if node.tag is ElementTree.Comment: output.write( '\n%s' % ( indent * level, node.text ) ) @@ -110,7 +109,7 @@ def format_xml_level( output: TextIO, if len( node ): output.write( '>' ) Nc = len( node ) - for ii, child in zip( range( Nc ), node ): + for ii, child in zip( range( Nc ), node, strict=False ): format_xml_level( output, child, level + 1, indent, block_separation_max_depth, modify_attribute_indent, sort_attributes, close_tag_newline, include_namespace ) @@ -135,7 +134,7 @@ def format_file( input_fname: str, alphebitize_attributes: bool = False, close_style: bool = False, namespace: bool = False ) -> None: - """Script to format xml files + """Script to format xml files. Args: input_fname (str): Input file name @@ -176,11 +175,11 @@ def format_file( input_fname: str, except ElementTree.ParseError as err: print( '\nCould not load file: %s' % ( fname ) ) print( err.msg ) - raise Exception( '\nCheck input file!' ) + raise Exception( '\nCheck input file!' ) from err def main() -> None: - """Script to format xml files + """Script to format xml files. Args: input (str): Input file name diff --git a/geos-xml-tools/src/geos/xml_tools/xml_processor.py b/geos-xml-tools/src/geos/xml_tools/xml_processor.py index 9795c4b52..b822abd12 100644 --- a/geos-xml-tools/src/geos/xml_tools/xml_processor.py +++ b/geos-xml-tools/src/geos/xml_tools/xml_processor.py @@ -1,5 +1,3 @@ -"""Tools for processing xml files in GEOSX""" - from lxml import etree as ElementTree # type: ignore[import] from lxml.etree import XMLSyntaxError # type: ignore[import] import re @@ -12,6 +10,8 @@ unitManager = unit_manager.UnitManager() parameterHandler = regex_tools.DictRegexHandler() +__doc__ = """Tools for processing xml files in GEOSX.""" + def merge_xml_nodes( existingNode: ElementTree.Element, targetNode: ElementTree.Element, level: int ) -> None: """Merge nodes in an included file into the current structure level by level. @@ -21,9 +21,8 @@ def merge_xml_nodes( existingNode: ElementTree.Element, targetNode: ElementTree. targetNode (lxml.etree.Element): The node to insert. level (int): The xml file depth. 
""" - # Copy attributes on the current level - for tk in targetNode.attrib.keys(): + for tk in targetNode.attrib: existingNode.set( tk, targetNode.get( tk ) ) # Copy target children into the xml structure @@ -69,7 +68,6 @@ def merge_included_xml_files( root: ElementTree.Element, fname: str, includeCoun includeCount (int): The current recursion depth. maxInclude (int): The maximum number of xml files to include (default = 100) """ - # Expand the input path pwd = os.getcwd() includePath, fname = os.path.split( os.path.abspath( os.path.expanduser( fname ) ) ) @@ -93,7 +91,7 @@ def merge_included_xml_files( root: ElementTree.Element, fname: str, includeCoun except XMLSyntaxError as err: print( '\nCould not load included file: %s' % ( fname ) ) print( err.msg ) - raise Exception( '\nCheck included file!' ) + raise Exception( '\nCheck included file!' ) from err # Recursively add the includes: for includeNode in includeRoot.findall( 'Included' ): @@ -106,14 +104,12 @@ def merge_included_xml_files( root: ElementTree.Element, fname: str, includeCoun def apply_regex_to_node( node: ElementTree.Element ) -> None: - """Apply regexes that handle parameters, units, and symbolic math to each - xml attribute in the structure. + """Apply regexes that handle parameters, units, and symbolic math to each xml attribute in the structure. Args: node (lxml.etree.Element): The target node in the xml structure. """ - - for k in node.attrib.keys(): + for k in node.attrib: value = node.get( k ) # Parameter format: $Parameter or $:Parameter @@ -143,7 +139,7 @@ def apply_regex_to_node( node: ElementTree.Element ) -> None: def generate_random_name( prefix: str = '', suffix: str = '.xml' ) -> str: - """If the target name is not specified, generate a random name for the compiled xml + """If the target name is not specified, generate a random name for the compiled xml. Args: prefix (str): The file prefix (default = ''). @@ -160,14 +156,15 @@ def generate_random_name( prefix: str = '', suffix: str = '.xml' ) -> str: return '%s%s%s' % ( prefix, md5( tmp.encode( 'utf-8' ) ).hexdigest(), suffix ) -def process( inputFiles: Iterable[ str ], - outputFile: str = '', - schema: str = '', - verbose: int = 0, - parameter_override: List[ Tuple[ str, str ] ] = [], - keep_parameters: bool = True, - keep_includes: bool = True ) -> str: - """Process an xml file +def process( + inputFiles: Iterable[ str ], + outputFile: str = '', + schema: str = '', + verbose: int = 0, + parameter_override: List[ Tuple[ str, str ] ] = [], # noqa: B006 + keep_parameters: bool = True, + keep_includes: bool = True ) -> str: + """Process an xml file. Args: inputFiles (list): Input file names. @@ -206,7 +203,7 @@ def process( inputFiles: Iterable[ str ], except XMLSyntaxError as err: print( '\nCould not load input file: %s' % ( single_input ) ) print( err.msg ) - raise Exception( '\nCheck input file!' ) + raise Exception( '\nCheck input file!' ) from err else: # For multiple inputs, create a simple xml structure to hold @@ -278,7 +275,8 @@ def process( inputFiles: Iterable[ str ], # Check for un-matched special characters with open( outputFile, 'r' ) as ofile: for line in ofile: - if any( [ sc in line for sc in [ '$', '[', ']', '`' ] ] ): + print() + if any( [ sc in line for sc in [ '$', '[', ']', '`' ] ] ): #noqa: C419 raise Exception( 'Found un-matched special characters in the pre-processed input file on line:\n%s\n Check your input xml for errors!' 
% ( line ) ) diff --git a/geos-xml-tools/src/geos/xml_tools/xml_redundancy_check.py b/geos-xml-tools/src/geos/xml_tools/xml_redundancy_check.py index b81411570..42773782b 100644 --- a/geos-xml-tools/src/geos/xml_tools/xml_redundancy_check.py +++ b/geos-xml-tools/src/geos/xml_tools/xml_redundancy_check.py @@ -10,7 +10,7 @@ def check_redundancy_level( local_schema: Dict[ str, Any ], node: ElementTree.Element, whitelist: Iterable[ str ] = [ 'component' ] ) -> int: - """Check xml redundancy at the current level + """Check xml redundancy at the current level. Args: local_schema (dict): Schema definitions @@ -21,14 +21,11 @@ def check_redundancy_level( local_schema: Dict[ str, Any ], int: Number of required attributes in the node and its children """ node_is_required = 0 - for ka in node.attrib.keys(): - if ( ka in whitelist ): - node_is_required += 1 - elif ( ka not in local_schema[ 'attributes' ] ): - node_is_required += 1 - elif ( 'default' not in local_schema[ 'attributes' ][ ka ] ): - node_is_required += 1 - elif ( node.get( ka ) != local_schema[ 'attributes' ][ ka ][ 'default' ] ): + for ka in node.attrib: + if ( ka in whitelist ) or ( ka not in local_schema[ 'attributes' ] ) or ( + 'default' + not in local_schema[ 'attributes' ][ ka ] ) or ( node.get( ka ) + != local_schema[ 'attributes' ][ ka ][ 'default' ] ): node_is_required += 1 else: node.attrib.pop( ka ) @@ -45,7 +42,7 @@ def check_redundancy_level( local_schema: Dict[ str, Any ], def check_xml_redundancy( schema: Dict[ str, Any ], fname: str ) -> None: - """Check redundancy in an xml file + """Check redundancy in an xml file. Args: schema (dict): Schema definitions @@ -59,12 +56,11 @@ def check_xml_redundancy( schema: Dict[ str, Any ], fname: str ) -> None: def process_xml_files( geosx_root: str ) -> None: - """Test for xml redundancy + """Test for xml redundancy. Args: geosx_root (str): GEOSX root directory """ - # Parse the schema geosx_root = os.path.expanduser( geosx_root ) schema_fname = '%ssrc/coreComponents/schema/schema.xsd' % ( geosx_root ) @@ -80,12 +76,11 @@ def process_xml_files( geosx_root: str ) -> None: def main() -> None: - """Entry point for the xml attribute usage test script + """Entry point for the xml attribute usage test script. Args: -r/--root (str): GEOSX root directory """ - # Parse the user arguments parser = command_line_parsers.build_xml_redundancy_input_parser() args = parser.parse_args() diff --git a/geos-xml-viewer/pyproject.toml b/geos-xml-viewer/pyproject.toml index 5bc56447f..9b402a3d0 100644 --- a/geos-xml-viewer/pyproject.toml +++ b/geos-xml-viewer/pyproject.toml @@ -36,6 +36,7 @@ dependencies = [ "lxml >= 4.9", "xsdata >= 24", "colorcet >= 3.0.1", + "typing_extensions>=4.12", ] requires-python = ">= 3.9" diff --git a/geos-xml-viewer/src/PVplugins/deckReader.py b/geos-xml-viewer/src/PVplugins/deckReader.py index 53f72bf1b..239634577 100644 --- a/geos-xml-viewer/src/PVplugins/deckReader.py +++ b/geos-xml-viewer/src/PVplugins/deckReader.py @@ -1,7 +1,7 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
# SPDX-FileContributor: Lionel Untereiner -from typing import Self +from typing_extensions import Self from paraview.util.vtkAlgorithm import smdomain, smhint, smproperty, smproxy from vtkmodules.util.vtkAlgorithm import VTKPythonAlgorithmBase diff --git a/hdf5-wrapper/pyproject.toml b/hdf5-wrapper/pyproject.toml index ee0a0c14c..abc15a4a1 100644 --- a/hdf5-wrapper/pyproject.toml +++ b/hdf5-wrapper/pyproject.toml @@ -16,6 +16,7 @@ requires-python = ">=3.8" dependencies = [ "h5py>=2.10.0", "numpy>=1.16.2", + "typing_extensions>=4.12", ] [project.scripts] diff --git a/hdf5-wrapper/src/geos/hdf5_wrapper/__init__.py b/hdf5-wrapper/src/geos/hdf5_wrapper/__init__.py index 0f724ef67..48befb2c1 100644 --- a/hdf5-wrapper/src/geos/hdf5_wrapper/__init__.py +++ b/hdf5-wrapper/src/geos/hdf5_wrapper/__init__.py @@ -1 +1 @@ -from .wrapper import hdf5_wrapper +from .wrapper import hdf5_wrapper # noqa: F401 diff --git a/hdf5-wrapper/src/geos/hdf5_wrapper/use_example.py b/hdf5-wrapper/src/geos/hdf5_wrapper/use_example.py index 354254586..9090042cb 100644 --- a/hdf5-wrapper/src/geos/hdf5_wrapper/use_example.py +++ b/hdf5-wrapper/src/geos/hdf5_wrapper/use_example.py @@ -4,15 +4,14 @@ def print_database_iterative( database: hdf5_wrapper, level: int = 0 ) -> None: - """ - Print the database targets iteratively by level + """Print the database targets iteratively by level. Args: - database (hdf5_wrapper) the wrapper for the current database + database (hdf5_wrapper): the wrapper for the current database level (int): the depth within the database """ # Note: you can also iterate over the hdf5_wrapper object directly - for k in database.keys(): + for k in database: print( '%s%s' % ( ' ' * level, k ) ) if isinstance( database[ k ], hdf5_wrapper ): @@ -25,10 +24,7 @@ def print_database_iterative( database: hdf5_wrapper, level: int = 0 ) -> None: def read_write_hdf5_database_example() -> None: - """ - Simple demonstration of hdf5_wrapper - """ - + """Simple demonstration of hdf5_wrapper.""" # ------------------------ # Generate test data # ------------------------ diff --git a/hdf5-wrapper/src/geos/hdf5_wrapper/wrapper.py b/hdf5-wrapper/src/geos/hdf5_wrapper/wrapper.py index cd2bafd51..c867fb4c7 100644 --- a/hdf5-wrapper/src/geos/hdf5_wrapper/wrapper.py +++ b/hdf5-wrapper/src/geos/hdf5_wrapper/wrapper.py @@ -1,7 +1,7 @@ import h5py # type: ignore[import] import numpy as np from numpy.core.defchararray import encode, decode -from typing import Union, Dict, Any, Iterable, Optional, Tuple +from typing_extensions import Self, Union, Dict, Any, Iterable, Optional, Tuple # Note: I would like to replace Any here with str, float, int, np.ndarray, etc. # However, this heterogeneous pattern causes issues with mypy indexing @@ -11,13 +11,10 @@ class hdf5_wrapper(): - """ - A class for reading/writing hdf5 files, which behaves similar to a native dict - """ + """A class for reading/writing hdf5 files, which behaves similar to a native dict.""" - def __init__( self, fname: str = '', target: Optional[ h5py.File ] = None, mode: str = 'r' ) -> None: - """ - Initialize the hdf5_wrapper class + def __init__( self: Self, fname: str = '', target: Optional[ h5py.File ] = None, mode: str = 'r' ) -> None: + """Initialize the hdf5_wrapper class. If the fname is supplied (either by a positional or keyword argument), the wrapper will open a hdf5 database from the filesystem. @@ -30,18 +27,17 @@ def __init__( self, fname: str = '', target: Optional[ h5py.File ] = None, mode: be created using an existing database handle. 
Args: - fname (str): the filename of a new or existing hdf5 database - target (hdf5_wrapper): the handle of an existing hdf5 dataset - mode (str): the read/write behavior of the database (default='r') + fname (str): the filename of a new or existing hdf5 database. Defaults to ''. + target (hdf5_wrapper): the handle of an existing hdf5 dataset. Defaults to None. + mode (str): the read/write behavior of the database. Defaults to 'r'. """ self.mode: str = mode self.target: h5py.File = target if fname: self.target = h5py.File( fname, self.mode ) - def __getitem__( self, k: str ) -> hdf5_get_types: - """ - Get a target from the database + def __getitem__( self: Self, k: str ) -> hdf5_get_types: + """Get a target from the database. If the target is not present in the datastructure and the database is open in read/write mode, the wrapper will create a @@ -78,9 +74,8 @@ def __getitem__( self, k: str ) -> hdf5_get_types: else: return tmp - def __setitem__( self, k: str, value: hdf5_set_types ): - """ - Write an object to the database if write-mode is enabled + def __setitem__( self: Self, k: str, value: hdf5_set_types ) -> None: + """Write an object to the database if write-mode is enabled. Args: k (str): the name of the object @@ -109,9 +104,8 @@ def __setitem__( self, k: str, value: hdf5_set_types ): 'Cannot write to an hdf5 opened in read-only mode! This can be changed by overriding the default mode argument for the wrapper.' ) - def link( self, k: str, target: str ) -> None: - """ - Link an external hdf5 file to this location in the database + def link( self: Self, k: str, target: str ) -> None: + """Link an external hdf5 file to this location in the database. Args: k (str): the name of the new link in the database @@ -119,9 +113,8 @@ def link( self, k: str, target: str ) -> None: """ self.target[ k ] = h5py.ExternalLink( target, '/' ) - def keys( self ) -> Iterable[ str ]: - """ - Get a list of groups and arrays located at the current level + def keys( self: Self ) -> Iterable[ str ]: + """Get a list of groups and arrays located at the current level. Returns: list: a list of key names pointing to objects at the current level @@ -131,47 +124,41 @@ def keys( self ) -> Iterable[ str ]: else: raise ValueError( 'Object not a group!' ) - def values( self ) -> Iterable[ hdf5_get_types ]: - """ - Get a list of values located on the current level - """ + def values( self: Self ) -> Iterable[ hdf5_get_types ]: + """Get a list of values located on the current level.""" return [ self[ k ] for k in self.keys() ] - def items( self ) -> Iterable[ Tuple[ str, hdf5_get_types ] ]: - return zip( self.keys(), self.values() ) + def items( self: Self ) -> Iterable[ Tuple[ str, hdf5_get_types ] ]: + """Get the list of (key, value) pairs located at the current level.
- def __enter__( self ): - """ - Entry point for an iterator + Returns: + Iterable[ Tuple[ str, hdf5_get_types ] ]: list of (key, value) pairs """ + return zip( self.keys(), self.values() ) + + def __enter__( self: Self ) -> Self: + """Entry point for the context manager.""" return self - def __exit__( self, type, value, traceback ) -> None: - """ - End point for an iterator - """ + def __exit__( self: Self, exc_type: Any, exc_value: Any, traceback: Any ) -> None: + """Exit point for the context manager.""" self.target.close() - def __del__( self ) -> None: - """ - Closes the database on wrapper deletion - """ + def __del__( self: Self ) -> None: + """Closes the database on wrapper deletion.""" try: if isinstance( self.target, h5py._hl.files.File ): self.target.close() - except: + except: #noqa: E722 pass - def close( self ) -> None: - """ - Closes the database - """ + def close( self: Self ) -> None: + """Closes the database.""" if isinstance( self.target, h5py._hl.files.File ): self.target.close() - def get_copy( self ) -> nested_dict_type: - """ - Copy the entire database into memory + def get_copy( self: Self ) -> nested_dict_type: + """Copy the entire database into memory. Returns: dict: a dictionary holding the database contents @@ -186,18 +173,16 @@ def get_copy( self ) -> nested_dict_type: return result - def copy( self ) -> nested_dict_type: - """ - Copy the entire database into memory + def copy( self: Self ) -> nested_dict_type: + """Copy the entire database into memory. Returns: dict: a dictionary holding the database contents """ return self.get_copy() - def insert( self, x: Union[ nested_dict_type, 'hdf5_wrapper' ] ) -> None: - """ - Insert the contents of the target object to the current location + def insert( self: Self, x: Union[ nested_dict_type, 'hdf5_wrapper' ] ) -> None: + """Insert the contents of the target object to the current location. Args: x (dict, hdf5_wrapper): the dictionary to insert diff --git a/hdf5-wrapper/src/geos/hdf5_wrapper/wrapper_tests.py b/hdf5-wrapper/src/geos/hdf5_wrapper/wrapper_tests.py index de871b0c4..b56f391e9 100644 --- a/hdf5-wrapper/src/geos/hdf5_wrapper/wrapper_tests.py +++ b/hdf5-wrapper/src/geos/hdf5_wrapper/wrapper_tests.py @@ -1,3 +1,5 @@ +from typing_extensions import Self +from typing import Any import unittest import os import argparse @@ -7,11 +9,28 @@ from geos.hdf5_wrapper import hdf5_wrapper -def random_string( N ): +def random_string( N: int ) -> str: + """Generate random string. + + Args: + N (int): length of the generated string + + Returns: + str: random string of length N + """ return ''.join( random.choices( string.ascii_uppercase + string.ascii_lowercase + string.digits, k=N ) ) -def build_test_dict( depth=0, max_depth=3 ): +def build_test_dict( depth: int = 0, max_depth: int = 3 ) -> dict[ str, Any ]: + """Build test dictionary. + + Args: + depth (int, optional): depth. Defaults to 0. + max_depth (int, optional): maximum depth. Defaults to 3. + + Returns: + dict[str, Any]: test dictionary.
+ """ r = [ np.random.randint( 2, 20 ) for x in range( 5 ) ] test = { 'int': np.random.randint( -1000000, 1000000 ), @@ -33,12 +52,22 @@ def build_test_dict( depth=0, max_depth=3 ): class Testhdf5_wrapper( unittest.TestCase ): @classmethod - def setUpClass( cls ): + def setUpClass( cls ) -> None: + """Set the tests up.""" cls.test_dir = 'wrapper_tests' # type: ignore[attr-defined] os.makedirs( cls.test_dir, exist_ok=True ) # type: ignore[attr-defined] cls.test_dict = build_test_dict() # type: ignore[attr-defined] - def compare_wrapper_dict( self, x, y ): + def compare_wrapper_dict( self, x: dict[ str, Any ], y: dict[ str, Any ] ) -> None: + """Compare dictionnary wrapper. + + Args: + x (dict[str, Any]): first dict + y (dict[str, Any]): second dict + + Raises: + Exception: Key in dictionnary + """ kx = x.keys() ky = y.keys() @@ -65,16 +94,19 @@ def compare_wrapper_dict( self, x, y ): else: self.assertTrue( vx == vy ) - def test_a_insert_write( self ): + def test_a_insert_write( self: Self ) -> None: + """Test insert.""" data = hdf5_wrapper( os.path.join( self.test_dir, 'test_insert.hdf5' ), mode='w' ) # type: ignore[attr-defined] data.insert( self.test_dict ) # type: ignore[attr-defined] - def test_b_manual_write( self ): + def test_b_manual_write( self: Self ) -> None: + """Test manual write.""" data = hdf5_wrapper( os.path.join( self.test_dir, 'test_manual.hdf5' ), mode='w' ) # type: ignore[attr-defined] for k, v in self.test_dict.items(): # type: ignore[attr-defined] data[ k ] = v - def test_c_link_write( self ): + def test_c_link_write( self: Self ) -> None: + """Test of link.""" data = hdf5_wrapper( os.path.join( self.test_dir, 'test_linked.hdf5' ), mode='w' ) # type: ignore[attr-defined] for k, v in self.test_dict.items(): # type: ignore[attr-defined] if ( 'child' in k ): @@ -85,31 +117,34 @@ def test_c_link_write( self ): else: data[ k ] = v - def test_d_compare_wrapper( self ): + def test_d_compare_wrapper( self: Self ) -> None: + """Test of compare_wrapper.""" data = hdf5_wrapper( os.path.join( self.test_dir, 'test_insert.hdf5' ) ) # type: ignore[attr-defined] self.compare_wrapper_dict( self.test_dict, data ) # type: ignore[attr-defined] - def test_e_compare_wrapper_copy( self ): + def test_e_compare_wrapper_copy( self: Self ) -> None: + """Test of compare_wrapper.""" data = hdf5_wrapper( os.path.join( self.test_dir, 'test_insert.hdf5' ) ) # type: ignore[attr-defined] tmp = data.copy() self.compare_wrapper_dict( self.test_dict, tmp ) # type: ignore[attr-defined] - def test_f_compare_wrapper( self ): + def test_f_compare_wrapper( self: Self ) -> None: + """Test of compare_wrapper.""" data = hdf5_wrapper( os.path.join( self.test_dir, 'test_manual.hdf5' ) ) # type: ignore[attr-defined] self.compare_wrapper_dict( self.test_dict, data ) # type: ignore[attr-defined] - def test_g_compare_wrapper( self ): + def test_g_compare_wrapper( self: Self ) -> None: + """Test of compare_wrapper.""" data = hdf5_wrapper( os.path.join( self.test_dir, 'test_linked.hdf5' ) ) # type: ignore[attr-defined] self.compare_wrapper_dict( self.test_dict, data ) # type: ignore[attr-defined] -def main(): - """Entry point for the geosx_xml_tools unit tests +def main() -> None: + """Entry point for the geosx_xml_tools unit tests. Args: -v/--verbose (int): Output verbosity """ - # Parse the user arguments parser = argparse.ArgumentParser() parser.add_argument( '-v', '--verbose', type=int, help='Verbosity level', default=2 )