diff --git a/changelogs/fragments/2256-zos_archive-interface-update.yml b/changelogs/fragments/2256-zos_archive-interface-update.yml new file mode 100644 index 000000000..6b8f5102d --- /dev/null +++ b/changelogs/fragments/2256-zos_archive-interface-update.yml @@ -0,0 +1,7 @@ +breaking_changes: + - zos_archive - Option ``format.format_options`` is deprecated in favor of ``format.options``. + Option ``format.format_options.name`` is deprecated in favor of ``format.options.type``. + Option ``format.format_options.use_adrdssu`` is deprecated in favor of ``format.options.adrdssu``. + Option ``format.format_options.terse_pack`` is deprecated in favor of ``format.options.spack`` as a new boolean option. + (https://github.com/ansible-collections/ibm_zos_core/pull/2256). + diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index e5dd8e975..2cca37a31 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -1108,12 +1108,6 @@ def _add_alias(self, arg_name, arg_aliases=None, aliases=None): aliases = {} arg_aliases.append(arg_name) for alternate_name in arg_aliases: - if aliases.get(alternate_name, arg_name) != arg_name: - raise ValueError( - 'Conflicting aliases "{0}" and "{1}" found for name "{2}"'.format( - aliases.get(alternate_name), alternate_name, arg_name - ) - ) aliases[alternate_name] = arg_name return aliases diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 5f8ca037d..b6faf3161 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -454,7 +454,7 @@ def main(): deprecated_aliases=[ dict( name='data_set_name', - version='2.0.0', + version='3.0.0', collection_name='ibm.ibm_zos_core') ], ), diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index a02ec00a5..158250ad4 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -51,7 +51,7 @@ type: dict required: false 
suboptions: - name: + type: description: - The compression format to use. type: str @@ -65,27 +65,27 @@ - terse - xmit - pax - format_options: + aliases: [ name ] + options: description: - Options specific to a compression format. type: dict required: false + aliases: [ format_options ] suboptions: - terse_pack: + spack: description: - Compression option for use with the terse format, - I(name=terse). + I(type=terse). - Pack will compress records in a data set so that the output results in lossless data compression. - Spack will compress records in a data set so the output results in complex data compression. - Spack will produce smaller output and take approximately 3 times longer than pack compression. - type: str + type: bool required: false - choices: - - pack - - spack + default: true xmit_log_data_set: description: - Provide the name of a data set to store xmit log output. @@ -97,7 +97,7 @@ - When providing the I(xmit_log_data_set) name, ensure there is adequate space. type: str - use_adrdssu: + adrdssu: description: - If set to true, the C(zos_archive) module will use Data Facility Storage Management Subsystem data set services @@ -105,6 +105,7 @@ portable format before using C(xmit) or C(terse). type: bool default: false + aliases: [ use_adrdssu ] dest: description: - The remote absolute path or data set where the archive should be @@ -358,7 +359,7 @@ retrieve to the controller and then zos_copy or zos_unarchive for copying to a remote or send to the remote and then unpack the archive respectively. - - When packing and using C(use_adrdssu) flag the module will take up to two + - When packing and using C(adrdssu) flag the module will take up to two times the space indicated in C(dest_data_set). 
- tar, zip, bz2 and pax are archived using python C(tarfile) library which uses the latest version available for each format, for compatibility when @@ -378,7 +379,7 @@ src: /tmp/archive/foo.txt dest: /tmp/archive/foo_archive_test.tar format: - name: tar + type: tar # Archive multiple files - name: Archive list of files into a zip @@ -388,7 +389,7 @@ - /tmp/archive/bar.txt dest: /tmp/archive/foo_bar_archive_test.zip format: - name: zip + type: zip # Archive one data set into terse - name: Archive data set into a terse @@ -396,7 +397,7 @@ src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse # Use terse with different options - name: Archive data set into a terse, specify pack algorithm and use adrdssu @@ -404,10 +405,10 @@ src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - terse_pack: "spack" - use_adrdssu: true + type: terse + options: + spack: true + adrdssu: true # Use a pattern to store - name: Archive data set pattern using xmit @@ -416,7 +417,7 @@ exclude_sources: "USER.ARCHIVE.EXCLUDE.*" dest: "USER.ARCHIVE.RESULT.XMIT" format: - name: xmit + type: xmit - name: Archive multiple GDSs into a terse zos_archive: @@ -426,25 +427,25 @@ - "USER.GDG(-2)" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + adrdssu: true - name: Archive multiple data sets into a new GDS zos_archive: src: "USER.ARCHIVE.*" dest: "USER.GDG(+1)" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + adrdssu: true - name: Encode the source data set into Latin-1 before archiving into a terse data set zos_archive: src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse encoding: from: IBM-1047 to: ISO8859-1 @@ -456,9 +457,9 @@ - "USER.ARCHIVE2.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + 
adrdssu: true encoding: from: IBM-1047 to: ISO8859-1 @@ -467,6 +468,12 @@ ''' RETURN = r''' +dest: + description: + - The remote absolute path or data set where the archive was + created. + type: str + returned: always state: description: - The state of the input C(src). @@ -574,7 +581,7 @@ def get_archive_handler(module): The archive format for the module. """ - format = module.params.get("format").get("name") + format = module.params.get("format").get("type") if format in ["tar", "gz", "bz2", "pax"]: return TarArchive(module) elif format == "terse": @@ -705,7 +712,7 @@ def __init__(self, module): """ self.module = module self.dest = module.params['dest'] - self.format = module.params.get("format").get("name") + self.format = module.params.get("format").get("type") self.remove = module.params['remove'] self.changed = False self.errors = [] @@ -1179,7 +1186,7 @@ def __init__(self, module): ---------- original_checksums : str The SHA256 hash of the contents of input file. - use_adrdssu : bool + adrdssu : bool Whether to use Data Facility Storage Management Subsystem data set services program ADRDSSU to uncompress data sets or not. 
expanded_sources : list[str] @@ -1198,7 +1205,7 @@ def __init__(self, module): super(MVSArchive, self).__init__(module) self.tmphlq = module.params.get("tmp_hlq") self.original_checksums = self.dest_checksums() - self.use_adrdssu = module.params.get("format").get("format_options").get("use_adrdssu") + self.adrdssu = module.params.get("format").get("options").get("adrdssu") self.expanded_sources = self.expand_mvs_paths(self.sources) self.expanded_exclude_sources = self.expand_mvs_paths(module.params['exclude']) self.sources = sorted(set(self.expanded_sources) - set(self.expanded_exclude_sources)) @@ -1339,18 +1346,6 @@ def create_dest_ds(self, name): """ record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length, tmphlq=self.tmphlq) - # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) - # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) - # rc, out, err = self.module.run_command(cmd) - - # if not changed: - # self.module.fail_json( - # msg="Failed preparing {0} to be used as an archive".format(name), - # stdout=out, - # stderr=err, - # stdout_lines=cmd, - # rc=rc, - # ) return name def dump_into_temp_ds(self, temp_ds): @@ -1619,13 +1614,10 @@ def __init__(self, module): Compression option for use with the terse format. """ super(AMATerseArchive, self).__init__(module) - self.pack_arg = module.params.get("format").get("format_options").get("terse_pack") + spack = module.params.get("format").get("options").get("spack") # We store pack_ard in uppercase because the AMATerse command requires # it in uppercase. - if self.pack_arg is None: - self.pack_arg = "SPACK" - else: - self.pack_arg = self.pack_arg.upper() + self.pack_arg = "SPACK" if spack else "PACK" def add(self, src, archive): """Archive src into archive using AMATERSE program. 
@@ -1665,9 +1657,9 @@ def archive_targets(self): Raises ------ fail_json - To archive multiple source data sets, you must use option 'use_adrdssu=True'. + To archive multiple source data sets, you must use option 'adrdssu=True'. """ - if self.use_adrdssu: + if self.adrdssu: source, changed = self._create_dest_data_set( type="seq", record_format="u", @@ -1682,7 +1674,7 @@ def archive_targets(self): # If we don't use a adrdssu container we cannot pack multiple data sets if len(self.targets) > 1: self.module.fail_json( - msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") + msg="To archive multiple source data sets, you must use option 'adrdssu=True'.") source = self.targets[0] dataset = data_set.MVSDataSet( name=self.dest, @@ -1714,7 +1706,7 @@ def __init__(self, module): The name of the data set to store xmit log output. """ super(XMITArchive, self).__init__(module) - self.xmit_log_data_set = module.params.get("format").get("format_options").get("xmit_log_data_set") + self.xmit_log_data_set = module.params.get("format").get("options").get("xmit_log_data_set") def add(self, src, archive): """Archive src into archive using TSO XMIT. @@ -1759,9 +1751,9 @@ def archive_targets(self): Raises ------ fail_json - To archive multiple source data sets, you must use option 'use_adrdssu=True'. + To archive multiple source data sets, you must use option 'adrdssu=True'. 
""" - if self.use_adrdssu: + if self.adrdssu: source, changed = self._create_dest_data_set( type="seq", record_format="u", @@ -1776,7 +1768,7 @@ def archive_targets(self): # If we don't use a adrdssu container we cannot pack multiple data sets if len(self.sources) > 1: self.module.fail_json( - msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") + msg="To archive multiple source data sets, you must use option 'adrdssu=True'.") source = self.sources[0] # dest = self.create_dest_ds(self.dest) dataset = data_set.MVSDataSet( @@ -1871,25 +1863,49 @@ def run_module(): format=dict( type='dict', options=dict( - name=dict( + type=dict( type='str', default='gz', - choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'], + aliases=['name'], + deprecated_aliases=[ + dict( + name='name', + version='3.0.0', + collection_name='ibm.ibm_zos_core' + ) + ], ), - format_options=dict( + options=dict( type='dict', required=False, + aliases=['format_options'], + deprecated_aliases=[ + dict( + name='format_options', + version='3.0.0', + collection_name='ibm.ibm_zos_core' + ) + ], options=dict( - terse_pack=dict( - type='str', - choices=['pack', 'spack'], + spack=dict( + type='bool', + default=True, ), xmit_log_data_set=dict( type='str', ), - use_adrdssu=dict( + adrdssu=dict( type='bool', default=False, + aliases=['use_adrdssu'], + deprecated_aliases=[ + dict( + name='use_adrdssu', + version='3.0.0', + collection_name='ibm.ibm_zos_core' + ) + ], ) ), ), @@ -1966,41 +1982,44 @@ def run_module(): format=dict( type='dict', options=dict( - name=dict( + type=dict( type='str', default='gz', - choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'], + aliases=['name'], ), - format_options=dict( + options=dict( type='dict', required=False, options=dict( - terse_pack=dict( - type='str', + spack=dict( + type='bool', required=False, - 
choices=['pack', 'spack'], + default=True, ), xmit_log_data_set=dict( type='str', required=False, ), - use_adrdssu=dict( + adrdssu=dict( type='bool', default=False, + aliases=['use_adrdssu'], ) ), default=dict( - terse_pack="spack", + spack=True, xmit_log_data_set="", - use_adrdssu=False), + adrdssu=False), + aliases=['format_options'], ), ), default=dict( - name="", - format_options=dict( - terse_pack="spack", + type="", + options=dict( + spack=True, xmit_log_data_set="", - use_adrdssu=False + adrdssu=False ) ), ), diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index a48e79dff..83dff1026 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -131,7 +131,7 @@ def test_uss_single_archive(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) @@ -162,7 +162,7 @@ def test_uss_single_archive_with_mode(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format }, mode=dest_mode ) @@ -191,7 +191,7 @@ def test_uss_single_archive_with_force_option(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) @@ -203,7 +203,7 @@ def test_uss_single_archive_with_force_option(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) @@ -216,7 +216,7 @@ def test_uss_single_archive_with_force_option(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format }, force=True, ) @@ -257,7 +257,7 @@ def test_uss_archive_multiple_files(ansible_zos_module, ds_format, path): src=path.get("files"), dest=dest, format={ - "name":ds_format + "type":ds_format }, ) @@ -302,7 +302,7 @@ def 
test_uss_archive_multiple_files_with_exclude(ansible_zos_module, ds_format, src=path.get("files"), dest=dest, format={ - "name":ds_format + "type":ds_format }, exclude=path.get("exclude") ) @@ -337,7 +337,7 @@ def test_uss_archive_remove_targets(ansible_zos_module, ds_format): src=paths, dest=dest, format={ - "name":ds_format + "type":ds_format }, remove=True ) @@ -367,7 +367,7 @@ def test_uss_archive_encode(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format }, encoding={ "from": TO_ENCODING, @@ -401,7 +401,7 @@ def test_uss_archive_encode_skip_encoding(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format }, encoding={ "from": FROM_ENCODING, @@ -432,10 +432,10 @@ def test_uss_archive_encode_skip_encoding(ansible_zos_module, ds_format): # List of tests: # - test_mvs_archive_single_dataset -# - test_mvs_archive_single_dataset_use_adrdssu +# - test_mvs_archive_single_dataset_adrdssu # - test_mvs_archive_single_data_set_remove_target # - test_mvs_archive_multiple_data_sets -# - test_mvs_archive_multiple_data_sets_use_adrdssu +# - test_mvs_archive_multiple_data_sets_adrdssu # - test_mvs_archive_multiple_data_sets_remove_target # - test_mvs_archive_multiple_data_sets_with_exclusion # - test_mvs_archive_multiple_data_sets_with_missing @@ -519,11 +519,11 @@ def test_mvs_archive_single_dataset( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=src_data_set, @@ -536,7 +536,7 @@ def test_mvs_archive_single_dataset( assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + 
cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -571,7 +571,7 @@ def test_mvs_archive_single_dataset( @pytest.mark.parametrize( "record_format", ["fb", "vb"], ) -def test_mvs_archive_single_dataset_use_adrdssu( +def test_mvs_archive_single_dataset_adrdssu( ansible_zos_module, ds_format, data_set, @@ -617,13 +617,13 @@ def test_mvs_archive_single_dataset_use_adrdssu( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } - format_dict["format_options"] = { - "use_adrdssu":True + format_dict["options"] = { + "adrdssu":True } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") + format_dict["options"].update(spack=True) archive_result = hosts.all.zos_archive( src=src_data_set, dest=archive_data_set, @@ -635,7 +635,7 @@ def test_mvs_archive_single_dataset_use_adrdssu( assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -699,11 +699,11 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, ds_format hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=src_data_set, @@ -717,11 +717,17 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, ds_format assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result 
= hosts.all.shell(cmd = f"dls {hlq}.*") + # Changed to using the exact data set name in dls + # because using wildcards would fail. + # Assert archive data set is in place + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") - assert src_data_set != c_result.get("stdout") + # Assert src_data_set is removed + cmd_result = hosts.all.shell(cmd = f"dls {src_data_set}") + for c_result in cmd_result.contacted.values(): + assert f"BGYSC1103E No datasets match pattern: {src_data_set}." in c_result.get("stderr") finally: hosts.all.zos_data_set(name=src_data_set, state="absent") hosts.all.zos_data_set(name=archive_data_set, state="absent") @@ -773,12 +779,12 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, ds_format, data_set) hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{src_data_set}*", dest=archive_data_set, @@ -791,7 +797,7 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, ds_format, data_set) assert result.get("dest") == archive_data_set for ds in target_ds_list: assert ds.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -846,12 +852,12 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, ds_fo hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - 
"format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) exclude = f"{src_data_set}1" archive_result = hosts.all.zos_archive( src=f"{src_data_set}*", @@ -869,8 +875,8 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, ds_fo assert exclude not in result.get("archived") else: assert ds.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -924,12 +930,12 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, ds_format hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{src_data_set}*", dest=archive_data_set, @@ -1004,12 +1010,12 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, ds_form path_list = [ds.get("name") for ds in target_ds_list] format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=path_list, dest=archive_data_set, @@ -1027,8 +1033,8 @@ def 
test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, ds_form assert ds.get("name") not in result.get("archived") else: assert ds.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -1092,11 +1098,11 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, ds_format, da hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) @@ -1122,8 +1128,8 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, ds_format, da assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -1165,10 +1171,10 @@ def test_gdg_archive(ansible_zos_module, dstype, format): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - format_dict = dict(name=format, format_options=dict()) + format_dict = dict(type=format, options=dict()) if format == "terse": - format_dict["format_options"] = dict(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"] = dict(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=[f"{data_set_name}(0)",f"{data_set_name}(-1)" ], dest=archive_data_set, @@ -1179,7 +1185,7 @@ def 
test_gdg_archive(ansible_zos_module, dstype, format): assert result.get("dest") == archive_data_set assert f"{data_set_name}.G0001V00" in result.get("archived") assert f"{data_set_name}.G0002V00" in result.get("archived") - cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(hlq)) + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}' ") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -1208,10 +1214,10 @@ def test_archive_into_gds(ansible_zos_module, dstype, format): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - format_dict = dict(name=format, format_options=dict()) + format_dict = dict(type=format, options=dict()) if format == "terse": - format_dict["format_options"] = dict(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"] = dict(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=data_set_name, dest=f"{archive_data_set}(+1)", @@ -1220,9 +1226,9 @@ def test_archive_into_gds(ansible_zos_module, dstype, format): for result in archive_result.contacted.values(): assert result.get("changed") is True assert data_set_name in result.get("archived") - cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(hlq)) + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}*' ") for c_result in cmd_result.contacted.values(): - assert archive_data_set in c_result.get("stdout") + assert f"{archive_data_set}.G0001V00" in c_result.get("stdout") finally: hosts.all.shell(cmd=f"drm {hlq}.*") @@ -1289,11 +1295,11 @@ def test_mvs_archive_single_dataset_encoding( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = 
hosts.all.zos_archive( src=src_data_set, @@ -1306,7 +1312,7 @@ def test_mvs_archive_single_dataset_encoding( assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -1364,9 +1370,9 @@ def test_mvs_archive_multiple_dataset_pattern_encoding(ansible_zos_module, ds_fo ds_target = f"{ds_name}({member})" if member else ds_name hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_target}\"") - format_dict = {"name": ds_format} + format_dict = {"type": ds_format} if ds_format == "terse": - format_dict["format_options"] = {"terse_pack": "spack"} + format_dict["options"] = {"spack": True} for ds_name in matched_datasets: archive_data_set = get_tmp_ds_name() archive_result = hosts.all.zos_archive( @@ -1438,14 +1444,14 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_skip_encoding(ansible_zos ds_target = f"{ds_name}({member})" if member else ds_name hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_target}\"") - format_dict = {"name": ds_format} + format_dict = {"type": ds_format} if ds_format == "terse": - format_dict["format_options"] = {"terse_pack": "spack"} - #skipping some files to encode + format_dict["options"] = {"spack": True} + #skipping some files to encode skip_encoding_list = [matched_datasets[0]] current_encoding_config = encoding.copy() current_encoding_config["skip_encoding"] = skip_encoding_list - + for ds_name in matched_datasets: archive_data_set = get_tmp_ds_name() archive_result = hosts.all.zos_archive( @@ -1511,16 +1517,16 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib type="member", state="present" ) - + test_line = "pattern match" for ds_name in all_datasets_to_process: for member in data_set.get("members"): ds_target = 
f"{ds_name}({member})" if member else ds_name hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_target}\"") - format_dict = {"name": ds_format} + format_dict = {"type": ds_format} if ds_format == "terse": - format_dict["format_options"] = {"terse_pack": "spack"} + format_dict["options"] = {"spack": True} for ds_name in matched_datasets: original_hex_result = hosts.all.shell(cmd=f"dcat '{ds_name}' | od -x") @@ -1548,7 +1554,7 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib original_hex.append('*') else: parts = line.split() - if len(parts) > 1: + if len(parts) > 1: original_hex.extend(parts[1:]) reverted_hex = [] @@ -1564,13 +1570,13 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib try: original_first_star_idx = original_hex.index('*') except ValueError: - original_first_star_idx = len(original_hex) + original_first_star_idx = len(original_hex) try: reverted_first_star_idx = reverted_hex.index('*') except ValueError: reverted_first_star_idx = len(reverted_hex) - + original_hex_to_compare = original_hex[:original_first_star_idx] reverted_hex_to_compare = reverted_hex[:reverted_first_star_idx] @@ -1589,4 +1595,4 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib for ds_name in matched_datasets: hosts.all.zos_data_set(name=ds_name, state="absent") for archive_ds in archived_datasets: - hosts.all.zos_data_set(name=archive_ds, state="absent") \ No newline at end of file + hosts.all.zos_data_set(name=archive_ds, state="absent") diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index c773223ce..ac2fd27ab 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -689,10 +689,13 @@ def test_find_gdg_and_nonvsam_data_sets(ansible_zos_module): patterns=[f'{TEST_SUITE_HLQ}.*.*'], resource_type=["gdg", "nonvsam"], ) + data_sets = [{"name":data_set_name, "type": 
"NONVSAM"} for data_set_name in SEQ_NAMES] + data_sets.append({"name":gdg_b, "type": "GDG"}) for val in find_res.contacted.values(): assert val.get('msg') is None - assert len(val.get('data_sets')) == 4 - assert {"name":gdg_b, "type": "GDG"} in val.get('data_sets') + assert len(val.get('data_sets')) >= 4 + for data_set in data_sets: + assert data_set in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) finally: # Remove GDG. diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 5b4aff3df..c9ec47909 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -98,17 +98,11 @@ def create_multiple_data_sets(ansible_zos_module, base_name, n, ds_type, ): for i in range(n): curr_ds = { "name":base_name+str(i), - "type":ds_type, - "state":"present", - "replace":True, - "force":True } + ansible_zos_module.all.shell(cmd=f"dtouch -t{ds_type} '{base_name+str(i)}'") test_data_sets.append(curr_ds) # Create data sets in batch - ansible_zos_module.all.zos_data_set( - batch=test_data_sets - ) return test_data_sets @@ -117,15 +111,9 @@ def create_multiple_members(ansible_zos_module, pds_name, member_base_name, n): for i in range(n): curr_ds = { "name":f"{pds_name}({member_base_name}{i})", - "type":"member", - "state":"present", - "replace":True, - "force":True } + ansible_zos_module.all.shell(cmd=f"decho '' '{pds_name}({member_base_name}{i})'") test_members.append(curr_ds) - ansible_zos_module.all.zos_data_set( - batch=test_members - ) return test_members @@ -159,7 +147,7 @@ def test_uss_unarchive(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -198,7 +186,7 @@ def test_uss_unarchive_include(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) uss_files = 
[file[len(USS_TEMP_DIR)+1:] for file in USS_TEST_FILES] @@ -242,7 +230,7 @@ def test_uss_unarchive_exclude(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -285,7 +273,7 @@ def test_uss_unarchive_list(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -322,7 +310,7 @@ def test_uss_single_unarchive_with_mode(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) for file in list(USS_TEST_FILES.keys()): @@ -406,7 +394,7 @@ def test_uss_unarchive_encoding(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -448,7 +436,7 @@ def test_uss_unarchive_encoding_skip_encoding(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -543,26 +531,13 @@ def test_mvs_unarchive_single_data_set( mvs_dest_archive = get_tmp_ds_name() dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" - # Clean env - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") # Create source data set - hosts.all.zos_data_set( - name=dataset, - type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present", - replace=True - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") # Write some content into src the same size of the record, # need to reduce 4 from V and 
VB due to RDW if record_format in ["v", "vb"]: @@ -577,11 +552,11 @@ def test_mvs_unarchive_single_data_set( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type": ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack": True } archive_result = hosts.all.zos_archive( src=dataset, @@ -603,14 +578,15 @@ def test_mvs_unarchive_single_data_set( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, dest_data_set={ "name":dataset, @@ -634,8 +610,8 @@ def test_mvs_unarchive_single_data_set( for result in cat_result.contacted.values(): assert result.get("stdout") == test_line finally: - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") + hosts.all.shell(cmd=f"drm '{mvs_dest_archive}'") @pytest.mark.ds @pytest.mark.parametrize( @@ -677,26 +653,13 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( mvs_dest_archive = get_tmp_ds_name() dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" - # Clean env - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") # Create source data set - hosts.all.zos_data_set( - name=dataset, - type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", 
"pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present", - replace=True - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW if record_format in ["v", "vb"]: @@ -711,13 +674,13 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } format_dict["format_options"] = { - "use_adrdssu":True + "adrdssu":True } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") + format_dict["format_options"].update(spack=True) archive_result = hosts.all.zos_archive( src=dataset, dest=mvs_dest_archive, @@ -732,14 +695,18 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True ) @@ -753,8 +720,8 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( for c_result in cmd_result.contacted.values(): assert dataset in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") + hosts.all.shell(cmd=f"drm '{mvs_dest_archive}'") @pytest.mark.ds @pytest.mark.parametrize( @@ -803,12 +770,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, ds_form 
hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name").replace('$', '\\$'))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -818,12 +785,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, ds_form dataset = dataset.replace("$", "/$") hosts.all.shell(cmd=f"drm {dataset}*") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, force=True ) @@ -839,8 +810,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, ds_form assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f""" drm '{dataset}*' """) + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -893,12 +864,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include( hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) 
archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -911,13 +882,17 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include( # remote data_sets from host hosts.all.shell(cmd=f"drm {dataset}*") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # Unarchive action include_ds = f"{dataset}0" unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, include=[include_ds], ) @@ -938,8 +913,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include( assert target_ds.get("name") not in result.get("targets") assert target_ds.get("name") not in c_result.get("stdout") finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -992,12 +967,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude( hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -1007,13 +982,17 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude( # remote data_sets from host hosts.all.shell(cmd=f""" drm "{dataset}*" """) - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # 
Unarchive action exclude_ds = f"{dataset}0" unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, exclude=[exclude_ds], ) @@ -1033,8 +1012,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude( assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -1083,12 +1062,12 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, ds_format, dat hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -1098,12 +1077,16 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, ds_format, dat # remote data_sets from host hosts.all.shell(cmd=f"drm {dataset}*") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, list=True ) @@ -1119,8 +1102,8 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, ds_format, dat assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") not in c_result.get("stdout") finally: - 
hosts.all.shell(cmd=f"""drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -1184,24 +1167,28 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force( hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, format=format_dict, ) - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, force=force ) @@ -1221,8 +1208,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force( assert result.get("changed") is False assert result.get("failed", False) is True finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -1265,25 +1252,13 @@ def test_mvs_unarchive_single_data_set_remote_src( dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") - # Clean env - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") # Create source data set - hosts.all.zos_data_set( - name=dataset, - 
type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present" - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW if record_format in ["v", "vb"]: @@ -1298,13 +1273,13 @@ def test_mvs_unarchive_single_data_set_remote_src( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } - format_dict["format_options"] = { - "use_adrdssu":True + format_dict["options"] = { + "adrdssu":True } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") + format_dict["options"].update(spack=True) archive_result = hosts.all.zos_archive( src=dataset, dest=mvs_dest_archive, @@ -1318,7 +1293,7 @@ def test_mvs_unarchive_single_data_set_remote_src( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") # fetch archive data set into tmp folder fetch_result = hosts.all.zos_fetch( @@ -1330,12 +1305,16 @@ def test_mvs_unarchive_single_data_set_remote_src( for res in fetch_result.contacted.values(): source_path = res.get("dest") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=source_path, - format=format_dict, + format=unarchive_format_dict, remote_src=False, ) @@ -1355,8 +1334,8 
@@ def test_mvs_unarchive_single_data_set_remote_src( finally: - hosts.all.shell(cmd=f"drm {dataset}*") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") tmp_folder.cleanup() @@ -1378,9 +1357,6 @@ def test_mvs_unarchive_single_data_set_remote_src( }, ] ) -@pytest.mark.parametrize( - "record_length", [80] -) @pytest.mark.parametrize( "encoding", [ {"from": "IBM-1047", "to": "ISO8859-1"}, @@ -1390,7 +1366,6 @@ def test_mvs_unarchive_encoding( ansible_zos_module, ds_format, data_set, - record_length, encoding ): try: @@ -1399,26 +1374,14 @@ def test_mvs_unarchive_encoding( dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" record_format = "fb" - # Clean env - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + record_length = 80 # Create source data set - hosts.all.zos_data_set( - name=dataset, - type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present", - replace=True - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") test_line = "a" * record_length for member in data_set.get("members"): if member == "": @@ -1428,11 +1391,11 @@ def test_mvs_unarchive_encoding( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=dataset, @@ -1454,20 +1417,21 @@ def test_mvs_unarchive_encoding( for c_result in 
cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}' ") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, dest_data_set={ "name":dataset, "type":data_set.get("dstype"), "record_format":record_format, - "record_length":record_length + "record_length":record_length }, encoding=encoding, ) @@ -1481,8 +1445,8 @@ def test_mvs_unarchive_encoding( for c_result in cmd_result.contacted.values(): assert dataset in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @@ -1524,26 +1488,13 @@ def test_mvs_unarchive_encoding_skip_encoding( dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" record_format = "fb" - # Clean env - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") # Create source data set - hosts.all.zos_data_set( - name=dataset, - type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present", - replace=True - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") test_line = "a" * record_length for member in data_set.get("members"): if member == "": @@ -1553,11 +1504,11 @@ def 
test_mvs_unarchive_encoding_skip_encoding( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=dataset, @@ -1579,12 +1530,13 @@ def test_mvs_unarchive_encoding_skip_encoding( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}' ") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + } - #skipping some files to encode + # skipping some files to encode skip_encoding_list = [dataset] current_encoding_config = encoding.copy() current_encoding_config["skip_encoding"] = skip_encoding_list @@ -1592,13 +1544,13 @@ def test_mvs_unarchive_encoding_skip_encoding( # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, dest_data_set={ "name":dataset, "type":data_set.get("dstype"), "record_format":record_format, - "record_length":record_length + "record_length":record_length }, encoding=encoding, ) @@ -1612,8 +1564,8 @@ def test_mvs_unarchive_encoding_skip_encoding( for c_result in cmd_result.contacted.values(): assert dataset in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @@ -1668,12 +1620,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_encoding(ansible_zos_module hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name").replace('$', '\\$'))) format_dict = { - "name":ds_format, - 
"format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -1683,12 +1635,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_encoding(ansible_zos_module dataset = dataset.replace("$", "/$") hosts.all.shell(cmd=f"drm {dataset}*") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, force=True, encoding=encoding @@ -1705,8 +1661,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_encoding(ansible_zos_module assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): @@ -1716,17 +1672,17 @@ def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): # False path source_path = "/tmp/OMVSADM.NULL" - format_dict = { + unarchive_format_dict = { "name":'terse' } - format_dict["format_options"] = { + unarchive_format_dict["format_options"] = { "use_adrdssu":True } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=source_path, - format=format_dict, + format=unarchive_format_dict, remote_src=False, ) @@ -1738,24 +1694,26 @@ def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): @pytest.mark.ds 
@pytest.mark.parametrize( - "format", [ + "ds_format", [ "terse", "xmit", ]) @pytest.mark.parametrize("dstype", ["seq", "pds", "pdse"]) -def test_gdg_unarchive(ansible_zos_module, dstype, format): +def test_gdg_unarchive(ansible_zos_module, dstype, ds_format): try: HLQ = "ANSIBLE" hosts = ansible_zos_module data_set_name = get_tmp_ds_name(symbols=True) archive_data_set = get_tmp_ds_name(symbols=True) - results = hosts.all.zos_data_set( - batch = [ - { "name":data_set_name, "state":"present", "type":"gdg", "limit":3}, - { "name":f"{data_set_name}(+1)", "state":"present", "type":dstype}, - { "name":f"{data_set_name}(+1)", "state":"present", "type":dstype}, - ] - ) + results = hosts.all.shell(cmd=f"dtouch -tGDG -L3 '{data_set_name}'") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{data_set_name}(+1)'") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{data_set_name}(+1)'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None @@ -1778,12 +1736,10 @@ def test_gdg_unarchive(ansible_zos_module, dstype, format): for ds in ds_to_write: hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds)) - format_dict = dict(name=format, format_options=dict()) - if format == "terse": - format_dict["format_options"] = dict(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) - if format == "terse": - del format_dict["format_options"]["terse_pack"] + format_dict = dict(name=ds_format, options=dict()) + if ds_format == "terse": + format_dict["options"] = dict(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=[f"{data_set_name}(0)",f"{data_set_name}(-1)" ], 
dest=archive_data_set, @@ -1797,16 +1753,16 @@ def test_gdg_unarchive(ansible_zos_module, dstype, format): cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") - - hosts.all.zos_data_set( - batch=[ - {"name": f"{data_set_name}(-1)", "state": "absent"}, - {"name": f"{data_set_name}(0)", "state": "absent"}, - ] - ) + hosts.all.shell(cmd=f"drm '{data_set_name}(-1)' && drm '{data_set_name}(0)'") + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True + } + } unarchive_result = hosts.all.zos_unarchive( src=archive_data_set, - format=format_dict, + format=unarchive_format_dict, remote_src=True ) for result in unarchive_result.contacted.values(): @@ -1846,7 +1802,7 @@ def test_zos_unarchive_async(ansible_zos_module, get_config): archive_result = hosts_zos.all.zos_archive(src=list(USS_TEST_FILES.keys()), dest=dest, format=dict( - name=archive_format + type=archive_format )) # remove files for file in USS_TEST_FILES.keys():