Merged
3 changes: 3 additions & 0 deletions changelogs/fragments/2254-data_set-Enhance-error-message.yml
@@ -0,0 +1,3 @@
minor_changes:
- zos_data_set - Enhances error messages when creating a Generation Data Group fails.
(https://github.com/ansible-collections/ibm_zos_core/pull/2254)
44 changes: 42 additions & 2 deletions plugins/module_utils/data_set.py
@@ -36,12 +36,14 @@

try:
from zoautil_py import datasets, exceptions, gdgs, mvscmd, ztypes
from zoautil_py.exceptions import GenerationDataGroupCreateException
except ImportError:
datasets = ZOAUImportError(traceback.format_exc())
exceptions = ZOAUImportError(traceback.format_exc())
gdgs = ZOAUImportError(traceback.format_exc())
mvscmd = ZOAUImportError(traceback.format_exc())
ztypes = ZOAUImportError(traceback.format_exc())
GenerationDataGroupCreateException = ZOAUImportError(traceback.format_exc())


class DataSet(object):
@@ -2948,6 +2950,14 @@ def __init__(
# Removed escaping since is not needed by the GDG python api.
# self.name = DataSet.escape_data_set_name(self.name)

@staticmethod
def _validate_gdg_name(name):
"""Validates the length of a GDG name."""
if name and len(name) > 35:
raise GenerationDataGroupCreateError(
msg="GDG creation failed: dataset name exceeds 35 characters."
)

def create(self):
"""Creates the GDG.

@@ -2956,6 +2966,7 @@ def create(self):
int
Indicates if changes were made.
"""
GenerationDataGroup._validate_gdg_name(self.name)
gdg = gdgs.create(
name=self.name,
limit=self.limit,
@@ -2984,16 +2995,38 @@ def ensure_present(self, replace):
changed = False
present = False
gdg = None
name = arguments.get("name")

# Validate the name length before any operation
GenerationDataGroup._validate_gdg_name(name)

def _create_gdg(args):
try:
return gdgs.create(**args)
except exceptions._ZOAUExtendableException as e:
# Now, check if it's the specific exception we want to handle.
if isinstance(e, GenerationDataGroupCreateException):
stderr = getattr(e.response, 'stderr_response', '')
if "BGYSC5906E" in stderr :
raise GenerationDataGroupCreateError(msg="FIFO creation failed: the system may not support FIFO datasets or is not configured for it.")
elif "BGYSC6104E" in stderr :
raise GenerationDataGroupCreateError(msg="GDG creation failed: 'purge=true' requires 'scratch=true'.")
else:
raise GenerationDataGroupCreateError(msg=f"GDG creation failed. Raw error: {stderr}")
else:
# If it's a different ZOAU error, re-raise it.
raise e
if gdgs.exists(arguments.get("name")):
present = True

if not present:
gdg = gdgs.create(**arguments)
gdg = _create_gdg(arguments)

else:
if not replace:
return changed
changed = self.ensure_absent(True)
gdg = gdgs.create(**arguments)
gdg = _create_gdg(arguments)
if isinstance(gdg, gdgs.GenerationDataGroupView):
changed = True
return changed
@@ -3465,3 +3498,10 @@ def __init__(self, data_set):
"Make sure the generation exists and is active.".format(data_set)
)
super().__init__(self.msg)


class GenerationDataGroupCreateError(Exception):
"""Error during creation of a Generation Data Group."""
def __init__(self, msg):
self.msg = msg
super().__init__(self.msg)
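
A minimal, standalone sketch (not part of the diff) of the two checks this change introduces in data_set.py: the 35-character GDG name validation and the ZOAU message-code mapping applied by the new _create_gdg helper. The sample name and stderr string below are made up, and the real module raises GenerationDataGroupCreateError instead of returning strings:

def validate_gdg_name(name):
    # Reject GDG base names longer than 35 characters, mirroring _validate_gdg_name.
    if name and len(name) > 35:
        raise ValueError("GDG creation failed: data set name exceeds 35 characters.")

def map_gdg_create_error(stderr):
    # Map known ZOAU message codes in stderr to the friendlier messages used above.
    if "BGYSC5906E" in stderr:
        return "FIFO creation failed: the system may not support FIFO data sets or is not configured for it."
    if "BGYSC6104E" in stderr:
        return "GDG creation failed: 'purge=true' requires 'scratch=true'."
    return f"GDG creation failed. Raw error: {stderr}"

validate_gdg_name("USER.GDG.BASE")  # 13 characters, passes
print(map_gdg_create_error("BGYSC6104E Purge specified without scratch"))
# -> GDG creation failed: 'purge=true' requires 'scratch=true'.
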
6 changes: 2 additions & 4 deletions plugins/modules/zos_blockinfile.py
@@ -701,10 +701,8 @@ def main():
result['cmd'] = ret['data']['commands']
result['changed'] = ret['data']['changed']
result['found'] = ret['data']['found']
# Only populate 'rc' if stderr is not empty to not fail the playbook run in a nomatch case
if len(stderr):
result['stderr'] = str(stderr)
result['rc'] = rc
result['stderr'] = str(stderr)
result['rc'] = rc
module.exit_json(**result)


24 changes: 12 additions & 12 deletions tests/functional/modules/test_zos_archive_func.py
@@ -536,7 +536,7 @@ def test_mvs_archive_single_dataset(
assert result.get("changed") is True
assert result.get("dest") == archive_data_set
assert src_data_set in result.get("archived")
cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}")
cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'")
for c_result in cmd_result.contacted.values():
assert archive_data_set in c_result.get("stdout")
finally:
@@ -635,7 +635,7 @@ def test_mvs_archive_single_dataset_adrdssu(
assert result.get("changed") is True
assert result.get("dest") == archive_data_set
assert src_data_set in result.get("archived")
cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}")
cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'")
for c_result in cmd_result.contacted.values():
assert archive_data_set in c_result.get("stdout")
finally:
@@ -725,7 +725,7 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, ds_format
for c_result in cmd_result.contacted.values():
assert archive_data_set in c_result.get("stdout")
# Assert src_data_set is removed
cmd_result = hosts.all.shell(cmd = f"dls {src_data_set}")
cmd_result = hosts.all.shell(cmd = f"dls '{src_data_set}'")
for c_result in cmd_result.contacted.values():
assert f"BGYSC1103E No datasets match pattern: {src_data_set}." in c_result.get("stderr")
finally:
@@ -797,7 +797,7 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, ds_format, data_set)
assert result.get("dest") == archive_data_set
for ds in target_ds_list:
assert ds.get("name") in result.get("archived")
cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}")
cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'")

for c_result in cmd_result.contacted.values():
assert archive_data_set in c_result.get("stdout")
@@ -876,7 +876,7 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, ds_fo
else:
assert ds.get("name") in result.get("archived")

cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}")
cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'")
for c_result in cmd_result.contacted.values():
assert archive_data_set in c_result.get("stdout")
finally:
@@ -947,7 +947,7 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, ds_format
for result in archive_result.contacted.values():
assert result.get("changed") is True
assert result.get("dest") == archive_data_set
cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*")
cmd_result = hosts.all.shell(cmd = f"dls '{hlq}.*'")

for c_result in cmd_result.contacted.values():
assert archive_data_set in c_result.get("stdout")
@@ -1034,7 +1034,7 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, ds_form
else:
assert ds.get("name") in result.get("archived")

cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}")
cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'")
for c_result in cmd_result.contacted.values():
assert archive_data_set in c_result.get("stdout")

@@ -1129,7 +1129,7 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, ds_format, da
assert result.get("dest") == archive_data_set
assert src_data_set in result.get("archived")

cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}")
cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'")
for c_result in cmd_result.contacted.values():
assert archive_data_set in c_result.get("stdout")

@@ -1312,7 +1312,7 @@ def test_mvs_archive_single_dataset_encoding(
assert result.get("changed") is True
assert result.get("dest") == archive_data_set
assert src_data_set in result.get("archived")
cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}")
cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'")
for c_result in cmd_result.contacted.values():
assert archive_data_set in c_result.get("stdout")
finally:
@@ -1386,7 +1386,7 @@ def test_mvs_archive_multiple_dataset_pattern_encoding(ansible_zos_module, ds_fo
assert result.get("changed") is True
assert result.get("dest") == archive_data_set
assert ds_name in result.get("archived")
cmd_result = hosts.all.shell(cmd=f"dls {archive_data_set}")
cmd_result = hosts.all.shell(cmd=f"dls '{archive_data_set}'")
for c_result in cmd_result.contacted.values():
assert archive_data_set in c_result.get("stdout")

@@ -1465,7 +1465,7 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_skip_encoding(ansible_zos
assert result.get("changed") is True
assert result.get("dest") == archive_data_set
assert ds_name in result.get("archived")
cmd_result = hosts.all.shell(cmd=f"dls {archive_data_set}")
cmd_result = hosts.all.shell(cmd=f"dls '{archive_data_set}'")
for c_result in cmd_result.contacted.values():
assert archive_data_set in c_result.get("stdout")

@@ -1585,7 +1585,7 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib
assert result.get("changed") is True
assert result.get("dest") == archive_data_set
assert ds_name in result.get("archived")
cmd_result = hosts.all.shell(cmd=f"dls {archive_data_set}")
cmd_result = hosts.all.shell(cmd=f"dls '{archive_data_set}'")
for c_result in cmd_result.contacted.values():
assert archive_data_set in c_result.get("stdout")

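
The test updates above only wrap the dls argument in single quotes, presumably so the remote shell does not interpret special characters (such as * or $) in the data set name or pattern before dls sees them. A minimal sketch of the difference in the command string, using a hypothetical data set name:

archive_data_set = "OMVSADM.ARCHIVE.TEST"   # hypothetical name
unquoted = f"dls {archive_data_set}.*"      # the shell may expand or mangle '*' first
quoted = f"dls '{archive_data_set}.*'"      # the pattern reaches dls untouched
print(quoted)  # dls 'OMVSADM.ARCHIVE.TEST.*'
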
8 changes: 4 additions & 4 deletions tests/functional/modules/test_zos_encode_func.py
@@ -362,8 +362,8 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module):
hosts = ansible_zos_module
mvs_ps = get_tmp_ds_name()
hosts.all.zos_data_set(name=mvs_ps, state="present", type="seq")
hosts.all.copy(content=TEST_DATA, dest=mvs_ps)
hosts.all.copy(content="test", dest=uss_dest_file)
hosts.all.zos_copy(content=TEST_DATA, dest=mvs_ps)
hosts.all.zos_copy(content="test", dest=uss_dest_file)
results = hosts.all.zos_encode(
src=mvs_ps,
dest=uss_dest_file,
@@ -477,8 +477,8 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module):
hosts.all.zos_data_set(
name=mvs_pds_member, type="member", state="present"
)
hosts.all.copy(content=TEST_DATA, dest=mvs_pds_member)
hosts.all.copy(content="test", dest=uss_dest_file)
hosts.all.zos_copy(content=TEST_DATA, dest=mvs_pds_member)
hosts.all.zos_copy(content="test", dest=uss_dest_file)
results = hosts.all.zos_encode(
src=mvs_pds_member,
dest=uss_dest_file,