
Commit b9ba931: Fix formatting

Parent: 9e3f4ce

2 files changed (+61, -61 lines)

cobol-parser/src/main/scala/za/co/absa/cobrix/cobol/reader/extractors/raw/FixedBlockParameters.scala

Lines changed: 1 addition & 1 deletion (formatting-only change; the removed and added lines differ only in whitespace)
@@ -34,4 +34,4 @@ object FixedBlockParameters {
     params.blockLength.foreach(x => if (x < 1) throw new IllegalArgumentException(s"Block length should be positive. Got $x."))
     params.recordsPerBlock.foreach(x => if (x < 1) throw new IllegalArgumentException(s"Records per block should be positive. Got $x."))
   }
-}
+}
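
For context, a minimal, self-contained sketch (not part of this commit) of how the two checks shown above behave. The case class shape, object name, and method name are assumptions for illustration; only the two foreach checks are taken from the diff.

import scala.util.Try

// Hypothetical stand-in for FixedBlockParameters; field names are assumed for illustration.
case class FbParams(blockLength: Option[Int], recordsPerBlock: Option[Int])

object FbValidationSketch {
  // Mirrors the two checks from the diff: each optional value, if present, must be positive.
  def validate(params: FbParams): Unit = {
    params.blockLength.foreach(x => if (x < 1) throw new IllegalArgumentException(s"Block length should be positive. Got $x."))
    params.recordsPerBlock.foreach(x => if (x < 1) throw new IllegalArgumentException(s"Records per block should be positive. Got $x."))
  }

  def main(args: Array[String]): Unit = {
    println(Try(validate(FbParams(Some(32760), Some(10)))))  // Success(()) - both values positive
    println(Try(validate(FbParams(None, Some(0)))))          // Failure(IllegalArgumentException: Records per block should be positive. Got 0.)
  }
}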

spark-cobol/src/main/scala/za/co/absa/cobrix/spark/cobol/parameters/CobolParametersParser.scala

Lines changed: 60 additions & 60 deletions (formatting-only changes; the removed and added lines differ only in whitespace)
@@ -39,78 +39,78 @@ import scala.collection.mutable.ListBuffer
 object CobolParametersParser {
   private val logger = LoggerFactory.getLogger(this.getClass)

-  val SHORT_NAME = "cobol"
-  val PARAM_COPYBOOK_PATH = "copybook"
-  val PARAM_MULTI_COPYBOOK_PATH = "copybooks"
-  val PARAM_COPYBOOK_CONTENTS = "copybook_contents"
-  val PARAM_SOURCE_PATH = "path"
-  val PARAM_SOURCE_PATHS = "paths"
-  val PARAM_ENCODING = "encoding"
-  val PARAM_PEDANTIC = "pedantic"
-  val PARAM_RECORD_LENGTH_FIELD = "record_length_field"
-  val PARAM_RECORD_START_OFFSET = "record_start_offset"
-  val PARAM_RECORD_END_OFFSET = "record_end_offset"
-  val PARAM_FILE_START_OFFSET = "file_start_offset"
-  val PARAM_FILE_END_OFFSET = "file_end_offset"
+  val SHORT_NAME = "cobol"
+  val PARAM_COPYBOOK_PATH = "copybook"
+  val PARAM_MULTI_COPYBOOK_PATH = "copybooks"
+  val PARAM_COPYBOOK_CONTENTS = "copybook_contents"
+  val PARAM_SOURCE_PATH = "path"
+  val PARAM_SOURCE_PATHS = "paths"
+  val PARAM_ENCODING = "encoding"
+  val PARAM_PEDANTIC = "pedantic"
+  val PARAM_RECORD_LENGTH_FIELD = "record_length_field"
+  val PARAM_RECORD_START_OFFSET = "record_start_offset"
+  val PARAM_RECORD_END_OFFSET = "record_end_offset"
+  val PARAM_FILE_START_OFFSET = "file_start_offset"
+  val PARAM_FILE_END_OFFSET = "file_end_offset"

   // Schema transformation parameters
-  val PARAM_GENERATE_RECORD_ID = "generate_record_id"
-  val PARAM_SCHEMA_RETENTION_POLICY = "schema_retention_policy"
-  val PARAM_GROUP_FILLERS = "drop_group_fillers"
-  val PARAM_VALUE_FILLERS = "drop_value_fillers"
+  val PARAM_GENERATE_RECORD_ID = "generate_record_id"
+  val PARAM_SCHEMA_RETENTION_POLICY = "schema_retention_policy"
+  val PARAM_GROUP_FILLERS = "drop_group_fillers"
+  val PARAM_VALUE_FILLERS = "drop_value_fillers"

-  val PARAM_GROUP_NOT_TERMINALS = "non_terminals"
-  val PARAM_OCCURS_MAPPINGS = "occurs_mappings"
-  val PARAM_DEBUG = "debug"
+  val PARAM_GROUP_NOT_TERMINALS = "non_terminals"
+  val PARAM_OCCURS_MAPPINGS = "occurs_mappings"
+  val PARAM_DEBUG = "debug"

   // General parsing parameters
-  val PARAM_TRUNCATE_COMMENTS = "truncate_comments"
-  val PARAM_COMMENTS_LBOUND = "comments_lbound"
-  val PARAM_COMMENTS_UBOUND = "comments_ubound"
+  val PARAM_TRUNCATE_COMMENTS = "truncate_comments"
+  val PARAM_COMMENTS_LBOUND = "comments_lbound"
+  val PARAM_COMMENTS_UBOUND = "comments_ubound"

   // Data parsing parameters
-  val PARAM_STRING_TRIMMING_POLICY = "string_trimming_policy"
-  val PARAM_EBCDIC_CODE_PAGE = "ebcdic_code_page"
-  val PARAM_EBCDIC_CODE_PAGE_CLASS = "ebcdic_code_page_class"
-  val PARAM_ASCII_CHARSET = "ascii_charset"
-  val PARAM_IS_UTF16_BIG_ENDIAN = "is_utf16_big_endian"
-  val PARAM_FLOATING_POINT_FORMAT = "floating_point_format"
-  val PARAM_VARIABLE_SIZE_OCCURS = "variable_size_occurs"
-  val PARAM_IMPROVED_NULL_DETECTION = "improved_null_detection"
+  val PARAM_STRING_TRIMMING_POLICY = "string_trimming_policy"
+  val PARAM_EBCDIC_CODE_PAGE = "ebcdic_code_page"
+  val PARAM_EBCDIC_CODE_PAGE_CLASS = "ebcdic_code_page_class"
+  val PARAM_ASCII_CHARSET = "ascii_charset"
+  val PARAM_IS_UTF16_BIG_ENDIAN = "is_utf16_big_endian"
+  val PARAM_FLOATING_POINT_FORMAT = "floating_point_format"
+  val PARAM_VARIABLE_SIZE_OCCURS = "variable_size_occurs"
+  val PARAM_IMPROVED_NULL_DETECTION = "improved_null_detection"

   // Parameters for multisegment variable length files
-  val PARAM_RECORD_FORMAT = "record_format"
-  val PARAM_RECORD_LENGTH = "record_length"
-  val PARAM_IS_XCOM = "is_xcom"
-  val PARAM_IS_RECORD_SEQUENCE = "is_record_sequence"
-  val PARAM_IS_TEXT = "is_text"
-  val PARAM_IS_RDW_BIG_ENDIAN = "is_rdw_big_endian"
-  val PARAM_IS_BDW_BIG_ENDIAN = "is_bdw_big_endian"
-  val PARAM_IS_RDW_PART_REC_LENGTH = "is_rdw_part_of_record_length"
-  val PARAM_RDW_ADJUSTMENT = "rdw_adjustment"
-  val PARAM_BDW_ADJUSTMENT = "bdw_adjustment"
-  val PARAM_BLOCK_LENGTH = "block_length"
-  val PARAM_RECORDS_PER_BLOCK = "records_per_block"
-  val PARAM_SEGMENT_FIELD = "segment_field"
-  val PARAM_SEGMENT_ID_ROOT = "segment_id_root"
-  val PARAM_SEGMENT_FILTER = "segment_filter"
-  val PARAM_SEGMENT_ID_LEVEL_PREFIX = "segment_id_level"
-  val PARAM_RECORD_HEADER_PARSER = "record_header_parser"
-  val PARAM_RECORD_EXTRACTOR = "record_extractor"
-  val PARAM_RHP_ADDITIONAL_INFO = "rhp_additional_info"
-  val PARAM_RE_ADDITIONAL_INFO = "re_additional_info"
-  val PARAM_INPUT_FILE_COLUMN = "with_input_file_name_col"
+  val PARAM_RECORD_FORMAT = "record_format"
+  val PARAM_RECORD_LENGTH = "record_length"
+  val PARAM_IS_XCOM = "is_xcom"
+  val PARAM_IS_RECORD_SEQUENCE = "is_record_sequence"
+  val PARAM_IS_TEXT = "is_text"
+  val PARAM_IS_RDW_BIG_ENDIAN = "is_rdw_big_endian"
+  val PARAM_IS_BDW_BIG_ENDIAN = "is_bdw_big_endian"
+  val PARAM_IS_RDW_PART_REC_LENGTH = "is_rdw_part_of_record_length"
+  val PARAM_RDW_ADJUSTMENT = "rdw_adjustment"
+  val PARAM_BDW_ADJUSTMENT = "bdw_adjustment"
+  val PARAM_BLOCK_LENGTH = "block_length"
+  val PARAM_RECORDS_PER_BLOCK = "records_per_block"
+  val PARAM_SEGMENT_FIELD = "segment_field"
+  val PARAM_SEGMENT_ID_ROOT = "segment_id_root"
+  val PARAM_SEGMENT_FILTER = "segment_filter"
+  val PARAM_SEGMENT_ID_LEVEL_PREFIX = "segment_id_level"
+  val PARAM_RECORD_HEADER_PARSER = "record_header_parser"
+  val PARAM_RECORD_EXTRACTOR = "record_extractor"
+  val PARAM_RHP_ADDITIONAL_INFO = "rhp_additional_info"
+  val PARAM_RE_ADDITIONAL_INFO = "re_additional_info"
+  val PARAM_INPUT_FILE_COLUMN = "with_input_file_name_col"

   // Indexed multisegment file processing
-  val PARAM_ENABLE_INDEXES = "enable_indexes"
-  val PARAM_INPUT_SPLIT_RECORDS = "input_split_records"
-  val PARAM_INPUT_SPLIT_SIZE_MB = "input_split_size_mb"
-  val PARAM_SEGMENT_ID_PREFIX = "segment_id_prefix"
-  val PARAM_OPTIMIZE_ALLOCATION = "optimize_allocation"
-  val PARAM_IMPROVE_LOCALITY = "improve_locality"
+  val PARAM_ENABLE_INDEXES = "enable_indexes"
+  val PARAM_INPUT_SPLIT_RECORDS = "input_split_records"
+  val PARAM_INPUT_SPLIT_SIZE_MB = "input_split_size_mb"
+  val PARAM_SEGMENT_ID_PREFIX = "segment_id_prefix"
+  val PARAM_OPTIMIZE_ALLOCATION = "optimize_allocation"
+  val PARAM_IMPROVE_LOCALITY = "improve_locality"

   // Parameters for debugging
-  val PARAM_DEBUG_IGNORE_FILE_SIZE = "debug_ignore_file_size"
+  val PARAM_DEBUG_IGNORE_FILE_SIZE = "debug_ignore_file_size"

   private def getSchemaRetentionPolicy(params: Parameters): SchemaRetentionPolicy = {
     val schemaRetentionPolicyName = params.getOrElse(PARAM_SCHEMA_RETENTION_POLICY, "collapse_root")
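
As a usage note (not part of this commit), the PARAM_* constants above are the option keys accepted by the "cobol" Spark data source (SHORT_NAME). A minimal sketch, assuming spark-cobol is on the classpath and using placeholder copybook/data paths; the option values are illustrative only:

import org.apache.spark.sql.SparkSession

object CobrixOptionsSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("cobrix-options-sketch")
      .master("local[*]")
      .getOrCreate()

    // Each .option key below corresponds to one of the constants in the diff above.
    val df = spark.read
      .format("cobol")                                     // SHORT_NAME
      .option("copybook", "/path/to/record.cpy")           // PARAM_COPYBOOK_PATH (placeholder path)
      .option("record_format", "FB")                       // PARAM_RECORD_FORMAT
      .option("block_length", "32760")                     // PARAM_BLOCK_LENGTH (validated by FixedBlockParameters)
      .option("schema_retention_policy", "collapse_root")  // PARAM_SCHEMA_RETENTION_POLICY (the default shown above)
      .load("/path/to/data")                               // placeholder data path

    df.printSchema()
  }
}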
@@ -729,4 +729,4 @@ object CobolParametersParser {
     val parsedParams = parser.parseMap(params)
     parsedParams.map(kv => kv._1 -> kv._2.asInstanceOf[Map[String, Any]].map(x => x._1 -> x._2.asInstanceOf[Int]))
   }
-}
+}
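
The context of this last hunk shows how the parsed "occurs_mappings" option is narrowed from Map[String, Any] to Map[String, Map[String, Int]]. A small standalone sketch of that cast (not from the commit; the sample field names and counts are made up):

object OccursMappingsCastSketch {
  def main(args: Array[String]): Unit = {
    // A nested map as it might come back from a generic parser; values are Any at this point.
    val parsedParams: Map[String, Any] = Map(
      "DETAIL-CNT" -> Map("SEG-A" -> 1, "SEG-B" -> 2)
    )

    // Same narrowing as in the diff above: outer values become Map[String, Int].
    val occursMappings: Map[String, Map[String, Int]] =
      parsedParams.map(kv => kv._1 -> kv._2.asInstanceOf[Map[String, Any]].map(x => x._1 -> x._2.asInstanceOf[Int]))

    println(occursMappings)  // Map(DETAIL-CNT -> Map(SEG-A -> 1, SEG-B -> 2))
  }
}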
