diff --git a/PantheonCMD/__init__.py b/PantheonCMD/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/PantheonCMD/assemblies_nesting.py b/PantheonCMD/assemblies_nesting.py new file mode 100644 index 0000000..b825517 --- /dev/null +++ b/PantheonCMD/assemblies_nesting.py @@ -0,0 +1,37 @@ +#!/usr/bin/python3 + +import subprocess +from validation.pcmsg import Report, print_report_message +from validation.pcentrypointvalidator import get_exist, get_level_four_assemblies + + +def get_assemblies(): + command = ("find rhel-8/assemblies/ -type f -name '*.adoc'") + process = subprocess.run(command, stdout=subprocess.PIPE, shell=True).stdout + assemblie = process.strip().decode('utf-8').split('\n') + + return assemblie + + +def validate_nesting(report, assemblie_found): + existing_entry_points = get_exist(assemblie_found) + level_four_assemblies = get_level_four_assemblies(existing_entry_points) + + for item in level_four_assemblies: + report.create_report('nesting in assemlies. nesting', item) + + +def validate_assemblies(): + report = Report() + assemblies = get_assemblies() + validate_nesting(report, assemblies) + + return report + + +def trytry(): + a = validate_assemblies() + print_report_message(a, 'pantheon2.yml') + + +trytry() diff --git a/PantheonCMD/current_pcentrypointvalidator.py b/PantheonCMD/current_pcentrypointvalidator.py new file mode 100644 index 0000000..e16ccb7 --- /dev/null +++ b/PantheonCMD/current_pcentrypointvalidator.py @@ -0,0 +1,164 @@ +#!/usr/bin/python3 + +from pcutil import get_not_exist +from pcmsg import print_message +import sys +import os +import re +from pcchecks import Regex +from pcprvalidator import get_all_assemblies, get_all_modules, get_no_prefix_files, get_undetermined_files +import glob + + +from pcmsg import print_report_message +from pcvalidator import validation + + +def get_nonexisting_entry_points(entry_point_list): + nonexistent_files = get_not_exist(entry_point_list) + + if nonexistent_files: + print_message(nonexistent_files, 'entry point', 'does not exist in your repository') + sys.exit(2) + + +def get_full_path_to_includes_with_attributes(files): + wildcard_sub = [] + full_path = [] + + for item in files: + attribute = re.findall(Regex.ATTRIBUTE, item) + if attribute: + replace = re.sub(Regex.ATTRIBUTE, "**", item) + + wildcard_sub.append(replace) + + for i in wildcard_sub: + full_path.append(glob.glob(i, recursive=True)) + + return full_path + + +def get_unique_entries(list): + unique = [] + for file in list: + if file in unique: + continue + if any(os.path.samefile(file, item) for item in unique): + continue + unique.append(file) + + return unique + + +def get_includes(files): + """Retreives full paths to included files from an entry point.""" + includes_with_attributes = [] + path_to_includes_with_attributes = [] + includes_found = [] + includes_not_found = {} + unique_entries_includes_with_attributes = [] + + for entry in files: + path_to_entry_point = os.path.dirname(os.path.abspath(entry)) + + # check existence + + with open(entry, 'r') as file: + original = file.read() + stripped = Regex.MULTI_LINE_COMMENT.sub('', original) + stripped = Regex.SINGLE_LINE_COMMENT.sub('', stripped) + + included_files = re.findall(Regex.INCLUDED_CONTENT, stripped) + + if included_files: + + for include in included_files[:]: + itemized_path = include.split(os.sep) + + attribute_in_path = False + attribute_file = False + + for item in itemized_path: + if item.startswith('_'): + attribute_file = True + included_files.remove(include) + break + if 
item.startswith('{'): + attribute_in_path = True + included_files.remove(include) + includes_with_attributes.append(os.path.join(path_to_entry_point, include)) + break + + for include in included_files: + full_path = os.path.join(path_to_entry_point, include) + if os.path.exists(full_path): + includes_found.append(full_path) + else: + includes_not_found.setdefault(entry, {})[include] = 1 + + if includes_with_attributes: + + includes_with_attributes = get_full_path_to_includes_with_attributes(includes_with_attributes) + + for i in includes_with_attributes: + unique_entries_includes_with_attributes.append(get_unique_entries(i)) + + return includes_found, includes_not_found, unique_entries_includes_with_attributes + + +def get_includes_recursively(files): + + lvl_1_includes_found, lvl_1_includes_not_found, lvl_1_includes_with_attributes = get_includes(files) + + lvl_2_includes_found, lvl_2_includes_not_found, lvl_2_includes_with_attributes = get_includes(lvl_1_includes_found) + + lvl_3_includes_found, lvl_3_includes_not_found, lvl_3_includes_with_attributes = get_includes(lvl_2_includes_found) + + lvl_4_includes_found, lvl_4_includes_not_found, lvl_4_includes_with_attributes = get_includes(lvl_3_includes_found) + + includes_found = lvl_1_includes_found + lvl_2_includes_found + lvl_3_includes_found + lvl_4_includes_found + + #includes_not_found = lvl_1_includes_not_found + lvl_2_includes_not_found + lvl_3_includes_not_found + lvl_3_includes_not_found + lvl_4_includes_not_found + + includes_not_found = {**lvl_1_includes_not_found , **lvl_2_includes_not_found, **lvl_3_includes_not_found, **lvl_4_includes_not_found} + + includes_with_attributes = lvl_1_includes_with_attributes + lvl_2_includes_with_attributes + lvl_3_includes_with_attributes + lvl_4_includes_with_attributes + includes_with_attributes = [j for i in includes_with_attributes for j in i] + + if includes_with_attributes: + includes_with_attributes = get_unique_entries(includes_with_attributes) + + + # only valid for entry point + for file in files: + file_name = os.path.basename(file) + if not file_name == 'master.adoc': + includes_found.append(file) + + return includes_found, includes_not_found, includes_with_attributes + + +def validate_entry_point_files(entry_point_list): + get_nonexisting_entry_points(entry_point_list) + + all_includes_found, all_includes_not_found, all_includes_with_attributes = get_includes_recursively(entry_point_list) + + no_prefix_files = get_no_prefix_files(all_includes_found) + + all_assemblies = get_all_assemblies(all_includes_found, no_prefix_files) + all_modules = get_all_modules(all_includes_found, no_prefix_files) + all_undetermined_files = get_undetermined_files(no_prefix_files) + + for i in all_includes_with_attributes: + print(i) + + '''if all_includes_not_found: + for key, value in all_includes_not_found.items(): + print(f'{os.path.basename(key)} contains the following includes that do not exist in your repository:') + for v in value: + print('\t', v) + + validate = validation(all_includes_found, all_modules, all_assemblies) + + print_report_message(validate, 'entry point')''' diff --git a/PantheonCMD/old_entry.py b/PantheonCMD/old_entry.py new file mode 100644 index 0000000..a6bd42b --- /dev/null +++ b/PantheonCMD/old_entry.py @@ -0,0 +1,126 @@ +#!/usr/bin/python3 + +import argparse +import re +import os +from pcchecks import Regex +import sys +from pcutil import get_exist, get_not_exist +from pcprvalidator import get_no_prefix_files, get_all_modules, get_all_assemblies, get_undetermined_files 
+from pcvalidator import validation +from pcmsg import print_message, print_report_message + +parser = argparse.ArgumentParser() + + +def get_nonexisting_entry_points(entry_point_list): + nonexistent_files = get_not_exist(entry_point_list) + + if nonexistent_files: + print_message(nonexistent_files, 'entry point', 'does not exist in your repository') + sys.exit(2) + + +def get_includes(entry_points): + path_to_includes = [] + + for entry in entry_points: + path_to_entry_point = os.path.dirname(os.path.abspath(entry)) + + with open(entry, 'r') as file: + original = file.read() + stripped = Regex.MULTI_LINE_COMMENT.sub('', original) + stripped = Regex.SINGLE_LINE_COMMENT.sub('', stripped) + + included_files = re.findall(Regex.INCLUDED_CONTENT, stripped) + + if included_files: + + for include in included_files[:]: + if include.startswith('_'): + included_files.remove(include) + + for i in included_files: + path_to_includes.append(os.path.join(path_to_entry_point, i)) + + return path_to_includes + + +def get_level_one_includes(files): + path_to_level_one_includes = get_includes(files) + + return path_to_level_one_includes + + +def get_level_two_includes(files): + path_to_level_two_includes = get_includes(files) + + return path_to_level_two_includes + + +def get_level_three_includes(files): + path_to_level_three_includes = get_includes(files) + + return path_to_level_three_includes + + +def get_level_four_includes(files): + path_to_level_four_includes = get_includes(files) + + return path_to_level_four_includes + + +def get_concatenated_includes(entry_point_list): + existing_entry_points = get_exist(entry_point_list) + level_one_includes = get_level_one_includes(existing_entry_points) + level_two_includes = get_level_two_includes(level_one_includes) + level_three_includes = get_level_three_includes(level_two_includes) + level_four_includes = get_level_four_includes(level_three_includes) + no_prefix_level_four_includes = get_no_prefix_files(level_four_includes) + level_four_modules = get_all_modules(level_four_includes, no_prefix_level_four_includes) + level_four_assemblies = get_all_assemblies(level_four_includes, no_prefix_level_four_includes) + + all_includes = level_one_includes + level_two_includes + level_three_includes + level_four_modules + + return all_includes, level_four_assemblies + + +def get_level_four_assemblies(entry_point_list): + all_includes, level_four_assemblies = get_concatenated_includes(entry_point_list) + + return level_four_assemblies + + +def get_all_includes(entry_point_list): + all_includes, level_four_assemblies = get_concatenated_includes(entry_point_list) + + for entry in entry_point_list: + if not entry.endswith('master.adoc'): + all_includes = all_includes + entry_point_list + + for include in all_includes: + if os.path.basename(include).startswith('_'): + all_includes.remove(include) + + return all_includes + + +def validate_entry_point_files(entry_point_list): + # exit if entry point doesn't exist + get_nonexisting_entry_points(entry_point_list) + existing_entry_points = get_exist(entry_point_list) + includes = get_all_includes(entry_point_list) + no_prefix_files = get_no_prefix_files(includes) + modules_found = get_all_modules(includes, no_prefix_files) + assemblies_found = get_all_assemblies(includes, no_prefix_files) + undetermined_file_type = get_undetermined_files(no_prefix_files) + level_four_assemblies = get_level_four_assemblies(existing_entry_points) + + if level_four_assemblies: + print_message(level_four_assemblies, 'entry point', 'contains 
unsupported level of nesting for the following files') + + if undetermined_file_type: + print_message(undetermined_file_type, 'entry point', 'contains the following files that can not be classified as modules or assemblies') + + validate = validation(includes, modules_found, assemblies_found) + print_report_message(validate, 'entry point') diff --git a/PantheonCMD/pcbuild.py b/PantheonCMD/pcbuild.py index 32938ba..fc9d566 100644 --- a/PantheonCMD/pcbuild.py +++ b/PantheonCMD/pcbuild.py @@ -117,7 +117,7 @@ def coalesce_document(main_file, attributes=None, depth=0, top_level=True): attributes = attributes or {} comment_block = False lines = [] - + # Create a copy of global attributes if top_level: attributes_global = attributes.copy() diff --git a/PantheonCMD/pcchecks.py b/PantheonCMD/pcchecks.py index c1691c8..0ef98f1 100644 --- a/PantheonCMD/pcchecks.py +++ b/PantheonCMD/pcchecks.py @@ -17,8 +17,11 @@ class Tags: class Regex: """Define regular expresiions for the checks.""" + ATTRIBUTE = re.compile(r'{.*?}') INCLUDE = re.compile(r'include::.*\]\n') + INCLUDED_CONTENT = re.compile(r'(?<=include::).*?(?=\[)') MODULE_TYPE = re.compile(r':_module-type: (PROCEDURE|CONCEPT|REFERENCE)') + CONTENT_TYPE = re.compile(r':_content-type: (PROCEDURE|CONCEPT|REFERENCE|ASSEMBLY)') PREFIX_ASSEMBLIES = re.compile(r'.*\/assembly.*\.adoc') PREFIX_MODULES = re.compile(r'.*\/con.*\.adoc|.*\/proc.*\.adoc|.*\/ref.*\.adoc') # should exclude pseudo vanilla like <> diff --git a/PantheonCMD/pcentrypointvalidator.py b/PantheonCMD/pcentrypointvalidator.py new file mode 100644 index 0000000..ebd8b7a --- /dev/null +++ b/PantheonCMD/pcentrypointvalidator.py @@ -0,0 +1,87 @@ +#!/usr/bin/python3 + +from pcutil import get_not_exist +from pcmsg import print_message +import sys +import os +import re +from pcchecks import Regex +from pcprvalidator import get_all_assemblies, get_all_modules, get_no_prefix_files, get_undetermined_files +import glob + + +from pcmsg import print_report_message +from pcvalidator import validation + + +def get_nonexisting_entry_points(entry_point_list): + nonexistent_entry_point = [] + + for entry in entry_point_list: + if not os.path.isfile(entry): + nonexistent_entry_point.append(entry) + + if nonexistent_entry_point: + print_message(nonexistent_entry_point, 'entry point', 'does not exist in your repository') + sys.exit(2) + + +def remove_attribute_files(files): + for file in files[:]: + itemized_path = file.split(os.sep) + + attribute_file = False + + for item in itemized_path: + if item.startswith('_'): + attribute_file = True + files.remove(file) + break + + return files + + +def sub_attributes_in_path(files): + + files = [re.sub(Regex.ATTRIBUTE, "**", file) for file in files] + + wildcard = re.compile(r'[*?\[\]]') + content_files = [] + + for item in files: + if wildcard.search(item): + expanded_items = glob.glob(item) + if expanded_items: + for expanded_item in expanded_items: + content_files.append(expanded_item) + else: + continue + else: + content_files.append(item) + + return content_files + + +def smth(entry_point_list): + for entry in entry_point_list: + path_to_entry_point = os.path.dirname(os.path.abspath(entry)) + + with open(entry, 'r') as file: + original = file.read() + stripped = Regex.MULTI_LINE_COMMENT.sub('', original) + stripped = Regex.SINGLE_LINE_COMMENT.sub('', stripped) + + included_files = re.findall(Regex.INCLUDED_CONTENT, stripped) + + if included_files: + included_files = remove_attribute_files(included_files) + included_files = sub_attributes_in_path(included_files) + 
+ for include in included_files: + print(include) + + +def validate_entry_point_files(entry_point_list): + # exit if the provided path doesn't exist + get_nonexisting_entry_points(entry_point_list) + smth(entry_point_list) diff --git a/PantheonCMD/pcmd.py b/PantheonCMD/pcmd.py index 35dda17..88da372 100644 --- a/PantheonCMD/pcmd.py +++ b/PantheonCMD/pcmd.py @@ -8,11 +8,12 @@ import sys from pcutil import PantheonRepo, get_not_exist, get_exist, is_pantheon_repo -from pcvalidator import validation +from pcvalidator import validate_build_files from pcyamlchecks import yaml_validation from subprocess import call -from pcprvalidator import get_changed_files, get_all_modules, get_all_assemblies, get_undetermined_files, get_no_prefix_files - +from pcprvalidator import validate_merge_request_files +from pcentrypointvalidator import validate_entry_point_files +from pcmsg import print_message, print_report_message def print_header(): @@ -47,6 +48,8 @@ def parse_args(): # 'Validate' command parser_d = subparsers.add_parser('validate', help='Validate entries in your pantheon2.yml file.') parser_d.add_argument('--mr', action='store_true', help='Validate files commited on a merge request.') + parser_d.add_argument('--e', nargs=1, help='Validate files from an entry point.') + parser_d.add_argument('--a', help='The attributes file.') # 'Generate' command parser_e = subparsers.add_parser('generate', help='Generate pantheon2.yml file from a template.') @@ -76,66 +79,31 @@ def parse_args(): # validate modules and assemblies elif args.command == 'validate': - if args.mr: - - changed_files = get_changed_files() - files_found = get_exist(changed_files) - no_prefix_files = get_no_prefix_files(files_found) - modules_found = get_all_modules(files_found, no_prefix_files) - assemblies_found = get_all_assemblies(files_found, no_prefix_files) - undetermined_file_type = get_undetermined_files(no_prefix_files) - - if undetermined_file_type: - print("\nYour Merge Request contains the following files that can not be classified as modules or assemblies:\n") + # user provides paths to files that are relative to current pwd - for file in undetermined_file_type: + if args.a: + if not args.e: + print('ERROR: --a option has to be used together with --e option.') + sys.exit(1) - print('\t' + file) + if args.e: + if args.a: + attribute_files = args.a + entry_point_list = args.e + validate_entry_point_files(entry_point_list) - print("\nTotal: ", str(len(undetermined_file_type))) + elif args.mr: - validate = validation(files_found, modules_found, assemblies_found) + validate_merge_request_files() - if validate.count != 0: - print("\nYour Merge Request contains the following files that did not pass validation:\n") - validate.print_report() - sys.exit(2) - else: - print("All files passed validation.") - sys.exit(0) else: - pantheon_repo = PantheonRepo(repo_location) - if os.path.exists('pantheon2.yml'): # call yaml file validation + attribute file validation yaml_validation('pantheon2.yml') - exists = get_not_exist(pantheon_repo.get_content()) - - if exists: - - print("\nYour pantheon2.yml contains the following files that do not exist in your repository:\n") - - for exist in exists: - - print('\t' + exist) - - print("\nTotal: ", str(len(exists))) - - files_found = get_exist(pantheon_repo.get_content()) - modules_found = pantheon_repo.get_existing_content("modules") - assemblies_found = pantheon_repo.get_existing_content("assemblies") - - validate = validation(files_found, modules_found, assemblies_found) - - if validate.count != 0: 
- print("\nYour pantheon2.yml contains the following files that did not pass validation:\n") - validate.print_report() - sys.exit(2) - else: - print("All files passed validation.") + validate_build_files() else: @@ -154,15 +122,11 @@ def parse_args(): # Action - preview if args.command == 'preview': - # Validate the pantheon2.yml file - yaml_validation(pantheon_repo.yaml_file_location) - - # Set the output format if args.format == 'pdf': output_format = 'pdf' else: output_format = 'html' - + # Did a user specify a set of files? If so, only build those. if args.files: # Handle different interpretations of directories @@ -215,13 +179,7 @@ def parse_args(): duplicates = pantheon_repo.get_duplicates() if duplicates: - - print("Your pantheon2.yml contains the following duplicate entries:\n") - - for duplicate in duplicates: - print(duplicate) - - print("\nTotal: ", str(len(duplicates))) + print_message(duplicates, 'pantheon2.yml', 'contains the following duplicate entries') else: diff --git a/PantheonCMD/pcmsg.py b/PantheonCMD/pcmsg.py new file mode 100644 index 0000000..c371f31 --- /dev/null +++ b/PantheonCMD/pcmsg.py @@ -0,0 +1,45 @@ +#!/usr/bin/python3 + +import sys + + +class Report(): + """Create and print report. thank u J.""" + + def __init__(self): + """Create placeholder for problem description.""" + self.report = {} + self.count = 0 + + def create_report(self, category, file_path): + """Generate report.""" + self.count += 1 + if not category in self.report: + self.report[category] = [] + self.report[category].append(file_path) + + def print_report(self): + + """Print report.""" + separator = "\n\t" + + for category, files in self.report.items(): + print("\nERROR: {} found in the following files:".format(category)) + print('\t' + separator.join(files)) + + +def print_message(variable, specification, msg): + print(f'\nYour {specification} {msg}:\n') + for var in variable: + print('\t', var) + print("\nTotal: ", str(len(variable))) + + +def print_report_message(variable, specification): + if variable.count != 0: + print(f"\nYour {specification} contains the following files that did not pass validation:\n") + variable.print_report() + sys.exit(2) + else: + print("All files passed validation.") + sys.exit(0) diff --git a/PantheonCMD/pcprvalidator.py b/PantheonCMD/pcprvalidator.py index adc7873..8bc414a 100644 --- a/PantheonCMD/pcprvalidator.py +++ b/PantheonCMD/pcprvalidator.py @@ -1,25 +1,38 @@ #!/usr/bin/python3 import subprocess -from pygit2 import Repository import os import sys -import subprocess import re from pcchecks import Regex +from pcvalidator import validation +from pcmsg import print_message, print_report_message +from pcutil import get_exist if subprocess.call(["git", "branch"], stderr=subprocess.STDOUT, stdout=open(os.devnull, 'w')) != 0: print('Not a git repository; existing...') sys.exit(1) else: - current_branch = Repository('.').head.shorthand + command = ("git rev-parse --abbrev-ref HEAD") + process = subprocess.run(command, stdout=subprocess.PIPE, shell=True).stdout + current_branch = process.strip().decode('utf-8').split('\n') + current_branch = ' '.join([str(elem) for elem in current_branch]) + + +def get_mr(): + if current_branch == 'master': + print('On master. Exiting...') + sys.exit(1) + elif current_branch == 'main': + print('On main. Exiting...') + sys.exit(1) def get_changed_files(): """Return a list of the files that werre change on the PR.""" - command = ("git diff --diff-filter=ACM --name-only origin/HEAD..." 
+ current_branch + " -- ':!*master.adoc' | xargs -I '{}' realpath --relative-to=. $(git rev-parse --show-toplevel)/'{}' | grep '.*\.adoc'") + command = ("git diff --diff-filter=ACM --name-only origin/HEAD..." + str(current_branch) + " -- ':!*master.adoc' | xargs -I '{}' realpath --relative-to=. $(git rev-parse --show-toplevel)/'{}' | grep '.*\.adoc'") process = subprocess.run(command, stdout=subprocess.PIPE, shell=True).stdout changed_files = process.strip().decode('utf-8').split('\n') @@ -31,7 +44,8 @@ def get_prefix_assemblies(files_found): prefix_assembly_files = [] for file in files_found: - if re.findall(Regex.PREFIX_ASSEMBLIES, file): + file_name = os.path.basename(file) + if file_name.startswith('assembly'): prefix_assembly_files.append(file) return(sorted(prefix_assembly_files, key=str.lower)) @@ -42,7 +56,8 @@ def get_prefix_modules(files_found): prefix_module_files = [] for file in files_found: - if re.findall(Regex.PREFIX_MODULES, file): + file_name = os.path.basename(file) + if file_name.startswith(('proc', 'con', 'ref')): prefix_module_files.append(file) return(sorted(prefix_module_files, key=str.lower)) @@ -53,7 +68,8 @@ def get_no_prefix_files(files_found): no_prefix_files = [] for file in files_found: - if not re.findall(Regex.PREFIX_ASSEMBLIES, file) and not re.findall(Regex.PREFIX_MODULES, file): + file_name = os.path.basename(file) + if not file_name.startswith(('proc', 'con', 'ref', 'assembly')): no_prefix_files.append(file) return no_prefix_files @@ -74,13 +90,15 @@ def get_no_prefefix_file_type(no_prefix_files): stripped = Regex.CODE_BLOCK_DOTS.sub('', stripped) stripped = Regex.INTERNAL_IFDEF.sub('', stripped) - if re.findall(Regex.MODULE_TYPE, stripped): + content_type = re.findall(Regex.CONTENT_TYPE, original) + + if content_type in (['PROCEDURE'], ['CONCEPT'], ['REFERENCE']): no_prefix_module_type.append(path) - if re.findall(Regex.INCLUDE, stripped): + if content_type == ['ASSEMBLY']: no_prefix_assembly_type.append(path) - if not re.findall(Regex.MODULE_TYPE, stripped) and not re.findall(Regex.INCLUDE, stripped): + if not content_type: undetermined_file_type.append(path) return no_prefix_module_type, no_prefix_assembly_type, undetermined_file_type @@ -121,3 +139,20 @@ def get_undetermined_files(no_prefix_files): no_prefix_module_type, no_prefix_assembly_type, undetermined_file_type = get_no_prefefix_file_type(no_prefix_files) return(sorted(undetermined_file_type, key=str.lower)) + + +def validate_merge_request_files(): + get_mr() + changed_files = get_changed_files() + files_found = get_exist(changed_files) + no_prefix_files = get_no_prefix_files(files_found) + modules_found = get_all_modules(files_found, no_prefix_files) + assemblies_found = get_all_assemblies(files_found, no_prefix_files) + undetermined_file_type = get_undetermined_files(no_prefix_files) + + if undetermined_file_type: + print_message(undetermined_file_type, 'Merge Request', 'contains the following files that can not be classified as modules or assemblies') + + validate = validation(files_found, modules_found, assemblies_found) + + print_report_message(validate, 'Merge Request') diff --git a/PantheonCMD/pcvalidator.py b/PantheonCMD/pcvalidator.py index 31f32b4..43f3002 100644 --- a/PantheonCMD/pcvalidator.py +++ b/PantheonCMD/pcvalidator.py @@ -2,31 +2,8 @@ from pcchecks import Regex, checks, nesting_in_modules_check, nesting_in_assemblies_check, add_res_section_module_check, add_res_section_assembly_check, icons_check, toc_check import sys - - -class Report(): - """Create and print report. 
thank u J."""
-
-    def __init__(self):
-        """Create placeholder for problem description."""
-        self.report = {}
-        self.count = 0
-
-    def create_report(self, category, file_path):
-        """Generate report."""
-        self.count += 1
-        if not category in self.report:
-            self.report[category] = []
-        self.report[category].append(file_path)
-
-    def print_report(self):
-
-        """Print report."""
-        separator = "\n\t"
-
-        for category, files in self.report.items():
-            print("\nERROR: {} found in the following files:".format(category))
-            print('\t' + separator.join(files))
+from pcmsg import print_message, print_report_message, Report
+from pcutil import get_not_exist, get_exist, PantheonRepo, is_pantheon_repo
 
 
 def validation(files_found, modules_found, assemblies_found):
@@ -68,3 +45,21 @@ def validation(files_found, modules_found, assemblies_found):
             add_res_section_assembly_check(report, stripped, path)
 
     return report
+
+
+def validate_build_files():
+    repo_location = is_pantheon_repo()
+    pantheon_repo = PantheonRepo(repo_location)
+
+    nonexistent_files = get_not_exist(pantheon_repo.get_content())
+
+    if nonexistent_files:
+        print_message(nonexistent_files, 'pantheon2.yml', 'contains the following files that do not exist in your repository')
+
+    files_found = get_exist(pantheon_repo.get_content())
+    modules_found = pantheon_repo.get_existing_content("modules")
+    assemblies_found = pantheon_repo.get_existing_content("assemblies")
+
+    validate = validation(files_found, modules_found, assemblies_found)
+
+    print_report_message(validate, 'pantheon2.yml')
diff --git a/PantheonCMD/pcyamlchecks.py b/PantheonCMD/pcyamlchecks.py
index 4df0687..6ba5468 100644
--- a/PantheonCMD/pcyamlchecks.py
+++ b/PantheonCMD/pcyamlchecks.py
@@ -6,7 +6,7 @@ from cerberus import Validator, errors
 from cerberus.errors import BasicErrorHandler
 
 from pcchecks import Regex, icons_check, toc_check
-from pcvalidator import Report
+from pcmsg import Report
 import glob
 
 
@@ -58,9 +58,9 @@ def get_yaml_errors(yaml_schema, yaml_doc):
     v.validate(yaml_doc, yaml_schema)
 
     if v.errors:
-        print("FAIL: there is an error in your yaml file:")
-        for key in v.errors.keys():
-            print("\n\t'{}' {}".format(key, ', '.join(str(item) for item in v.errors[key])))
+        print("ERROR: Your pantheon2.yml contains the following errors:")
+        for key in v.errors:
+            print('\t', key, v.errors[key])
 
         sys.exit(2)
     else:
@@ -80,7 +80,7 @@ def get_yaml_errors(yaml_schema, yaml_doc):
                     path_exists.append(variant['path'])
 
             if path_does_not_exist:
-                print('FAIL: Your pantheon2.yml contains the following files or directories that do not exist in your repository:\n')
+                print('ERROR: Your pantheon2.yml contains the following files or directories that do not exist in your repository:\n')
                 for path in path_does_not_exist:
                     print('\t', path)
                 sys.exit(2)
diff --git a/PantheonCMD/validation/assemblies_nesting.py b/PantheonCMD/validation/assemblies_nesting.py
new file mode 100644
index 0000000..0533c97
--- /dev/null
+++ b/PantheonCMD/validation/assemblies_nesting.py
@@ -0,0 +1,37 @@
+#!/usr/bin/python3
+
+import subprocess
+from validation.pcmsg import Report, print_report_message
+from validation.pcentrypointvalidator import get_exist, get_level_four_assemblies
+
+
+def get_assemblies():
+    command = ("find rhel-8/assemblies/ -type f -name '*.adoc'")
+    process = subprocess.run(command, stdout=subprocess.PIPE, shell=True).stdout
+    assemblies = process.strip().decode('utf-8').split('\n')
+
+    return assemblies
+
+
+def validate_nesting(report, assemblies_found):
+    existing_entry_points = get_exist(assemblies_found)
+    level_four_assemblies = 
get_level_four_assemblies(existing_entry_points) + + for item in level_four_assemblies: + report.create_report('nesting in assemblies. nesting', item) + + +def validate_assemblies(): + report = Report() + assemblies = get_assemblies() + validate_nesting(report, assemblies) + + return report + + +def trytry(): + a = validate_assemblies() + print_report_message(a, 'pantheon2.yml') + + +trytry() diff --git a/PantheonCMD/validation/modules_nesting.py b/PantheonCMD/validation/modules_nesting.py new file mode 100644 index 0000000..16cef77 --- /dev/null +++ b/PantheonCMD/validation/modules_nesting.py @@ -0,0 +1,75 @@ +#!/usr/bin/python3 + +import re +import os +import subprocess +from pcchecks import Regex +from pcmsg import Report, print_report_message + + +def get_modules(): + command = ("find rhel-8/modules/identity-management/ -type f -name '*.adoc'") + process = subprocess.run(command, stdout=subprocess.PIPE, shell=True).stdout + modules = process.strip().decode('utf-8').split('\n') + + return modules + + +def validate_nesting(modules_found): + nesting_prefix = [] + nesting_no_prefix = [] + + for path in modules_found: + with open(path, "r") as file: + original = file.read() + stripped = Regex.MULTI_LINE_COMMENT.sub('', original) + stripped = Regex.SINGLE_LINE_COMMENT.sub('', stripped) + + includes = re.findall(Regex.INCLUDED_CONTENT, stripped) + + if includes: + for item in includes: + filename = os.path.basename(item) + if filename.startswith(("assembly", "con", "proc", "ref")): + nesting_prefix.append(path) + else: + path_to_include = os.path.dirname(path) + item = path_to_include + '/' + item + + with open(item, 'r') as file: + original = file.read() + stripped = Regex.MULTI_LINE_COMMENT.sub('', original) + stripped = Regex.SINGLE_LINE_COMMENT.sub('', stripped) + + if re.findall(Regex.CONTENT_TYPE, stripped): + nesting_no_prefix.append(path) + + return nesting_prefix + nesting_no_prefix + + +def get_unique_nested_content(report, modules_found): + unique_nested_content = [] + + nested_content = validate_nesting(modules_found) + + for item in nested_content: + if item not in unique_nested_content: + unique_nested_content.append(item) + + report.create_report('nesting in modules. nesting', item) + + +def validate_modules(): + report = Report() + modules = get_modules() + get_unique_nested_content(report, modules) + + return report + + +def trytry(): + a = validate_modules() + print_report_message(a, 'pantheon2.yml') + + +trytry() diff --git a/PantheonCMD/validation/try.py b/PantheonCMD/validation/try.py new file mode 100644 index 0000000..a399f5b --- /dev/null +++ b/PantheonCMD/validation/try.py @@ -0,0 +1,60 @@ +#!/usr/bin/python3 + +import subprocess +import re +from pcchecks import Regex + + +def get_adoc_files(): + command = ("find . 
-type f -name '*.adoc'")
+    process = subprocess.run(command, stdout=subprocess.PIPE, shell=True).stdout
+    adoc_files = process.strip().decode('utf-8').split('\n')
+
+    return adoc_files
+
+
+
+path = 'rhel-8/common-content/_attributes.adoc'
+
+with open(path, 'r') as file:
+    attribute_list = []
+    for line in file:
+        line = line.strip()
+        if line.startswith(r':'):
+            attributes = re.findall(Regex.ATTRIBUTE, line)
+            for attribute in attributes:
+                if attribute not in attribute_list:
+                    attribute_list.append(attribute)
+
+
+
+def get_attributes(stripped_file):
+    all_attributes = []
+    attributes = re.findall(Regex.ATTRIBUTE, stripped_file)
+    for attribute in attributes:
+        if attribute not in all_attributes:
+            all_attributes.append(attribute)
+
+    return all_attributes
+
+
+def get_attributes_list(adoc_files_found):
+    attribute_list = []
+
+    for item in adoc_files_found:
+        with open(item, 'r') as file:
+            # attribute_list accumulates attributes across all files
+
+            for line in file:
+                line = line.strip()
+                if line.startswith(r':'):
+                    attributes = re.findall(Regex.ATTRIBUTE, line)
+                    for attribute in attributes:
+                        if attribute not in attribute_list:
+                            attribute_list.append(attribute)
+
+    return attribute_list
+
+
+adoc_files = get_adoc_files()
+print(get_attributes_list(adoc_files))
diff --git a/run_tests.sh b/run_tests.sh
new file mode 100755
index 0000000..b8b18a1
--- /dev/null
+++ b/run_tests.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+cd PantheonCMD && python3 -m unittest discover .. -b
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/fixtures/file.adoc b/test/fixtures/file.adoc
new file mode 100644
index 0000000..e69de29
diff --git a/test/fixtures/other-file.adoc b/test/fixtures/other-file.adoc
new file mode 100644
index 0000000..e69de29
diff --git a/test/test_pcentrypointvalidator.py b/test/test_pcentrypointvalidator.py
new file mode 100644
index 0000000..a7a64e6
--- /dev/null
+++ b/test/test_pcentrypointvalidator.py
@@ -0,0 +1,80 @@
+
+import unittest
+import os
+import glob
+
+from PantheonCMD.pcentrypointvalidator import *
+
+
+class TestGetNonexistentEntryPoint(unittest.TestCase):
+    def setUp(self):
+        self.current_path = os.path.dirname(__file__)
+        self.fixtures_path = os.path.join(self.current_path, "fixtures")
+
+    def test_fake_path(self):
+        files = ["/some/fake/path.adoc"]
+
+        with self.assertRaises(SystemExit) as cm:
+            get_nonexisting_entry_points(files)
+
+        self.assertEqual(cm.exception.code, 2)
+
+    def test_real_path(self):
+        files = glob.glob(self.fixtures_path + '/*.adoc')
+
+        try:
+            get_nonexisting_entry_points(files)
+        except SystemExit:
+            self.fail("get_nonexisting_entry_points() raised SystemExit unexpectedly!")
+
+
+class TestRemoveAttributeFiles(unittest.TestCase):
+    def test_underscore_filename(self):
+        files = ['some-file.adoc', '_attribute.adoc']
+
+        result = remove_attribute_files(files)
+        self.assertEqual(result, ['some-file.adoc'])
+
+    def test_underscore_directory(self):
+        files = ['some-file.adoc', '_dir/attribute.adoc']
+
+        result = remove_attribute_files(files)
+        self.assertEqual(result, ['some-file.adoc'])
+
+    def test_underscore_files_and_dir(self):
+        files = ['some-file.adoc', '_dir/attribute.adoc', '_attribute.adoc']
+
+        result = remove_attribute_files(files)
+        self.assertEqual(result, ['some-file.adoc'])
+
+    def test_no_underscore(self):
+        files = ['some-file.adoc']
+
+        result = remove_attribute_files(files)
+        self.assertEqual(result, ['some-file.adoc'])
+
+    def test_all_underscore(self):
+        files = ['_dir/attribute.adoc', '_attribute.adoc']
+
+        result = remove_attribute_files(files)
+        self.assertEqual(result, [])
+
+    def test_empty_list(self):
+        files = []
+
+        result = remove_attribute_files(files)
+        self.assertEqual(result, [])
+
+
+class TestSubAttributesInPath(unittest.TestCase):
+    def setUp(self):
+        self.fixtures_path = os.path.join(os.path.dirname(__file__), "fixtures")
+
+    def test_attribute_in_path(self):
+        file_name = os.path.join(self.fixtures_path, "{attribute}/valid.yml")
+        self.assertEqual(sub_attributes_in_path([file_name]), [])  # the wildcard matches no fixture, so nothing is returned
+
+
+# run all the tests in this file
+if __name__ == '__main__':
+    unittest.main()
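Usage sketch (illustrative only, not part of the patch): the snippet below shows how the new pcmsg helpers introduced in this diff (Report, print_message, print_report_message) are intended to be combined, mirroring validate_build_files() and validate_merge_request_files(); the category string and file paths are made up for the example. The new tests under test/ can be run with ./run_tests.sh, which wraps python3 -m unittest discover.

#!/usr/bin/python3
# Illustrative sketch only; the report category and file paths below are hypothetical.

from pcmsg import Report, print_message, print_report_message

report = Report()

# Record one finding per category and file; print_report() later groups files by category.
report.create_report('unsupported nesting', 'assemblies/assembly_example.adoc')

# Generic listing helper: prints "Your <specification> <msg>:" followed by the items and a total.
print_message(['modules/missing-file.adoc'], 'pantheon2.yml',
              'contains the following files that do not exist in your repository')

# Prints the per-category report and exits with status 2 if anything was recorded, 0 otherwise.
print_report_message(report, 'pantheon2.yml')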