dfin-tool.py
##### Installation #####
# 1. Install Python 3 from the Microsoft Store
# 2. Open a terminal: pip install gherkin-official
##### Usage #####
# 3. Call the functions you need under the "##### main actions #####" section below
# 4. Open a terminal: python <this-file-name>
import os
import re
import shutil
from functools import reduce
from itertools import chain
from typing import Any

from gherkin.parser import Parser
##### Configuration #####
PROJECT_DIR: str = "D:/DFSArc-ArcReporting-Automation_BDD/"
ROOT_SRC_FOLDERS = ["arc_reporting", "auth0", "nmfp3", "nmfp", "psp"]
DOTNET_UPGRADE_TAG_SUFFIX: str = "-dotnet"
DOTNET_UPGRADE_PIPELINE_TAGS: list[str] = [
    "nmfp3",
    "nmfp",
    "auditreports",
    "configuration",
    "documents",
    "libraries",
    "tsr_tagging",
    "financialworkbook",
    "footnotesassignment",
    "fundholdings",
    "tsr",
    "data-load",
    "auth0",
    "additional_financialworkbook",
    "dataloads",
]
REMOVED_TAGS: list[str] = [
    "lion",
    "phoenix",
    "areca",
    "warheads",
    "qa-regression",
    "regression",
    "hussars",
    "lifesavers",
]
# @nmfp3|@nmfp|@auditreports|@configuration|@documents|@libraries|@tsr_tagging|@financialworkbook|@footnotesassignment|@fundholdings|@tsr
# @lion|@phoenix|@areca|@warhead|@qa-regression|@regression|@hussars|@dotnet_upgrade
# @lion-dotnet|@phoenix-dotnet|@areca-dotnet|@warhead-dotnet|@qa-regression-dotnet|@regression-dotnet|@hussars-dotnet|@dotnet_upgrade
# (@\S+)\s{2,}(@\S+)
##### Functions #####
is_exists_path = os.path.exists
is_directory = os.path.isdir
is_file = os.path.isfile


def get_abs_path(file_path: str) -> str:
    """Get the absolute file path"""
    if is_abs_path(file_path):
        return file_path
    return PROJECT_DIR + file_path


def is_abs_path(file_path: str) -> bool:
    """Treat a path as absolute when it already contains the project directory"""
    return PROJECT_DIR in file_path


# def get_scenario_names_in_multiple_files(file_paths: list[str]) -> list[str]:
#     """Get scenario names in multiple files"""
#     abs_file_paths = map(get_abs_path, file_paths)
#     scenario_names = map(get_scenario_names, abs_file_paths)
#     return reduce(lambda x, y: x + y, scenario_names)


def read_file(abs_file_path: str) -> str:
    """Read file from the absolute file path"""
    with open(abs_file_path, "r", encoding="utf-8") as file:
        content = file.read()
    return content


def write_file(abs_file_path: str, content: str):
    """Write file"""
    with open(abs_file_path, "w", encoding="utf-8") as file:
        file.write(content)


def to_gherkin_tag(tag: str) -> str:
    return "@{}".format(tag)


def flatten(arr: list[list[Any]]) -> list[Any]:
    return list(chain(*arr))


def compact(arr: list[Any]) -> list[Any]:
    return list(filter(lambda x: x is not None, arr))


def get_scenarios_in_rule(gherkin_doc: dict[str, Any]) -> list[Any]:
    try:
        features = gherkin_doc["feature"]["children"]
        feature_values = flatten(map(lambda f: list(f.values()), features))
        feature_values_in_rule = list(
            filter(lambda v: v["keyword"] == "Rule", feature_values)
        )
        scenarios = flatten(map(lambda v: v["children"], feature_values_in_rule))
        return compact(list(map(lambda s: s.get("scenario"), scenarios)))
    except Exception as e:
        print("[ERROR] get_scenarios_in_rule", e)
        return []


def get_scenarios(gherkin_doc: dict[str, Any]) -> list[Any]:
    try:
        features = gherkin_doc["feature"]["children"]
        feature_values = flatten(map(lambda f: list(f.values()), features))
        return list(filter(lambda v: "Scenario" in v["keyword"], feature_values))
    except Exception as e:
        print("[ERROR] get_scenarios", e)
        return []


# def get_scenario_names(file_path: str) -> list[str]:
#     """Get scenario names of one file"""
#     parser = Parser()
#     parsed_data = parser.parse(read_file(file_path))
#     return list(map(lambda s: s["name"], get_all_scenarios(parsed_data)))


def pp(_arr: list[Any], _prefix: str = ""):
    for _item in _arr:
        print(_prefix + _item)


def unique_list(_arr: list[str]) -> list[str]:
    return list(dict.fromkeys(_arr))


def get_all_runner_test_case_tags(file_paths: list[str]) -> str:
    """Get all runner tags of multiple files"""
    abs_file_paths = map(get_abs_path, file_paths)
    names = reduce(
        lambda acc, tags: acc + tags,
        map(get_all_test_case_tags, abs_file_paths),
        [],
    )
    unique_names = unique_list(names)
    return " | ".join(unique_names)


def has_at_least_2_digits(s: str) -> bool:
    return bool(re.search(r"\d.*\d", s))


def get_all_scenarios(gherkin_doc: Any) -> list[Any]:
    # get scenarios in Rule
    scenarios_in_rule = get_scenarios_in_rule(gherkin_doc)
    # get scenarios in Scenario (Outline)
    scenarios = get_scenarios(gherkin_doc)
    return scenarios_in_rule + scenarios


def get_runner_test_case_from_tag_names(tag_names: list[str]) -> str:
    runner_test_case_tags = list(filter(has_at_least_2_digits, tag_names))
    if len(runner_test_case_tags) > 0:
        return runner_test_case_tags[0][1:]
    else:
        raise Exception("There is no test case tag.")


def get_runner_test_case_from_tags(tags: list[Any]) -> str:
    tag_names = map(lambda t: t["name"], tags)
    return get_runner_test_case_from_tag_names(tag_names)


def get_all_test_case_tags(file_path: str) -> list[str]:
    """Get all runner tags of one file"""
    parser = Parser()
    # print("@file_path: ", file_path)
    parsed_data = parser.parse(read_file(file_path))
    scenarios = get_all_scenarios(parsed_data)
    tags = filter(lambda _tags: len(_tags) > 0, map(lambda s: s.get("tags"), scenarios))
    return list(map(get_runner_test_case_from_tags, tags))
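

# Example (illustrative): for a scenario tagged "@regression @fundholdings @45-678",
# get_runner_test_case_from_tags picks "@45-678" (the first tag containing at least
# two digits) and returns it without the leading "@", i.e. "45-678".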


def convert_file_to_dotnet_upgrade(file_path: str) -> str:
    """Auto-convert a feature file to a dotnet upgrade feature file"""
    # 1. copy the file to the dotnet upgrade folder
    dest_file_path = copy_file_to_dotnet_upgrade(file_path)
    # 2. read the copied file and modify it
    content = read_file(dest_file_path)
    updated_content = _replace_to_dotnet_upgrade_env(content)
    updated_content = _add_suffix_to_test_cases(
        updated_content, DOTNET_UPGRADE_TAG_SUFFIX
    )
    updated_content = _remove_tags(updated_content, map(to_gherkin_tag, REMOVED_TAGS))
    updated_content = _add_suffix_to_tags(
        updated_content,
        map(to_gherkin_tag, DOTNET_UPGRADE_PIPELINE_TAGS),
        DOTNET_UPGRADE_TAG_SUFFIX,
    )
    updated_content = _patch_fix_nmfp3_tag(updated_content)
    updated_content = _patch_fix_tsr_tagging_tag(updated_content)
    updated_content = _trim_space_tags(updated_content)
    # 3. write the updated content back
    write_file(dest_file_path, updated_content)
    return dest_file_path


def _patch_fix_nmfp3_tag(content: str):
    # "@nmfp3" is suffixed first, then the plain "@nmfp" substitution also matches
    # inside it, producing "@nmfp-dotnet3-dotnet"; repair it back to "@nmfp3-dotnet".
    return re.sub(r"@nmfp-dotnet3-dotnet", "@nmfp3-dotnet", content)


def _patch_fix_tsr_tagging_tag(content: str):
    # Same overlap issue: "@tsr" also matches inside "@tsr_tagging-dotnet".
    return re.sub(r"@tsr-dotnet_tagging-dotnet", "@tsr_tagging-dotnet", content)


def convert_files_to_dotnet_upgrade(file_paths: list[str]):
    """Auto-convert feature files to dotnet upgrade feature files"""
    abs_files = map(get_abs_path, file_paths)
    return list(map(convert_file_to_dotnet_upgrade, abs_files))


def _add_suffix_to_test_cases(content: str, suffix: str = "-dotnet") -> str:
    """Auto-add the suffix to test case tags"""
    content = re.sub(r"@([\d\-_]+)", r"@\1{}".format(suffix), content)
    content = re.sub(r"@(tc_[\d]+)", r"@\1{}".format(suffix), content)
    content = re.sub(r"@(tc[\d]+)", r"@\1{}".format(suffix), content)
    return content
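

# Example (illustrative): "@12-345" -> "@12-345-dotnet", "@tc_001" -> "@tc_001-dotnet",
# "@tc42" -> "@tc42-dotnet"; plain word tags such as "@fundholdings" are not touched here.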


def _add_suffix_to_tags(content: str, tags: list[str], suffix: str = "-dotnet") -> str:
    """Auto-add the suffix to the given tags"""
    return reduce(
        lambda acc, tag: re.sub(r"{}".format(tag), r"{}{}".format(tag, suffix), acc),
        tags,
        content,
    )


def _remove_tags(content: str, tags: list[str]) -> str:
    """Remove tags"""
    return reduce(lambda acc, tag: re.sub(r"{}".format(tag), r"", acc), tags, content)


def _trim_space_tags(content: str) -> str:
    """Trim extra spaces between tags"""
    # collapse runs of spaces between a word character and the next "@tag" to one space
    content = re.sub(r"(\w) +@", r"\1 @", content)
    # strip spaces left at the start of a line before a tag (per line, hence re.M)
    content = re.sub(r"^ +@", "@", content, flags=re.M)
    content = re.sub(r"( )+\n", "\n", content)
    return content
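

# Example (illustrative): "@fundholdings-dotnet    @12-345-dotnet" becomes
# "@fundholdings-dotnet @12-345-dotnet", and a line left starting with spaces after
# tag removal ("   @tsr-dotnet") becomes "@tsr-dotnet".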


def is_have_been_copied(file_path: str) -> bool:
    """Check whether a file has already been copied to the dotnet_upgrade folder"""
    dest_file_path = get_dest_file_path(file_path)
    return is_exists_path(dest_file_path)


def list_abs_files_and_folders(folder_path: str) -> list[str]:
    """List all files and folders in the folder as absolute paths"""
    # 1. Transform to an absolute folder path if needed
    abs_dir = get_abs_path(folder_path)
    # 2. List all files and directories in the input folder as absolute paths
    if is_file(abs_dir):
        return [abs_dir]
    items = os.listdir(abs_dir)
    return list(map(lambda item: "/".join([abs_dir, item]), items))


def list_abs_files(folder_path: str, includes_sub_folders=False) -> list[str]:
    """List all files in the folder as absolute paths"""
    abs_dir = get_abs_path(folder_path)
    if is_file(abs_dir):
        return [abs_dir]
    items = list_abs_files_and_folders(abs_dir)
    if includes_sub_folders:
        files = []
        for item in items:
            if is_file(item):
                files.append(item)
            elif is_directory(item):
                # recurse with the same setting so deeply nested files are included
                files.extend(list_abs_files(item, includes_sub_folders))
            else:
                raise Exception("Unsupported file type: {}".format(item))
        return files
    else:
        return list(filter(is_file, items))


def count_text(content: str, text: str) -> int:
    """Count the number of occurrences of text in the content"""
    # escape the text so it is counted literally rather than as a regex pattern
    return len(re.findall(re.escape(text), content))


def list_files_have_not_called_assert_all_in_multiple_folders(
    folder_paths: list[str],
) -> list[str]:
    """List all files that have not called assertAll, across multiple folders"""
    return flatten(map(list_files_have_not_called_assert_all, folder_paths))


def list_files_have_not_called_assert_all(folder_path: str) -> list[str]:
    """List all files that have not called assertAll"""
    # 1. List all files in the input folder as absolute paths
    files = list_abs_files(folder_path, True)

    # 2. If the "new SoftAssertion()" count does not equal the "assertAll()" count,
    #    add the file to the result
    def diff_assert_all_called(file):
        content = read_file(file)
        new_soft_assertion_text_count = count_text(content, "new SoftAssertion()")
        assert_all_count = count_text(content, "assertAll()")
        return new_soft_assertion_text_count != assert_all_count

    result = list(filter(diff_assert_all_called, files))
    return result


def list_files_have_not_been_copied(folder_path: str) -> list[str]:
    """List all files that have not been copied to the dotnet_upgrade folder"""
    result = []
    # 1. List all files and directories in the input folder as absolute paths
    abs_items = list_abs_files_and_folders(folder_path)
    # 2. If a file has not been copied, add it to the result.
    #    If the item is a directory, list its not-yet-copied files recursively.
    for item in abs_items:
        if is_file(item):
            if not is_have_been_copied(item):
                result.append(item)
        elif is_directory(item):
            result.extend(list_files_have_not_been_copied(item))
        else:
            raise Exception("Unsupported file type: {}".format(item))
    return result


def list_files_have_not_been_copied_in_multiple_dirs(
    folder_paths: list[str], ignore_paths: list[str] = []
) -> list[str]:
    """List all files that have not been copied to the dotnet_upgrade folder"""

    def is_ignore(path: str) -> bool:
        for p in ignore_paths:
            if p in path:
                return True
        return False

    res = reduce(lambda x, y: x + y, map(list_files_have_not_been_copied, folder_paths))
    res = list(filter(lambda p: not is_ignore(p), res))
    return unique_list(res)


def get_relative_path(file_path: str) -> str:
    """Get the relative path from the absolute path"""
    if is_abs_path(file_path):
        return re.sub(r"{}".format(PROJECT_DIR), "", file_path)
    return file_path


def _replace_to_dotnet_upgrade_env(content: str) -> str:
    """Replace every environment name with its dotnet upgrade equivalent"""
    # ar
    content = re.sub(r'"auto"', r'"autonet"', content)
    content = re.sub(r'"hsire"', r'"hsireautonet"', content)
    content = re.sub(r'"autoaptsr"', r'"autoaptsrnet"', content)
    content = re.sub(r'"emberdemo"', r'"autoneted"', content)
    # nmfp
    content = re.sub(r'"BNY"|"bny"', r'"dfs"', content)
    content = re.sub(r'"Auto_ProductionA"|"auto_productiona"', r'"autonetaf"', content)
    # ar new doc
    content = re.sub(r'"factsheet"', r'"factsheetnet"', content)
    content = re.sub(r'"priipskid1"', r'"priipskidnet"', content)
    content = re.sub(r'"ucits"', r'"UCITSnet"', content)
    return content
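

# Example (illustrative, made-up step text): a step such as
#   Given user logs in to the "auto" environment
# becomes
#   Given user logs in to the "autonet" environment
# (only the quoted environment names are rewritten).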


def get_dest_file_path(src_file_path: str) -> str:
    """Get the dotnet_upgrade file path mapped from the source file path"""
    src_folders = list(map(lambda f: "/{}/".format(f), ROOT_SRC_FOLDERS))
    dest_folder = list(filter(lambda f: f in src_file_path, src_folders))[0]
    dest_file_path_arr = src_file_path.split(dest_folder)
    dest_file_path_arr.insert(1, "/dotnet_upgrade{}".format(dest_folder))
    dest_file_path = "".join(dest_file_path_arr)
    return dest_file_path
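

# Example: for the source path
#   "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/proofing.feature"
# get_dest_file_path would return
#   "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/financial_workbook/worksheets/proofing.feature"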


def copy_file_to_dotnet_upgrade(file_path: str) -> str:
    """Copy a file to the dotnet upgrade folder"""
    dest_file_path = get_dest_file_path(file_path)
    # create the destination folder if needed, then copy (overwriting any existing file)
    # os.makedirs(os.path.dirname(dest_file_path), exist_ok=True)
    shutil.copy2(file_path, dest_file_path)
    return dest_file_path


def get_runner_tags_by_scenario_names(file_paths: list[str], names: list[str]) -> str:
    tags = get_tags_by_scenario_names(file_paths, names)
    return " | ".join(tags)


def get_all_scenarios_in_files(file_paths: list[str]) -> list[str]:
    parser = Parser()
    abs_file_paths = map(get_abs_path, file_paths)
    contents = map(read_file, abs_file_paths)
    parsed_datas = map(lambda x: parser.parse(x), contents)
    return flatten(map(get_all_scenarios, parsed_datas))


def get_all_scenarios_in_folder(
    folder_path: str, includes_sub_folders=False
) -> list[str]:
    file_paths = list_abs_files(folder_path, includes_sub_folders)
    return get_all_scenarios_in_files(file_paths)


def get_all_scenarios_in_folders(
    folder_paths: list[str], includes_sub_folders=False
) -> list[str]:
    def _all_scenarios_in_folder(folder_path: str) -> list[str]:
        return get_all_scenarios_in_folder(folder_path, includes_sub_folders)

    return flatten(map(_all_scenarios_in_folder, folder_paths))


# Browser console snippet (presumably used to collect scenario names to paste into the
# "names" list in the main actions below):
# [...temp1.querySelectorAll(".clickable-text")].map(e => "'" + e.innerText + "'").join(",\n")


def get_tags_by_scenario_names(file_paths: list[str], names: list[str]) -> list[str]:
    scenarios = get_all_scenarios_in_files(file_paths)

    def get_runner_test_case(name: str):
        scenarios_filter = list(filter(lambda s: s["name"] == name, scenarios))
        tags = filter(
            lambda _tags: len(_tags) > 0, map(lambda s: s.get("tags"), scenarios_filter)
        )
        runner_test_cases = list(map(get_runner_test_case_from_tags, tags))
        runner_test_case = runner_test_cases[0] if len(runner_test_cases) > 0 else None
        return runner_test_case

    return compact(map(get_runner_test_case, names))


##### main actions #####
files = [
    "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/calculation_sheet_worksheet.feature",
    "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/class_balances_worksheet.feature",
    "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/fund_data_points.feature",
    "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/history_worksheet.feature",
    "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/horizontal_totals.feature",
    "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/journal_entry_audit_report.feature",
    "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/lock_reporting_period.feature",
    "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/proofing.feature",
    "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/reasonability_checks.feature",
    "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/supplemental_data.feature",
    "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/trial_balance.feature",
    "src/test/resources/features/ui/arc_reporting/financial_workbook/worksheets/trial_balance_definitions.feature",
    "src/test/resources/features/ui/arc_reporting/financial_workbook/financial_workbook_general.feature",
    "src/test/resources/features/ui/arc_reporting/tsr/financial_workbook/financial_workbook_tsr.feature",
    # "src/test/resources/features/ui/arc_reporting/environment/data_settings/account_types.feature",
    # "src/test/resources/features/ui/arc_reporting/environment/data_settings/balance_types.feature",
    # "src/test/resources/features/ui/arc_reporting/environment/data_settings/base_classes.feature",
    # "src/test/resources/features/ui/arc_reporting/environment/data_settings/category_types.feature",
    # "src/test/resources/features/ui/arc_reporting/environment/data_settings/currencies.feature",
    # "src/test/resources/features/ui/arc_reporting/environment/data_settings/exchange_rate_sources.feature",
    # "src/test/resources/features/ui/arc_reporting/environment/data_settings/reporting_periods.feature",
    # "src/test/resources/features/ui/nmfp/administration/environment/balance_types/balance_types.feature",
    # "src/test/resources/features/ui/nmfp/administration/environment/base_classes/base_classes.feature",
    # "src/test/resources/features/ui/nmfp/administration/environment/currencies/currencies.feature",
    # "src/test/resources/features/ui/nmfp/administration/environment/reporting_periods/reporting_periods.feature",
    # "src/test/resources/features/ui/arc_reporting/libraries/data_libraries/abbreviations.feature",
    # "src/test/resources/features/ui/arc_reporting/libraries/data_libraries/categories.feature",
    # "src/test/resources/features/ui/arc_reporting/libraries/data_libraries/chart_of_accounts.feature",
    # "src/test/resources/features/ui/arc_reporting/libraries/data_libraries/custom_data_points.feature",
    # "src/test/resources/features/ui/arc_reporting/libraries/data_libraries/issuer_master.feature",
    # "src/test/resources/features/ui/arc_reporting/libraries/data_libraries/supplemental_data_import.feature",
    # "src/test/resources/features/ui/nmfp/data_libraries/categories/category_libraries_navigation.feature",
    # "src/test/resources/features/ui/nmfp/data_libraries/categories/category_libraries_push_changes.feature",
    # "src/test/resources/features/ui/nmfp/data_libraries/categories/category_library_update_categories.feature",
    # "src/test/resources/features/ui/nmfp/data_libraries/categories/category_library_update_libraries.feature",
    # "src/test/resources/features/ui/nmfp/data_libraries/categories/category_library_view_categories.feature",
    # "src/test/resources/features/ui/nmfp3/data_libraries/categories/category_library.feature",
    # "src/test/resources/features/ui/nmfp3/data_libraries/categories/category_library_view_categories.feature",
    # "src/test/resources/features/ui/nmfp/data_libraries/currency_captions/currency_caption_libraries.feature",
    # "src/test/resources/features/ui/nmfp/data_libraries/currency_captions/currency_caption_libraries_navigation.feature",
    # "src/test/resources/features/ui/arc_reporting/documents/configurations/financial_workbook_templates/trial_balance_definitions.feature",
    # "src/test/resources/features/ui/arc_reporting/documents/configurations/trial_balance_definitions.feature",
    # "src/test/resources/features/ui/arc_reporting/documents/configurations/shared_legend.feature",
    # "src/test/resources/features/ui/arc_reporting/documents/configurations/shared_legend_templates/shared_legend_template_output_settings_overlay_page.feature",
    # "src/test/resources/features/ui/arc_reporting/documents/configurations/shared_legend_templates/shared_legend_template_output_settings_page.feature",
    # "src/test/resources/features/ui/arc_reporting/documents/configurations/shared_legend_templates/shared_legend_templates.feature",
    # "src/test/resources/features/ui/arc_reporting/fundholdings/configuration/fund_holdings.configuration.feature",
    # "src/test/resources/features/ui/arc_reporting/fundholdings/fund_holdings_issuer_master.feature",
    # "src/test/resources/features/ui/nmfp/fund_holdings/fund_holdings_exception.feature",
    # "src/test/resources/features/ui/nmfp/fund_holdings/fund_holdings_navigation.feature",
    # "src/test/resources/features/ui/nmfp/fund_holdings/fund_holdings_quicklink.feature",
    # "src/test/resources/features/ui/nmfp/fund_holdings/fund_holdings_unsaved_change.feature",
    # "src/test/resources/features/ui/nmfp3/fund_holdings/fund_holdings_navigation.feature",
    # "src/test/resources/features/ui/nmfp3/fund_holdings/fund_holdings_unsaved_change.feature",
    # "src/test/resources/features/ui/nmfp3/fund_holdings/fund_holdings_update_fields.feature",
    # "src/test/resources/features/ui/nmfp3/fund_holdings/fund_holdings_view.feature",
    # "src/test/resources/features/ui/arc_reporting/assignment_reports/content/text_snippet_usage_report.feature",
    # "src/test/resources/features/ui/arc_reporting/assignment_reports/template/financial_statement_template_usage_report_standalone_page.feature",
    # "src/test/resources/features/ui/arc_reporting/assignment_reports/template/tabular_data_template_usage_report_overlay_page.feature",
    # "src/test/resources/features/ui/arc_reporting/assignment_reports/attribute/attribute_usage_report.feature",
    # "src/test/resources/features/ui/nmfp/assignment_reports/content/text_snippet_usage_report.feature",
    # "src/test/resources/features/ui/nmfp/assignment_reports/template/missing_output_settings_report_navigation.feature",
    # "src/test/resources/features/ui/nmfp/assignment_reports/template/portfolio_statement_template_usage_report.feature",
    # "src/test/resources/features/ui/nmfp/assignment_reports/template/tabular_data_template_usage_report.feature",
    # "src/test/resources/features/ui/arc_reporting/documents/proofs_and_checks_overlay/proofs_and_checks_overlay.feature",
    # "src/test/resources/features/ui/arc_reporting/tsr/users/features_access.feature",
    # "src/test/resources/features/ui/nmfp3/nmfp_document/documents/portfolio_edit_fund_holdings.feature",
]
destination_files = convert_files_to_dotnet_upgrade(files)
print("\n===== files copied:")
pp(destination_files)

# destination_files = list(map(get_dest_file_path, files))
# print("\n===== test case ids:")
# print(get_all_runner_test_case_tags(destination_files))
# List files that have not called assertAll
# directories = [
#     "src/main/java/actions/ui"
# ]
# print("\n===== files have not called assert all:")
# print(list_files_have_not_called_assert_all_in_multiple_folders(directories))
# List files have not been copied
# directories = [
#     "src/test/resources/features/ui/arc_reporting",
#     "src/test/resources/features/ui/auth0",
#     "src/test/resources/features/ui/nmfp3",
#     "src/test/resources/features/ui/nmfp",
#     "src/test/resources/features/ui/psp",
# ]
# ignore_directories = [
#     "src/test/resources/features/ui/arc_reporting/publishing",
# ]
# directories = [
#     # AR
#     "src/test/resources/features/ui/arc_reporting/assignment_reports",
#     "src/test/resources/features/ui/arc_reporting/audit_reports",
#     "src/test/resources/features/ui/arc_reporting/control_reports",
#     "src/test/resources/features/ui/arc_reporting/data_ingestion",
#     "src/test/resources/features/ui/arc_reporting/documents",
#     "src/test/resources/features/ui/arc_reporting/environment",
#     "src/test/resources/features/ui/arc_reporting/financial_workbook",
#     "src/test/resources/features/ui/arc_reporting/footnotes",
#     "src/test/resources/features/ui/arc_reporting/fundholdings",
#     "src/test/resources/features/ui/arc_reporting/funds",
#     "src/test/resources/features/ui/arc_reporting/libraries",
#     "src/test/resources/features/ui/arc_reporting/other_reports",
#     # "src/test/resources/features/ui/arc_reporting/publishing",
#     "src/test/resources/features/ui/arc_reporting/supporting_schedules",
#     "src/test/resources/features/ui/arc_reporting/system",
#     "src/test/resources/features/ui/arc_reporting/users",
#     "src/test/resources/features/ui/auth0",
#     # # TSR
#     # "src/test/resources/features/ui/arc_reporting/tsr/audit_reports",
#     # "src/test/resources/features/ui/arc_reporting/tsr/control_reports",
#     # "src/test/resources/features/ui/arc_reporting/tsr/documents",
#     # "src/test/resources/features/ui/arc_reporting/tsr/financial_workbook",
#     # "src/test/resources/features/ui/arc_reporting/tsr/funds",
#     # "src/test/resources/features/ui/arc_reporting/tsr/libraries",
#     # "src/test/resources/features/ui/arc_reporting/tsr/tagging_and_filing",
#     # "src/test/resources/features/ui/arc_reporting/tsr/users",
#     # # NMFP
#     # "src/test/resources/features/ui/nmfp/administration",
#     # "src/test/resources/features/ui/nmfp/assignment_reports",
#     # "src/test/resources/features/ui/nmfp/audit_reports",
#     # "src/test/resources/features/ui/nmfp/class_information",
#     # "src/test/resources/features/ui/nmfp/content_libraries",
#     # "src/test/resources/features/ui/nmfp/control_reports",
#     # "src/test/resources/features/ui/nmfp/data_ingestion_archive",
#     # "src/test/resources/features/ui/nmfp/data_ingestion_configuration",
#     # "src/test/resources/features/ui/nmfp/data_libraries",
#     # "src/test/resources/features/ui/nmfp/data_purge",
#     # "src/test/resources/features/ui/nmfp/filing_manager",
#     # "src/test/resources/features/ui/nmfp/fund_class_setup",
#     # "src/test/resources/features/ui/nmfp/fund_holdings",
#     # "src/test/resources/features/ui/nmfp/fund_information",
#     # "src/test/resources/features/ui/nmfp/funds",
#     # "src/test/resources/features/ui/nmfp/general_information",
#     # "src/test/resources/features/ui/nmfp/landing",
#     # "src/test/resources/features/ui/nmfp/nmfp_document",
#     # "src/test/resources/features/ui/nmfp/nmfp_document_configuration",
#     # "src/test/resources/features/ui/nmfp/other_reports",
#     # "src/test/resources/features/ui/nmfp/output_queue",
#     # "src/test/resources/features/ui/nmfp/repo_holdings_collateral",
#     # "src/test/resources/features/ui/nmfp/system_settings",
#     # "src/test/resources/features/ui/nmfp/users",
#     # "src/test/resources/features/ui/nmfp",
#     # # NMFP3
#     # "src/test/resources/features/ui/nmfp3/audit_reports",
#     # "src/test/resources/features/ui/nmfp3/class_information",
#     # "src/test/resources/features/ui/nmfp3/content_library",
#     # "src/test/resources/features/ui/nmfp3/control_reports",
#     # "src/test/resources/features/ui/nmfp3/audit_reports_archive",
#     # "src/test/resources/features/ui/nmfp3/data_ingestion_configuration",
#     # "src/test/resources/features/ui/nmfp3/data_libraries",
#     # "src/test/resources/features/ui/nmfp3/data_purge",
#     # "src/test/resources/features/ui/nmfp3/disposition_of_securities",
#     # "src/test/resources/features/ui/nmfp3/filing_manager",
#     # "src/test/resources/features/ui/nmfp3/fund_class_setup",
#     # "src/test/resources/features/ui/nmfp3/fund_holdings",
#     # "src/test/resources/features/ui/nmfp3/fund_information",
#     # "src/test/resources/features/ui/nmfp3/general_information",
#     # "src/test/resources/features/ui/nmfp3/nmfp_document",
#     # "src/test/resources/features/ui/nmfp3/other_reports",
#     # "src/test/resources/features/ui/nmfp3/repo_holdings_collateral",
#     # # PSP
#     # "src/test/resources/features/ui/psp",
# ]
# res = list(
#     map(
#         get_relative_path, list_files_have_not_been_copied_in_multiple_dirs(directories, ignore_directories)
#     )
# )
# print("\n==== files have not been copied:")
# pp(res, "")
# List all scenario names
# directories = [
#     "src/test/resources/features/ui/arc_reporting/tsr"
# ]
# print("\n=========== all scenarios in folder =============")
# pp(map(lambda s: s["name"], get_all_scenarios_in_folders(directories, True)), "- ")
# get all runner tags of multiple feature files
# files = [
#     "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/financial_workbook/worksheets/history_worksheet_dotnet.feature",
#     "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/financial_workbook/worksheets/horizontal_totals_dotnet.feature",
#     "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/financial_workbook/worksheets/journal_entry_audit_report_dotnet.feature",
#     "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/financial_workbook/worksheets/lock_reporting_period_dotnet.feature",
#     "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/financial_workbook/worksheets/reasonability_checks_dotnet.feature",
#     "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/financial_workbook/worksheets/proofing_dotnet.feature",
#     "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/financial_workbook/worksheets/supplemental_data_dotnet.feature",
#     "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/financial_workbook/worksheets/trial_balance_dotnet.feature"
# ]
# print("\n===== test case ids:")
# print(get_all_runner_test_case_tags(files))
# get all tags for scenario names
# files = [
#     "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/fundholdings/configuration/fund_holdings.configuration.feature",
#     "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/fundholdings/fund_holdings.feature",
#     "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/fundholdings/fund_holdings_general.feature",
#     "src/test/resources/features/ui/dotnet_upgrade/arc_reporting/fundholdings/fund_holdings_issuer_master.feature",
# ]
# names = [
#     "ArcReportingTests",
#     "User is able to add a new abbreviation in a Fund holding and visualize it",
#     "User is able to import fund Holdings data from the fund holdings page",
#     'Verify that all fields that existing in the imported document are overwritten to Fund Holding table in case "Overwrite with blanks" checkbox selected',
#     'Verify that only the fields that exists in the imported document are overwritten to Fund Holding table in case "Overwrite with blanks" checkbox selected',
#     'Verify that only the fields that exists in the imported document are overwritten to Fund Holding table in case "Overwrite with blanks" checkbox unselected',
#     "User is able to import fund Holdings data from the fund holdings page for SRE in production",
#     "Verify user can/cannot open the Open Issuer Master Page if they have/don't have the user right to them",
#     'Verify user is able to view the "Add New Issuer" button on the "Select Issuer" popup',
#     'Verify user is able to make change "Edit/Remove" and Revert changes in the Issuer Master overlay page',
#     "Verify user is able to select a profile that was created with NO complex name and import file successfully",
#     'Verify that only the fields that have value in the imported document are overwritten to Fund Holding table in case "Overwrite with blanks" checkbox unselected',
#     "Verify user is able to bulk change Fair Value Type",
#     "Verify user is able to bulk change Fair Value Type for the filtered rows of the grid",
#     "Verify the counter party is able to update value for the same master identifier",
# ]
# print("\n===== runner tags:")
# print(get_runner_tags_by_scenario_names(files, names))