SYMBOL INDEX (1206 symbols across 194 files)

FILE: assets/gab/metadata/tables/dim_calendar.sql
  type `database` (line 2) | CREATE EXTERNAL TABLE `database`.dim_calendar (
FILE: assets/gab/metadata/tables/dummy_sales_kpi.sql
  type `database` (line 2) | CREATE EXTERNAL TABLE `database`.`dummy_sales_kpi` (
FILE: assets/gab/metadata/tables/gab_log_events.sql
  type `database` (line 2) | CREATE EXTERNAL TABLE `database`.`gab_log_events`
FILE: assets/gab/metadata/tables/gab_use_case_results.sql
  type `database` (line 2) | CREATE EXTERNAL TABLE `database`.`gab_use_case_results`
FILE: assets/gab/metadata/tables/lkp_query_builder.sql
  type `database` (line 2) | CREATE EXTERNAL TABLE `database`.`lkp_query_builder`
FILE: assets/gab/notebooks/gab.py
  function flatten_extend (line 19) | def flatten_extend(list_to_flatten: list) -> list:
FILE: assets/gab/notebooks/gab_job_manager.py
  function divide_chunks (line 56) | def divide_chunks(input_list: list, max_number_of_jobs: int) -> list:
  function get_run_regions (line 81) | def get_run_regions(job_schedule: dict, job_info: dict) -> list:
FILE: assets/gab/utils/databricks_job_utils.py
  class BearerAuth (line 12) | class BearerAuth:
  method __init__ (line 15) | def __init__(self, token):
  method __call__ (line 19) | def __call__(self, r):
  class ResultState (line 28) | class ResultState(str, enum.Enum):
  class DatabricksJobs (line 37) | class DatabricksJobs:
  method __init__ (line 51) | def __init__(self, databricks_instance: str, auth: str):
  method _check_response (line 63) | def _check_response(response):
  method list_jobs (line 67) | def list_jobs(self, name: str = None, limit: int = 20, offset: int = 0...
  method run_now (line 95) | def run_now(self, job_id: int, notebook_params: dict, idempotency_toke...
  method get_output (line 123) | def get_output(self, run_id: int) -> dict:
  method get_job (line 147) | def get_job(self, run_id: int) -> dict:
  method cancel_job (line 166) | def cancel_job(self, run_id: int) -> dict:
  method trigger_job_by_name (line 185) | def trigger_job_by_name(self, job_name: str, notebook_params: dict, id...
  method get_job_status (line 202) | def get_job_status(self, run_id: int) -> Tuple[bool, dict]:
  method job_id_extraction (line 215) | def job_id_extraction(self, job_name: str) -> int:
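
Example (databricks_job_utils): a minimal usage sketch based only on the signatures indexed above. The workspace URL, token, job name, and notebook parameters are placeholder values, and BearerAuth's __call__(self, r) signature suggests, but does not confirm, the requests auth-callable pattern.

    # All values are hypothetical; only the method signatures come from this index.
    from assets.gab.utils.databricks_job_utils import DatabricksJobs

    jobs = DatabricksJobs(
        databricks_instance="https://my-workspace.cloud.databricks.com",
        auth="my-api-token",
    )
    # job_id_extraction resolves a job name to its id; trigger_job_by_name
    # appears to combine that lookup with run_now (its truncated third
    # parameter is assumed to be an idempotency token).
    response = jobs.trigger_job_by_name(
        job_name="gab_daily_run",
        notebook_params={"region": "emea"},
    )
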
FILE: assets/gab/utils/query_builder_utils.py
  class QueryBuilderUtils (line 8) | class QueryBuilderUtils:
  method __init__ (line 11) | def __init__(self):
  method check_config_inputs (line 16) | def check_config_inputs(
  method create_sql_statement (line 112) | def create_sql_statement(
  method get_dimensions (line 188) | def get_dimensions(self, num_dimensions: str) -> str:
  method get_recon_choices (line 219) | def get_recon_choices(cls) -> list:
  method get_metric_configuration (line 266) | def get_metric_configuration(cls, num_of_metrics: str) -> dict:
  method get_recon_config (line 335) | def get_recon_config(self, recon_list: list) -> dict:
  method get_stages (line 385) | def get_stages(self, sql_files_list: list, usecase_name: str) -> dict:
  method get_view_information (line 437) | def get_view_information(self, num_of_views: str) -> dict:
  method insert_data_into_lkp_query_builder (line 467) | def insert_data_into_lkp_query_builder(cls, delete_sttmt: str, insert_...
  method print_definitions (line 483) | def print_definitions(
  method set_dimensions (line 549) | def set_dimensions(cls, num_dimensions: str):
  method set_extra_metric_config (line 564) | def set_extra_metric_config(self, num_of_metrics: str, metrics_dict: d...
  method set_metric (line 582) | def set_metric(cls, num_of_metrics: str):
  method set_stages (line 601) | def set_stages(self, sql_files: list) -> list:
  method set_views (line 647) | def set_views(cls, num_of_views: str):
  method _format_keys_list (line 664) | def _format_keys_list(cls, key_str: str) -> list:
  method _generate_query_id (line 682) | def _generate_query_id(cls, usecase_name: str) -> int:
  method _get_mapping (line 699) | def _get_mapping(cls, dims_dict: dict, dimensions: str, from_date: str...
  method _print_dims_dict (line 731) | def _print_dims_dict(cls, dims_dict: dict):
  method _print_derived_metrics (line 747) | def _print_derived_metrics(cls, key_metrics: str, derived_metric: str,...
  method _print_metrics_dict (line 767) | def _print_metrics_dict(self, key_metrics: str, metrics_dict: dict):
  method _print_recon_dict (line 797) | def _print_recon_dict(cls, recon_dict: dict):
  method _print_stages_dict (line 822) | def _print_stages_dict(cls, stages_dict: dict):
  method _sort_files (line 844) | def _sort_files(cls, sql_files: str) -> list:
  method _validate_metrics_config (line 870) | def _validate_metrics_config(cls, calc_metric: str, metrics_dict: dict...
FILE: cicd/code_doc/custom_example_macros.py
  function _search_files (line 10) | def _search_files(file: dict, search_string: str) -> list:
  function _link_example (line 31) | def _link_example(method_name: str) -> str or None:
  function _get_dict_transformer (line 58) | def _get_dict_transformer(dict_to_search: dict, transformer: str) -> dict:
  function _highlight_examples (line 79) | def _highlight_examples(method_name: str) -> str or None:
  function get_example (line 120) | def get_example(method_name: str) -> str:
  function define_env (line 146) | def define_env(env):
FILE: cicd/code_doc/mkdocs_macros.py
  function _search_files (line 10) | def _search_files(file: dict, search_string: str) -> list:
  function _link_example (line 31) | def _link_example(method_name: str) -> str or None:
  function _get_dict_transformer (line 58) | def _get_dict_transformer(dict_to_search: dict, transformer: str) -> dict:
  function _highlight_examples (line 79) | def _highlight_examples(method_name: str) -> str or None:
  function get_example (line 120) | def get_example(method_name: str) -> str:
  function format_operations_table (line 147) | def format_operations_table(operations_dict: dict) -> str:
  function get_table_manager_operations (line 178) | def get_table_manager_operations() -> str:
  function get_file_manager_operations (line 188) | def get_file_manager_operations() -> str:
  function define_env (line 198) | def define_env(env):
FILE: cicd/code_doc/render_doc.py
  function _get_project_version (line 21) | def _get_project_version() -> str:
  function _search_files (line 32) | def _search_files(file: dict, search_string: str) -> list:
  function _get_dict_transformer (line 51) | def _get_dict_transformer(dict_to_search: dict, transformer: str) -> dict:
  function _link_example (line 69) | def _link_example(module_name: str) -> str or None:
  function _highlight_examples (line 86) | def _highlight_examples(module_name: str) -> str or None:
FILE: cicd/code_doc/render_docs.py
  function _copy_documentation (line 44) | def _copy_documentation(directories: list = "", files: list = ""):
FILE: lakehouse_engine/algorithms/algorithm.py
  class Algorithm (line 14) | class Algorithm(Executable):
  method __init__ (line 17) | def __init__(self, acon: dict):
  method get_dq_spec (line 26) | def get_dq_spec(
  method _get_dq_functions (line 120) | def _get_dq_functions(spec: dict, function_key: str) -> List[DQFunctio...
  method _validate_dq_tag_strategy (line 144) | def _validate_dq_tag_strategy(spec: DQSpec) -> None:
FILE: lakehouse_engine/algorithms/data_loader.py
  class DataLoader (line 33) | class DataLoader(Algorithm):
  method __init__ (line 51) | def __init__(self, acon: dict):
  method read (line 85) | def read(self) -> OrderedDict:
  method transform (line 97) | def transform(self, data: OrderedDict) -> OrderedDict:
  method process_dq (line 130) | def process_dq(
  method write (line 183) | def write(self, data: OrderedDict) -> OrderedDict:
  method terminate (line 214) | def terminate(self, data: OrderedDict) -> None:
  method execute (line 227) | def execute(self) -> Optional[OrderedDict]:
  method _get_input_specs (line 257) | def _get_input_specs(self) -> List[InputSpec]:
  method _get_transform_specs (line 265) | def _get_transform_specs(self) -> List[TransformSpec]:
  method _get_dq_specs (line 332) | def _get_dq_specs(self) -> List[DQSpec]:
  method _get_output_specs (line 375) | def _get_output_specs(self) -> List[OutputSpec]:
  method _get_streaming_transformer_plan (line 433) | def _get_streaming_transformer_plan(
  method _get_terminate_specs (line 464) | def _get_terminate_specs(self) -> List[TerminatorSpec]:
  method _move_to_streaming_micro_batch_transformers (line 472) | def _move_to_streaming_micro_batch_transformers(
  method _move_to_streaming_micro_batch_dq_processors (line 499) | def _move_to_streaming_micro_batch_dq_processors(
  method _get_input_read_types (line 529) | def _get_input_read_types(list_of_specs: List) -> dict:
  method _get_transform_input_ids (line 541) | def _get_transform_input_ids(list_of_specs: List) -> dict:
  method _get_previous_spec_read_types (line 553) | def _get_previous_spec_read_types(
  method _verify_dq_rule_id_uniqueness (line 577) | def _verify_dq_rule_id_uniqueness(
FILE: lakehouse_engine/algorithms/dq_validator.py
  class DQValidator (line 20) | class DQValidator(Algorithm):
  method __init__ (line 32) | def __init__(self, acon: dict):
  method read (line 55) | def read(self) -> DataFrame:
  method process_dq (line 65) | def process_dq(self, data: DataFrame) -> DataFrame:
  method execute (line 87) | def execute(self) -> None:
  method _get_dq_spec (line 138) | def _get_dq_spec(input_dq_spec: dict) -> DQSpec:
  method _restore_prev_version (line 154) | def _restore_prev_version(self) -> None:
FILE: lakehouse_engine/algorithms/exceptions.py
  class ReconciliationFailedException (line 4) | class ReconciliationFailedException(Exception):
  class NoNewDataException (line 10) | class NoNewDataException(Exception):
  class SensorAlreadyExistsException (line 16) | class SensorAlreadyExistsException(Exception):
  class RestoreTypeNotFoundException (line 22) | class RestoreTypeNotFoundException(Exception):
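
Example (data_loader): DataLoader's public methods trace the pipeline order read -> transform -> process_dq -> write -> terminate, and its private _get_*_specs getters hint at the shape of the acon it consumes. A sketch under that assumption; the spec lists are left empty, and the real acon schema should be taken from the engine docs rather than inferred from this index.

    from lakehouse_engine.algorithms.data_loader import DataLoader

    acon = {
        "input_specs": [],      # consumed by read()
        "transform_specs": [],  # consumed by transform()
        "dq_specs": [],         # consumed by process_dq()
        "output_specs": [],     # consumed by write()
        "terminate_specs": [],  # consumed by terminate()
    }
    DataLoader(acon).execute()
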
FILE: lakehouse_engine/algorithms/gab.py
  class GAB (line 32) | class GAB(Algorithm):
  method __init__ (line 41) | def __init__(self, acon: dict):
  method execute (line 49) | def execute(self) -> None:
  method _process_use_case (line 98) | def _process_use_case(
  method _set_use_case_stage_template_file (line 168) | def _set_use_case_stage_template_file(
  method _process_use_case_query_cadence (line 192) | def _process_use_case_query_cadence(
  method _process_reconciliation_cadence (line 253) | def _process_reconciliation_cadence(
  method _process_use_case_query_step (line 374) | def _process_use_case_query_step(
  method _get_filtered_cadences (line 455) | def _get_filtered_cadences(
  method _get_latest_usecase_data (line 474) | def _get_latest_usecase_data(self, query_id: str) -> tuple[datetime, d...
  method _get_latest_run_date (line 485) | def _get_latest_run_date(self, query_id: str) -> datetime:
  method _get_latest_use_case_date (line 513) | def _get_latest_use_case_date(self, query_id: str) -> datetime:
  method _set_week_configuration_by_uc_start_of_week (line 535) | def _set_week_configuration_by_uc_start_of_week(cls, start_of_week: st...
  method _update_rendered_item_cadence (line 554) | def _update_rendered_item_cadence(
  method _get_rendered_item_cadence (line 573) | def _get_rendered_item_cadence(
  method _get_cadence_configuration (line 607) | def _get_cadence_configuration(
  method _render_template_query (line 686) | def _render_template_query(
  method _create_stage_view (line 734) | def _create_stage_view(
  method _generate_view_statement (line 826) | def _generate_view_statement(
  method _unpersist_cached_views (line 898) | def _unpersist_cached_views(cls, unpersist_list: list[str]) -> None:
  method _generate_ddl (line 909) | def _generate_ddl(
FILE: lakehouse_engine/algorithms/reconciliator.py
  class ReconciliationType (line 27) | class ReconciliationType(Enum):
  class ReconciliationTransformers (line 34) | class ReconciliationTransformers(Enum):
  class Reconciliator (line 43) | class Reconciliator(Executable):
  method __init__ (line 84) | def __init__(self, acon: dict):
  method get_source_of_truth (line 103) | def get_source_of_truth(self) -> DataFrame:
  method get_current_results (line 116) | def get_current_results(self) -> DataFrame:
  method execute (line 129) | def execute(self) -> None:
  method _apply_preprocess_query_args (line 185) | def _apply_preprocess_query_args(
  method _get_recon_results (line 221) | def _get_recon_results(
FILE: lakehouse_engine/algorithms/sensor.py
  class Sensor (line 25) | class Sensor(Algorithm):
  method __init__ (line 30) | def __init__(self, acon: dict):
  method execute (line 44) | def execute(self) -> bool:
  method _check_if_sensor_already_exists (line 74) | def _check_if_sensor_already_exists(self) -> bool:
  method _run_streaming_sensor (line 91) | def _run_streaming_sensor(
  method _run_batch_sensor (line 111) | def _run_batch_sensor(
  method _validate_sensor_spec (line 149) | def _validate_sensor_spec(self) -> None:
FILE: lakehouse_engine/algorithms/sensors/heartbeat.py
  class Heartbeat (line 42) | class Heartbeat(Algorithm):
  method __init__ (line 47) | def __init__(self, acon: dict):
  method execute (line 55) | def execute(self) -> None:
  method _get_active_heartbeat_jobs (line 91) | def _get_active_heartbeat_jobs(
  method generate_unique_column_values (line 114) | def generate_unique_column_values(cls, main_col: str, col_to_append: s...
  method _get_sensor_acon_from_heartbeat (line 129) | def _get_sensor_acon_from_heartbeat(
  method _enhance_sensor_acon_extra_options (line 189) | def _enhance_sensor_acon_extra_options(
  method _get_all_kafka_options (line 272) | def _get_all_kafka_options(
  method _execute_batch_of_sensor (line 340) | def _execute_batch_of_sensor(
  method _get_heartbeat_sensor_condition (line 365) | def _get_heartbeat_sensor_condition(
  method _update_heartbeat_status_with_sensor_info (line 384) | def _update_heartbeat_status_with_sensor_info(
  method update_heartbeat_control_table (line 425) | def update_heartbeat_control_table(
  method get_heartbeat_jobs_to_trigger (line 447) | def get_heartbeat_jobs_to_trigger(
  method get_anchor_job_record (line 546) | def get_anchor_job_record(
  method heartbeat_sensor_trigger_jobs (line 587) | def heartbeat_sensor_trigger_jobs(self) -> None:
  method _read_heartbeat_sensor_data_feed_csv (line 646) | def _read_heartbeat_sensor_data_feed_csv(
  method merge_control_table_data_feed_records (line 666) | def merge_control_table_data_feed_records(
  method heartbeat_sensor_control_table_data_feed (line 727) | def heartbeat_sensor_control_table_data_feed(
  method update_sensor_processed_status (line 748) | def update_sensor_processed_status(
  method update_heartbeat_sensor_completion_status (line 781) | def update_heartbeat_sensor_completion_status(
FILE: lakehouse_engine/algorithms/sensors/sensor.py
  class Sensor (line 25) | class Sensor(Algorithm):
  method __init__ (line 30) | def __init__(self, acon: dict):
  method execute (line 44) | def execute(self) -> bool:
  method _check_if_sensor_already_exists (line 74) | def _check_if_sensor_already_exists(self) -> bool:
  method _run_streaming_sensor (line 91) | def _run_streaming_sensor(
  method _run_batch_sensor (line 109) | def _run_batch_sensor(
  method _validate_sensor_spec (line 147) | def _validate_sensor_spec(self) -> None:
FILE: lakehouse_engine/core/dbfs_file_manager.py
  function _dry_run (line 8) | def _dry_run(bucket: str, object_paths: list) -> dict:
  function _list_objects (line 34) | def _list_objects(path: str, objects_list: list) -> list:
  function _get_path (line 56) | def _get_path(bucket: str, path: str) -> str:
  class DBFSFileManager (line 79) | class DBFSFileManager(FileManager):
  method get_function (line 84) | def get_function(self) -> None:
  method _delete_objects (line 102) | def _delete_objects(bucket: str, objects_paths: list) -> None:
  method delete_objects (line 129) | def delete_objects(self) -> None:
  method copy_objects (line 147) | def copy_objects(self) -> None:
  method _copy_objects (line 173) | def _copy_objects(
  method move_objects (line 206) | def move_objects(self) -> None:
  method _move_objects (line 232) | def _move_objects(
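
Example (sensors/sensor): both Sensor variants indexed above share one contract: execute() returns a bool indicating whether new upstream data was acquired. A sketch with hypothetical acon fields; only Sensor(acon) and the bool-returning execute() are confirmed by this index.

    from lakehouse_engine.algorithms.sensors.sensor import Sensor

    sensor_acon = {
        "sensor_id": "upstream_sales",          # hypothetical field
        "control_db_table_name": "db.sensors",  # hypothetical field
    }
    if Sensor(sensor_acon).execute():
        print("new data acquired; downstream load can start")
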
FILE: lakehouse_engine/core/definitions.py
  class CollectEngineUsage (line 22) | class CollectEngineUsage(Enum):
  class EngineConfig (line 39) | class EngineConfig(object):
  class EngineStats (line 80) | class EngineStats(object):
  class InputFormat (line 109) | class InputFormat(Enum):
  method values (line 128) | def values(cls): # type: ignore
  method exists (line 137) | def exists(cls, input_format: str) -> bool:
  class SharepointFile (line 163) | class SharepointFile:
  method file_extension (line 175) | def file_extension(self) -> str:
  method file_path (line 180) | def file_path(self) -> str:
  method is_csv (line 187) | def is_csv(self) -> bool:
  method is_excel (line 192) | def is_excel(self) -> bool:
  method content_size (line 197) | def content_size(self) -> int:
  class SharepointOptions (line 203) | class SharepointOptions(object):
  method __post_init__ (line 283) | def __post_init__(self) -> None:
  method _get_allowed_extensions (line 300) | def _get_allowed_extensions(self) -> set[str]:
  method _validate_file_type (line 309) | def _validate_file_type(self, allowed_file_types: set[str]) -> None:
  method _normalize_folder_relative_path (line 320) | def _normalize_folder_relative_path(self) -> None:
  method _ends_with_supported_extension (line 325) | def _ends_with_supported_extension(
  method _validate_single_file_mode_constraints_if_folder_is_file_path (line 336) | def _validate_single_file_mode_constraints_if_folder_is_file_path(
  method _validate_file_name_extension (line 365) | def _validate_file_name_extension(self, allowed_extensions: set[str]) ...
  method _validate_file_name_and_file_pattern_are_not_both_set (line 376) | def _validate_file_name_and_file_pattern_are_not_both_set(self) -> None:
  method _validate_folder_relative_path_extension_if_looks_like_file (line 384) | def _validate_folder_relative_path_extension_if_looks_like_file(
  method validate_for_reader (line 405) | def validate_for_reader(self) -> None:
  method validate_for_writer (line 421) | def validate_for_writer(self) -> None:
  class OutputFormat (line 430) | class OutputFormat(Enum):
  method values (line 449) | def values(cls): # type: ignore
  method exists (line 458) | def exists(cls, output_format: str) -> bool:
  class NotifierType (line 480) | class NotifierType(Enum):
  class NotificationRuntimeParameters (line 486) | class NotificationRuntimeParameters(Enum):
  class ReadType (line 501) | class ReadType(Enum):
  class ReadMode (line 512) | class ReadMode(Enum):
  class DQDefaults (line 523) | class DQDefaults(Enum):
  class WriteType (line 591) | class WriteType(Enum):
  class InputSpec (line 604) | class InputSpec(object):
  method __post_init__ (line 666) | def __post_init__(self) -> None:
  class TransformerSpec (line 678) | class TransformerSpec(object):
  class TransformSpec (line 692) | class TransformSpec(object):
  class DQType (line 713) | class DQType(Enum):
  class DQResultFormat (line 720) | class DQResultFormat(Enum):
  class DQExecutionPoint (line 726) | class DQExecutionPoint(Enum):
  class DQTableBaseParameters (line 733) | class DQTableBaseParameters(Enum):
  class DQFunctionSpec (line 740) | class DQFunctionSpec(object):
  class DQSpec (line 753) | class DQSpec(object):
  class MergeOptions (line 881) | class MergeOptions(object):
  class OutputSpec (line 910) | class OutputSpec(object):
  class TerminatorSpec (line 978) | class TerminatorSpec(object):
  class ReconciliatorSpec (line 995) | class ReconciliatorSpec(object):
  class DQValidatorSpec (line 1043) | class DQValidatorSpec(object):
  class SQLDefinitions (line 1059) | class SQLDefinitions(Enum):
  class FileManagerAPIKeys (line 1072) | class FileManagerAPIKeys(Enum):
  class SensorSpec (line 1083) | class SensorSpec(object):
  method create_from_acon (line 1114) | def create_from_acon(cls, acon: dict): # type: ignore
  class SensorStatus (line 1138) | class SensorStatus(Enum):
  class SAPLogchain (line 1172) | class SAPLogchain(Enum):
  class RestoreType (line 1180) | class RestoreType(Enum):
  method values (line 1188) | def values(cls): # type: ignore
  method exists (line 1197) | def exists(cls, restore_type: str) -> bool:
  class RestoreStatus (line 1209) | class RestoreStatus(Enum):
  class SQLParser (line 1224) | class SQLParser(Enum):
  class GABDefaults (line 1244) | class GABDefaults(Enum):
  class GABStartOfWeek (line 1253) | class GABStartOfWeek(Enum):
  method get_start_of_week (line 1260) | def get_start_of_week(cls) -> dict:
  method get_values (line 1271) | def get_values(cls) -> set[str]:
  class GABSpec (line 1281) | class GABSpec(object):
  method create_from_acon (line 1314) | def create_from_acon(cls, acon: dict): # type: ignore
  class GABCadence (line 1355) | class GABCadence(Enum):
  method get_ordered_cadences (line 1365) | def get_ordered_cadences(cls) -> dict:
  method get_cadences (line 1377) | def get_cadences(cls) -> set[str]:
  method order_cadences (line 1386) | def order_cadences(cls, cadences_to_order: list[str]) -> list[str]:
  class GABKeys (line 1398) | class GABKeys:
  class GABReplaceableKeys (line 1406) | class GABReplaceableKeys:
  class GABCombinedConfiguration (line 1415) | class GABCombinedConfiguration(Enum):
  class HeartbeatConfigSpec (line 1760) | class HeartbeatConfigSpec(object):
  method create_from_acon (line 1806) | def create_from_acon(cls, acon: dict): # type: ignore
  class HeartbeatSensorSource (line 1829) | class HeartbeatSensorSource(Enum):
  method values (line 1840) | def values(cls): # type: ignore
  class HeartbeatStatus (line 1849) | class HeartbeatStatus(Enum):
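
Example (definitions): definitions.py is the spec-and-enum hub the rest of the engine consumes. An illustrative construction of InputSpec; the field names are assumptions inferred from the reader classes later in this index and are not visible here, so treat them as hypothetical.

    from lakehouse_engine.core.definitions import InputSpec

    input_spec = InputSpec(
        spec_id="orders_bronze",         # assumed field name
        read_type="batch",               # assumed; see the ReadType enum
        data_format="delta",             # assumed; see the InputFormat enum
        location="s3://bucket/orders/",  # assumed field, hypothetical path
    )
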
FILE: lakehouse_engine/core/exec_env.py
  class ExecEnv (line 13) | class ExecEnv(object):
  method set_default_engine_config (line 26) | def set_default_engine_config(
  method get_or_create (line 55) | def get_or_create(
  method get_for_each_batch_session (line 107) | def get_for_each_batch_session(cls, df: DataFrame) -> None:
  method _set_spark_configs (line 117) | def _set_spark_configs(
  method get_environment (line 149) | def get_environment(cls) -> str:
FILE: lakehouse_engine/core/executable.py
  class Executable (line 7) | class Executable(ABC):
  method execute (line 11) | def execute(self) -> Optional[Any]:
FILE: lakehouse_engine/core/file_manager.py
  class FileManager (line 10) | class FileManager(ABC): # noqa: B024
  method __init__ (line 16) | def __init__(self, configs: dict):
  method delete_objects (line 26) | def delete_objects(self) -> None:
  method copy_objects (line 35) | def copy_objects(self) -> None:
  method move_objects (line 44) | def move_objects(self) -> None:
  class FileManagerFactory (line 53) | class FileManagerFactory(ABC): # noqa: B024
  method execute_function (line 57) | def execute_function(configs: dict) -> Any:
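
Example (file_manager): FileManagerFactory.execute_function(configs: dict) is the dispatch point for the concrete file managers (DBFS, S3) indexed elsewhere in this list. The configs keys below are hypothetical; only the signature is confirmed.

    from lakehouse_engine.core.file_manager import FileManagerFactory

    FileManagerFactory.execute_function(
        {
            "function": "delete_objects",  # hypothetical key naming the operation
            "bucket": "my-bucket",         # hypothetical
            "object_paths": ["tmp/old/"],  # hypothetical
            "dry_run": True,               # hypothetical safety flag
        }
    )
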
FILE: lakehouse_engine/core/gab_manager.py
  class GABCadenceManager (line 17) | class GABCadenceManager(object):
  method extended_window_calculator (line 22) | def extended_window_calculator(
  method _get_reconciliation_cadence (line 83) | def _get_reconciliation_cadence(
  method get_cadence_start_end_dates (line 121) | def get_cadence_start_end_dates(
  method _get_cadence_calculated_date (line 187) | def _get_cadence_calculated_date(
  method _get_cadence_base_date (line 211) | def _get_cadence_base_date(
  method _get_calculated_week_date (line 234) | def _get_calculated_week_date(
  method _get_calculated_month_date (line 257) | def _get_calculated_month_date(
  method _get_calculated_quarter_or_year_date (line 283) | def _get_calculated_quarter_or_year_date(
  class GABViewManager (line 305) | class GABViewManager(object):
  method __init__ (line 310) | def __init__(
  method generate_use_case_views (line 330) | def generate_use_case_views(self) -> None:
  method _generate_use_case_view (line 366) | def _generate_use_case_view(
  method _get_dimensions_and_metrics_from_use_case_view (line 434) | def _get_dimensions_and_metrics_from_use_case_view(
  method _get_calculated_and_derived_metrics_from_use_case_view (line 484) | def _get_calculated_and_derived_metrics_from_use_case_view(
  method _join_list_to_string_when_present (line 528) | def _join_list_to_string_when_present(
  method _get_cadence_snapshot_status (line 546) | def _get_cadence_snapshot_status(cls, result: dict) -> dict:
  method _split_cadence_by_snapshot (line 568) | def _split_cadence_by_snapshot(
  method _get_calculated_metrics (line 588) | def _get_calculated_metrics(
  method _get_derived_metrics (line 630) | def _get_derived_metrics(cls, derived_metric: dict) -> list[str]:
  method _get_calculated_metric (line 646) | def _get_calculated_metric(
  method _get_window_calculated_metric (line 713) | def _get_window_calculated_metric(
  method _get_cadence_calculated_metric (line 774) | def _get_cadence_calculated_metric(
  method _get_cadence_item_lag (line 825) | def _get_cadence_item_lag(
  method _get_cadence_lag_statement (line 838) | def _get_cadence_lag_statement(
FILE: lakehouse_engine/core/gab_sql_generator.py
  function _execute_sql (line 16) | def _execute_sql(func) -> Callable: # type: ignore
  class GABSQLGenerator (line 34) | class GABSQLGenerator(ABC):
  method generate_sql (line 38) | def generate_sql(self) -> Optional[str]:
  class GABInsertGenerator (line 46) | class GABInsertGenerator(GABSQLGenerator):
  method __init__ (line 55) | def __init__(
  method generate_sql (line 81) | def generate_sql(self) -> Optional[str]:
  method _insert_statement_generator (line 87) | def _insert_statement_generator(self) -> str:
  method _get_mapping_columns (line 115) | def _get_mapping_columns(cls, mapping: dict) -> tuple[str, str]:
  method _join_extracted_column_with_filled_columns (line 134) | def _join_extracted_column_with_filled_columns(
  method _fill_empty_columns (line 159) | def _fill_empty_columns(
  class GABViewGenerator (line 187) | class GABViewGenerator(GABSQLGenerator):
  method __init__ (line 195) | def __init__(
  method generate_sql (line 248) | def generate_sql(self) -> Optional[str]:
  method _create_consumption_view (line 254) | def _create_consumption_view(self) -> str:
  method _generate_consumption_view_statement (line 287) | def _generate_consumption_view_statement(
  class GABDeleteGenerator (line 429) | class GABDeleteGenerator(GABSQLGenerator):
  method __init__ (line 437) | def __init__(
  method generate_sql (line 464) | def generate_sql(self) -> Optional[str]:
  method _delete_statement_generator (line 473) | def _delete_statement_generator(self) -> str:
FILE: lakehouse_engine/core/s3_file_manager.py
  function _dry_run (line 20) | def _dry_run(bucket: str, object_paths: list) -> dict:
  function _list_objects (line 46) | def _list_objects(
  function _list_objects_recursively (line 82) | def _list_objects_recursively(bucket: str, path: str) -> list:
  function _check_directory (line 109) | def _check_directory(bucket: str, path: str) -> bool:
  class S3FileManager (line 124) | class S3FileManager(FileManager):
  method get_function (line 129) | def get_function(self) -> None:
  method _delete_objects (line 150) | def _delete_objects(self, bucket: str, objects_paths: list) -> None:
  method delete_objects (line 186) | def delete_objects(self) -> None:
  method copy_objects (line 204) | def copy_objects(self) -> None:
  method move_objects (line 224) | def move_objects(self) -> None:
  method request_restore (line 232) | def request_restore(self) -> None:
  method check_restore_status (line 248) | def check_restore_status(self) -> None:
  method request_restore_to_destination_and_wait (line 267) | def request_restore_to_destination_and_wait(self) -> None:
  method _copy_objects (line 308) | def _copy_objects(
  class ArchiveFileManager (line 370) | class ArchiveFileManager(object):
  method _get_archived_object (line 376) | def _get_archived_object(bucket: str, object_key: str) -> Optional[Any]:
  method _check_object_restore_status (line 398) | def _check_object_restore_status(
  method check_restore_status (line 425) | def check_restore_status(source_bucket: str, source_object: str) -> dict:
  method _request_restore_object (line 479) | def _request_restore_object(
  method request_restore (line 515) | def request_restore(
  method request_restore_and_wait (line 555) | def request_restore_and_wait(
FILE: lakehouse_engine/core/sensor_manager.py
  class SensorControlTableManager (line 24) | class SensorControlTableManager(object):
  method check_if_sensor_has_acquired_data (line 30) | def check_if_sensor_has_acquired_data(
  method update_sensor_status (line 55) | def update_sensor_status(
  method _update_sensor_control (line 101) | def _update_sensor_control(
  method _convert_sensor_to_data (line 128) | def _convert_sensor_to_data(
  method _get_sensor_update_set (line 169) | def _get_sensor_update_set(cls, **kwargs: Optional[str] | List[str]) -...
  method read_sensor_table_data (line 190) | def read_sensor_table_data(
  class SensorUpstreamManager (line 226) | class SensorUpstreamManager(object):
  method generate_filter_exp_query (line 232) | def generate_filter_exp_query(
  method generate_sensor_table_preprocess_query (line 307) | def generate_sensor_table_preprocess_query(
  method read_new_data (line 331) | def read_new_data(cls, sensor_spec: SensorSpec) -> DataFrame:
  method get_new_data (line 349) | def get_new_data(
  method generate_sensor_sap_logchain_query (line 365) | def generate_sensor_sap_logchain_query(
  class SensorJobRunManager (line 410) | class SensorJobRunManager(object):
  method run_job (line 416) | def run_job(cls, job_id: str, token: str, host: str) -> Tuple[int, Opt...
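
Example (sensor_manager): SensorJobRunManager.run_job is one of the few fully visible signatures: run_job(cls, job_id: str, token: str, host: str) -> Tuple[int, Opt... Its return type is truncated in this index, so the sketch only unpacks the first element; all argument values are placeholders.

    from lakehouse_engine.core.sensor_manager import SensorJobRunManager

    result = SensorJobRunManager.run_job(
        job_id="123456",                                   # hypothetical
        token="<personal-access-token>",                   # hypothetical
        host="https://my-workspace.cloud.databricks.com",  # hypothetical
    )
    status_code = result[0]  # int, per the indexed signature
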
FILE: lakehouse_engine/core/table_manager.py
  class TableManager (line 16) | class TableManager(object):
  method __init__ (line 22) | def __init__(self, configs: dict):
  method get_function (line 32) | def get_function(self) -> None:
  method create (line 62) | def create(self) -> None:
  method create_many (line 85) | def create_many(self) -> None:
  method compute_table_statistics (line 92) | def compute_table_statistics(self) -> None:
  method drop_table (line 105) | def drop_table(self) -> None:
  method drop_view (line 116) | def drop_view(self) -> None:
  method truncate (line 127) | def truncate(self) -> None:
  method vacuum (line 138) | def vacuum(self) -> None:
  method describe (line 153) | def describe(self) -> None:
  method optimize (line 164) | def optimize(self) -> None:
  method execute_multiple_sql_files (line 193) | def execute_multiple_sql_files(self) -> None:
  method execute_sql (line 216) | def execute_sql(self) -> None:
  method show_tbl_properties (line 229) | def show_tbl_properties(self) -> DataFrame:
  method get_tbl_pk (line 245) | def get_tbl_pk(self) -> List[str]:
  method repair_table (line 263) | def repair_table(self) -> None:
  method delete_where (line 277) | def delete_where(self) -> None:
FILE: lakehouse_engine/dq_processors/custom_expectations/expect_column_pair_a_to_be_not_equal_to_b.py
  class ColumnPairCustom (line 15) | class ColumnPairCustom(ColumnPairMapMetricProvider):
  method _spark (line 32) | def _spark(
  class ExpectColumnPairAToBeNotEqualToB (line 53) | class ExpectColumnPairAToBeNotEqualToB(ColumnPairMapExpectation):
  method _validate (line 157) | def _validate(
FILE: lakehouse_engine/dq_processors/custom_expectations/expect_column_pair_a_to_be_smaller_or_equal_than_b.py
  class ColumnPairCustom (line 15) | class ColumnPairCustom(ColumnPairMapMetricProvider):
  method _spark (line 33) | def _spark(
  class ExpectColumnPairAToBeSmallerOrEqualThanB (line 63) | class ExpectColumnPairAToBeSmallerOrEqualThanB(ColumnPairMapExpectation):
  method _validate (line 171) | def _validate(
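
Example (table_manager): TableManager takes a configs dict, and get_function() appears to dispatch to one of the operations listed above (all of which take no arguments beyond self). The configs keys are assumptions; only __init__(self, configs: dict) and get_function(self) -> None are confirmed by the index.

    from lakehouse_engine.core.table_manager import TableManager

    TableManager(
        {
            "function": "vacuum",  # assumed dispatch key
            "table": "db.orders",  # hypothetical table reference
        }
    ).get_function()
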
FILE: lakehouse_engine/dq_processors/custom_expectations/expect_column_pair_date_a_to_be_greater_than_or_equal_to_date_b.py
  class ColumnPairDateAToBeGreaterOrEqualToDateB (line 17) | class ColumnPairDateAToBeGreaterOrEqualToDateB(ColumnPairMapMetricProvid...
  method _spark (line 31) | def _spark(
  class ExpectColumnPairDateAToBeGreaterThanOrEqualToDateB (line 52) | class ExpectColumnPairDateAToBeGreaterThanOrEqualToDateB(ColumnPairMapEx...
  method _validate (line 169) | def _validate(
FILE: lakehouse_engine/dq_processors/custom_expectations/expect_column_values_to_be_date_not_older_than.py
  class ColumnValuesDateNotOlderThan (line 17) | class ColumnValuesDateNotOlderThan(ColumnMapMetricProvider):
  method _spark (line 30) | def _spark(
  class ExpectColumnValuesToBeDateNotOlderThan (line 67) | class ExpectColumnValuesToBeDateNotOlderThan(ColumnMapExpectation):
  method _validate (line 178) | def _validate(
FILE: lakehouse_engine/dq_processors/custom_expectations/expect_column_values_to_not_be_null_or_empty_string.py
  class ColumnValuesNotNullOrEpmtyString (line 15) | class ColumnValuesNotNullOrEpmtyString(ColumnMapMetricProvider):
  method _spark (line 29) | def _spark(
  class ExpectColumnValuesToNotBeNullOrEmptyString (line 46) | class ExpectColumnValuesToNotBeNullOrEmptyString(ColumnMapExpectation):
  method _validate (line 144) | def _validate(
FILE: lakehouse_engine/dq_processors/custom_expectations/expect_multicolumn_column_a_must_equal_b_or_c.py
  class MulticolumnCustomMetric (line 15) | class MulticolumnCustomMetric(MulticolumnMapMetricProvider):
  method _spark (line 33) | def _spark(
  class ExpectMulticolumnColumnAMustEqualBOrC (line 57) | class ExpectMulticolumnColumnAMustEqualBOrC(MulticolumnMapExpectation):
  method _validate (line 159) | def _validate(
FILE: lakehouse_engine/dq_processors/custom_expectations/expect_queried_column_agg_value_to_be.py
  class ExpectQueriedColumnAggValueToBe (line 15) | class ExpectQueriedColumnAggValueToBe(QueryExpectation):
  method validate_configuration (line 53) | def validate_configuration(
  method _validate_between (line 68) | def _validate_between(
  method _validate_lesser (line 99) | def _validate_lesser(x: str, y: int, expected_max_value: int) -> dict:
  method _validate_greater (line 125) | def _validate_greater(x: str, y: int, expected_min_value: int) -> dict:
  method _validate_condition (line 150) | def _validate_condition(self, query_result: dict, template_dict: dict)...
  method _generate_dict (line 177) | def _generate_dict(query_result: list) -> dict:
  method _validate (line 210) | def _validate(
  method _validate_template_dict (line 238) | def _validate_template_dict(self: Any) -> dict:
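
Example (custom_expectations): the custom expectations above extend Great Expectations base classes and are referenced by name from a DQ spec. A sketch pairing one of them with DQFunctionSpec (from definitions.py); the function/args field names are assumptions, and the column names are hypothetical.

    from lakehouse_engine.core.definitions import DQFunctionSpec

    dq_function = DQFunctionSpec(
        function="expect_column_pair_a_to_be_not_equal_to_b",
        args={"column_A": "created_at", "column_B": "updated_at"},
    )
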
FILE: lakehouse_engine/dq_processors/dq_factory.py
  class DQFactory (line 59) | class DQFactory(object):
  method _add_critical_function_tag (line 66) | def _add_critical_function_tag(cls, args: dict) -> dict:
  method _configure_checkpoint (line 105) | def _configure_checkpoint(
  method _check_row_condition (line 178) | def _check_row_condition(
  method _add_suite (line 207) | def _add_suite(
  method _check_expectation_result (line 258) | def _check_expectation_result(cls, result_dict: dict) -> dict:
  method run_dq_process (line 280) | def run_dq_process(cls, dq_spec: DQSpec, data: DataFrame) -> DataFrame:
  method _check_critical_functions_tags (line 381) | def _check_critical_functions_tags(cls, failed_expectations: dict) -> ...
  method _check_chunk_usage (line 398) | def _check_chunk_usage(cls, results_dict: dict, dq_spec: DQSpec) -> bool:
  method _explode_results (line 423) | def _explode_results(
  method _get_data_context_config (line 530) | def _get_data_context_config(cls, dq_spec: DQSpec) -> DataContextConfig:
  method _get_data_source_defaults (line 563) | def _get_data_source_defaults(cls, dq_spec: DQSpec) -> dict:
  method _get_failed_expectations (line 595) | def _get_failed_expectations(
  method _get_unexpected_rows_pk (line 652) | def _get_unexpected_rows_pk(cls, dq_spec: DQSpec) -> Optional[list]:
  method _log_or_fail (line 675) | def _log_or_fail(
  method _transform_checkpoint_results (line 722) | def _transform_checkpoint_results(
  method _process_chunk (line 774) | def _process_chunk(
  method _cast_columns_to_string (line 836) | def _cast_columns_to_string(cls, df: DataFrame) -> DataFrame:
  method _generate_chunks (line 851) | def _generate_chunks(cls, results_dict: dict, dq_spec: DQSpec) -> list:
  method _split_into_chunks (line 881) | def _split_into_chunks(cls, results_dict: dict, dq_spec: DQSpec) -> list:
  method _write_to_location (line 922) | def _write_to_location(
  method split_into_chunks (line 979) | def split_into_chunks(lst: list, chunk_size: int) -> list:
FILE: lakehouse_engine/dq_processors/exceptions.py
  class DQValidationsFailedException (line 4) | class DQValidationsFailedException(Exception):
  class DQCheckpointsResultsException (line 10) | class DQCheckpointsResultsException(Exception):
  class DQSpecMalformedException (line 16) | class DQSpecMalformedException(Exception):
  class DQDuplicateRuleIdException (line 22) | class DQDuplicateRuleIdException(Exception):
FILE: lakehouse_engine/dq_processors/validator.py
  class Validator (line 24) | class Validator(object):
  method get_dq_validator (line 30) | def get_dq_validator(
  method tag_source_with_dq (line 73) | def tag_source_with_dq(
  method _add_critical_function_tag (line 112) | def _add_critical_function_tag(cls, args: dict) -> dict:
  method _get_row_tagged_fail_df (line 136) | def _get_row_tagged_fail_df(
  method _join_complementary_data (line 230) | def _join_complementary_data(
FILE: lakehouse_engine/engine.py
  function load_data (line 29) | def load_data(
  function execute_reconciliation (line 56) | def execute_reconciliation(
  function execute_dq_validation (line 85) | def execute_dq_validation(
  function manage_table (line 116) | def manage_table(
  function execute_manager (line 140) | def execute_manager(
  function manage_files (line 170) | def manage_files(
  function execute_sensor (line 194) | def execute_sensor(
  function execute_sensor_heartbeat (line 220) | def execute_sensor_heartbeat(
  function trigger_heartbeat_sensor_jobs (line 272) | def trigger_heartbeat_sensor_jobs(
  function execute_heartbeat_sensor_data_feed (line 284) | def execute_heartbeat_sensor_data_feed(
  function update_heartbeat_sensor_status (line 303) | def update_heartbeat_sensor_status(
  function update_sensor_status (line 326) | def update_sensor_status(
  function generate_sensor_query (line 360) | def generate_sensor_query(
  function generate_sensor_sap_logchain_query (line 409) | def generate_sensor_sap_logchain_query(
  function send_notification (line 438) | def send_notification(args: dict) -> None:
  function execute_gab (line 452) | def execute_gab(
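
Example (engine): engine.py is the public facade; each function wraps one of the algorithm or manager classes indexed earlier (load_data -> DataLoader, execute_dq_validation -> DQValidator, manage_table -> TableManager, and so on). The parameter lists are truncated in this index, so the sketch assumes the conventional single acon dict.

    from lakehouse_engine.engine import load_data

    acon: dict = {}  # input/transform/dq/output/terminate specs go here
    load_data(acon)
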
FILE: lakehouse_engine/io/exceptions.py
  class IncrementalFilterInputNotFoundException (line 4) | class IncrementalFilterInputNotFoundException(Exception):
  class WrongIOFormatException (line 15) | class WrongIOFormatException(Exception):
  class NotSupportedException (line 21) | class NotSupportedException(RuntimeError):
  class InputNotFoundException (line 27) | class InputNotFoundException(Exception):
  class EndpointNotFoundException (line 33) | class EndpointNotFoundException(Exception):
  class LocalPathNotFoundException (line 39) | class LocalPathNotFoundException(Exception):
  class WriteToLocalException (line 45) | class WriteToLocalException(Exception):
  class SharePointAPIError (line 51) | class SharePointAPIError(Exception):
  class InvalidSharepointPathException (line 57) | class InvalidSharepointPathException(Exception):
FILE: lakehouse_engine/io/reader.py
  class Reader (line 11) | class Reader(ABC):
  method __init__ (line 14) | def __init__(self, input_spec: InputSpec):
  method read (line 24) | def read(self) -> DataFrame:
FILE: lakehouse_engine/io/reader_factory.py
  class ReaderFactory (line 19) | class ReaderFactory(ABC): # noqa: B024
  method get_data (line 23) | def get_data(cls, spec: InputSpec) -> DataFrame:
FILE: lakehouse_engine/io/readers/dataframe_reader.py
  class DataFrameReader (line 9) | class DataFrameReader(Reader):
  method __init__ (line 12) | def __init__(self, input_spec: InputSpec):
  method read (line 20) | def read(self) -> DataFrame:
FILE: lakehouse_engine/io/readers/file_reader.py
  class FileReader (line 11) | class FileReader(Reader):
  method __init__ (line 14) | def __init__(self, input_spec: InputSpec):
  method read (line 22) | def read(self) -> DataFrame:
FILE: lakehouse_engine/io/readers/jdbc_reader.py
  class JDBCReader (line 15) | class JDBCReader(Reader):
  method __init__ (line 18) | def __init__(self, input_spec: InputSpec):
  method read (line 26) | def read(self) -> DataFrame:
FILE: lakehouse_engine/io/readers/kafka_reader.py
  class KafkaReader (line 10) | class KafkaReader(Reader):
  method __init__ (line 13) | def __init__(self, input_spec: InputSpec):
  method read (line 21) | def read(self) -> DataFrame:
FILE: lakehouse_engine/io/readers/query_reader.py
  class QueryReader (line 10) | class QueryReader(Reader):
  method __init__ (line 13) | def __init__(self, input_spec: InputSpec):
  method read (line 21) | def read(self) -> DataFrame:
FILE: lakehouse_engine/io/readers/sap_b4_reader.py
  class SAPB4Reader (line 19) | class SAPB4Reader(Reader):
  method __init__ (line 24) | def __init__(self, input_spec: InputSpec):
  method read (line 33) | def read(self) -> DataFrame:
  method _get_jdbc_utils (line 42) | def _get_jdbc_utils(self) -> SAPB4ExtractionUtils:
  method _get_options (line 142) | def _get_options(self) -> Tuple[dict, dict]:
FILE: lakehouse_engine/io/readers/sap_bw_reader.py
  class SAPBWReader (line 18) | class SAPBWReader(Reader):
  method __init__ (line 23) | def __init__(self, input_spec: InputSpec):
  method read (line 32) | def read(self) -> DataFrame:
  method _get_jdbc_utils (line 41) | def _get_jdbc_utils(self) -> SAPBWExtractionUtils:
  method _get_options (line 147) | def _get_options(self) -> Tuple[dict, dict]:
FILE: lakehouse_engine/io/readers/sftp_reader.py
  class SFTPReader (line 23) | class SFTPReader(Reader):
  method __init__ (line 28) | def __init__(self, input_spec: InputSpec):
  method read (line 36) | def read(self) -> DataFrame:
  method _append_files (line 92) | def _append_files(cls, pdf: PandasDataFrame, dfs: List) -> List:
  method _read_files (line 107) | def _read_files(
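
Example (reader_factory): every reader above is resolved through ReaderFactory.get_data(cls, spec: InputSpec) -> DataFrame, which is fully visible in the index. The InputSpec fields reuse the assumptions from the earlier definitions.py sketch.

    from lakehouse_engine.core.definitions import InputSpec
    from lakehouse_engine.io.reader_factory import ReaderFactory

    df = ReaderFactory.get_data(
        InputSpec(
            spec_id="orders_raw",                # assumed field names, as before
            read_type="batch",
            data_format="csv",
            location="s3://bucket/raw/orders/",  # hypothetical path
        )
    )
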
FILE: lakehouse_engine/io/readers/sharepoint_reader.py
  class SharepointReader (line 26) | class SharepointReader(Reader):
  method __init__ (line 29) | def __init__(self, input_spec: InputSpec):
  method read (line 88) | def read(self) -> DataFrame:
  method _get_sharepoint_utils (line 110) | def _get_sharepoint_utils(self) -> SharepointUtils:
  class SharepointCsvReader (line 134) | class SharepointCsvReader(SharepointReader):
  method read (line 141) | def read(self, file_path: str = None, pattern: str = None) -> DataFrame:
  method _load_and_archive_file (line 183) | def _load_and_archive_file(self, sp_file: SharepointFile) -> DataFrame:
  method _get_csv_files_in_folder (line 251) | def _get_csv_files_in_folder(
  method _load_csv_to_spark (line 293) | def _load_csv_to_spark(
  method read_csv_folder (line 340) | def read_csv_folder(self, folder_path: str, pattern: str = None) -> Da...
  method _validate_and_read_file (line 401) | def _validate_and_read_file(
  method _handle_file_error (line 436) | def _handle_file_error(
  method detect_delimiter (line 470) | def detect_delimiter(
  method resolve_spark_csv_options (line 535) | def resolve_spark_csv_options(self, file_content: bytes) -> dict:
  class SharepointExcelReader (line 599) | class SharepointExcelReader(SharepointReader):
  method read (line 602) | def read(self) -> DataFrame:
  class SharepointReaderFactory (line 614) | class SharepointReaderFactory:
  method get_reader (line 621) | def get_reader(input_spec: InputSpec) -> SharepointReader:
FILE: lakehouse_engine/io/readers/table_reader.py
  class TableReader (line 10) | class TableReader(Reader):
  method __init__ (line 13) | def __init__(self, input_spec: InputSpec):
  method read (line 21) | def read(self) -> DataFrame:
FILE: lakehouse_engine/io/writer.py
  class Writer (line 14) | class Writer(ABC):
  method __init__ (line 17) | def __init__(
  method write (line 33) | def write(self) -> Optional[OrderedDict]:
  method write_transformed_micro_batch (line 38) | def write_transformed_micro_batch(**kwargs: Any) -> Callable:
  method get_transformed_micro_batch (line 59) | def get_transformed_micro_batch(
  method get_streaming_trigger (line 90) | def get_streaming_trigger(cls, output_spec: OutputSpec) -> Dict:
  method run_micro_batch_dq_process (line 117) | def run_micro_batch_dq_process(df: DataFrame, dq_spec: List[DQSpec]) -...
FILE: lakehouse_engine/io/writer_factory.py
  class WriterFactory (line 26) | class WriterFactory(ABC): # noqa: B024
  method _get_writer_name (line 42) | def _get_writer_name(cls, spec: OutputSpec) -> str:
  method get_writer (line 64) | def get_writer(cls, spec: OutputSpec, df: DataFrame, data: OrderedDict...
FILE: lakehouse_engine/io/writers/console_writer.py
  class ConsoleWriter (line 13) | class ConsoleWriter(Writer):
  method __init__ (line 18) | def __init__(self, output_spec: OutputSpec, df: DataFrame, data: Order...
  method write (line 28) | def write(self) -> None:
  method _show_df (line 43) | def _show_df(df: DataFrame, output_spec: OutputSpec) -> None:
  method _show_streaming_df (line 57) | def _show_streaming_df(output_spec: OutputSpec) -> Callable:
  method _write_to_console_in_streaming_mode (line 75) | def _write_to_console_in_streaming_mode(
  method _write_transformed_micro_batch (line 103) | def _write_transformed_micro_batch( # type: ignore
FILE: lakehouse_engine/io/writers/dataframe_writer.py
  class DataFrameWriter (line 17) | class DataFrameWriter(Writer):
  method __init__ (line 22) | def __init__(self, output_spec: OutputSpec, df: DataFrame, data: Order...
  method write (line 33) | def write(self) -> Optional[OrderedDict]:
  method _get_prefixed_view_name (line 61) | def _get_prefixed_view_name(self, stream_df_view_name: str) -> str:
  method _create_temp_view (line 65) | def _create_temp_view(self, df: DataFrame, stream_df_view_name: str) -...
  method _write_streaming_df (line 80) | def _write_streaming_df(self, stream_df_view_name: str) -> Callable:
  method _write_to_dataframe_in_streaming_mode (line 96) | def _write_to_dataframe_in_streaming_mode(
  method _table_exists (line 158) | def _table_exists(self, table_name: str) -> bool:
  method _write_transformed_micro_batch (line 172) | def _write_transformed_micro_batch(
FILE: lakehouse_engine/io/writers/delta_merge_writer.py
  class DeltaMergeWriter (line 14) | class DeltaMergeWriter(Writer):
  method __init__ (line 17) | def __init__(self, output_spec: OutputSpec, df: DataFrame, data: Order...
  method write (line 28) | def write(self) -> None:
  method _get_delta_table (line 53) | def _get_delta_table(output_spec: OutputSpec) -> DeltaTable:
  method _insert (line 75) | def _insert(
  method _merge (line 110) | def _merge(delta_table: DeltaTable, output_spec: OutputSpec, df: DataF...
  method _update (line 142) | def _update(
  method _write_transformed_micro_batch (line 177) | def _write_transformed_micro_batch( # type: ignore
FILE: lakehouse_engine/io/writers/file_writer.py
  class FileWriter (line 12) | class FileWriter(Writer):
  method __init__ (line 15) | def __init__(self, output_spec: OutputSpec, df: DataFrame, data: Order...
  method write (line 25) | def write(self) -> None:
  method _write_to_files_in_batch_mode (line 35) | def _write_to_files_in_batch_mode(df: DataFrame, output_spec: OutputSp...
  method _write_to_files_in_streaming_mode (line 51) | def _write_to_files_in_streaming_mode(
  method _write_transformed_micro_batch (line 87) | def _write_transformed_micro_batch( # type: ignore
FILE: lakehouse_engine/io/writers/jdbc_writer.py
  class JDBCWriter (line 12) | class JDBCWriter(Writer):
  method __init__ (line 15) | def __init__(self, output_spec: OutputSpec, df: DataFrame, data: Order...
  method write (line 25) | def write(self) -> None:
  method _write_to_jdbc_in_batch_mode (line 49) | def _write_to_jdbc_in_batch_mode(df: DataFrame, output_spec: OutputSpe...
  method _write_transformed_micro_batch (line 65) | def _write_transformed_micro_batch( # type: ignore
FILE: lakehouse_engine/io/writers/kafka_writer.py
  class KafkaWriter (line 12) | class KafkaWriter(Writer):
  method __init__ (line 15) | def __init__(self, output_spec: OutputSpec, df: DataFrame, data: Order...
  method write (line 25) | def write(self) -> None:
  method _write_to_kafka_in_batch_mode (line 35) | def _write_to_kafka_in_batch_mode(df: DataFrame, output_spec: OutputSp...
  method _write_to_kafka_in_streaming_mode (line 47) | def _write_to_kafka_in_streaming_mode(
  method _write_transformed_micro_batch (line 81) | def _write_transformed_micro_batch( # type: ignore
FILE: lakehouse_engine/io/writers/rest_api_writer.py
  class RestApiWriter (line 20) | class RestApiWriter(Writer):
  method __init__ (line 25) | def __init__(self, output_spec: OutputSpec, df: DataFrame, data: Order...
  method write (line 35) | def write(self) -> None:
  method _get_func_to_send_payload_to_rest_api (line 45) | def _get_func_to_send_payload_to_rest_api(output_spec: OutputSpec) -> ...
  method _write_to_rest_api_in_batch_mode (line 142) | def _write_to_rest_api_in_batch_mode(
  method _write_to_rest_api_in_streaming_mode (line 162) | def _write_to_rest_api_in_streaming_mode(
  method _write_transformed_micro_batch (line 186) | def _write_transformed_micro_batch( # type: ignore
FILE: lakehouse_engine/io/writers/sharepoint_writer.py
  class SharepointWriter (line 19) | class SharepointWriter(Writer):
  method __init__ (line 29) | def __init__(self, output_spec: OutputSpec, df: DataFrame, data: Order...
  method write (line 41) | def write(self) -> None:
  method _get_sharepoint_utils (line 54) | def _get_sharepoint_utils(self) -> SharepointUtils:
  method _write_to_sharepoint_in_batch_mode (line 72) | def _write_to_sharepoint_in_batch_mode(self, df: DataFrame) -> None:
FILE: lakehouse_engine/io/writers/table_writer.py
  class TableWriter (line 12) | class TableWriter(Writer):
  method __init__ (line 15) | def __init__(self, output_spec: OutputSpec, df: DataFrame, data: Order...
  method write (line 25) | def write(self) -> None:
  method _write_to_table_in_batch_mode (line 74) | def _write_to_table_in_batch_mode(df: DataFrame, output_spec: OutputSp...
  method _write_to_table_in_streaming_mode (line 99) | def _write_to_table_in_streaming_mode(
  method _write_transformed_micro_batch (line 133) | def _write_transformed_micro_batch( # type: ignore
FILE: lakehouse_engine/terminators/cdf_processor.py
  class CDFProcessor (line 24) | class CDFProcessor(object):
  method expose_cdf (line 30) | def expose_cdf(cls, spec: TerminatorSpec) -> None:
  method _write_cdf_to_external (line 65) | def _write_cdf_to_external(
  method _get_table_cdf_input_specs (line 90) | def _get_table_cdf_input_specs(spec: TerminatorSpec) -> InputSpec:
  method delete_old_data (line 115) | def delete_old_data(cls, spec: TerminatorSpec) -> None:
  method vacuum_cdf_data (line 134) | def vacuum_cdf_data(cls, spec: TerminatorSpec) -> None:
FILE: lakehouse_engine/terminators/dataset_optimizer.py
  class DatasetOptimizer (line 12) | class DatasetOptimizer(object):
  method optimize_dataset (line 18) | def optimize_dataset(
  method _compute_table_stats (line 79) | def _compute_table_stats(cls, db_table: str) -> None:
  method _vacuum (line 93) | def _vacuum(cls, db_table: str, location: str, hours: int) -> None:
  method _optimize (line 115) | def _optimize(
FILE: lakehouse_engine/terminators/notifier.py
  class Notifier (line 19) | class Notifier(ABC):
  method __init__ (line 24) | def __init__(self, notification_spec: TerminatorSpec):
  method create_notification (line 34) | def create_notification(self) -> None:
  method send_notification (line 39) | def send_notification(self) -> None:
  method _render_notification_field (line 43) | def _render_notification_field(self, template_field: str) -> str:
  method check_if_notification_is_failure_notification (line 70) | def check_if_notification_is_failure_notification(
FILE: lakehouse_engine/terminators/notifier_factory.py
  class NotifierFactory (line 9) | class NotifierFactory(object):
  method get_notifier (line 15) | def get_notifier(cls, spec: TerminatorSpec) -> Notifier:
  method generate_failure_notification (line 35) | def generate_failure_notification(spec: list, exception: Exception) ->...
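
Example (notifiers): the notifier stack above is reached through engine.send_notification(args: dict), fully visible under engine.py; NotifierFactory.get_notifier picks the concrete Notifier from a TerminatorSpec. The args keys below are hypothetical.

    from lakehouse_engine.engine import send_notification

    send_notification(
        {
            "server": "smtp.example.com",       # hypothetical
            "to": ["data-team@example.com"],    # hypothetical
            "subject": "orders load finished",  # hypothetical
            "message": "load succeeded",        # hypothetical
        }
    )
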
FILE: lakehouse_engine/terminators/notifiers/email_notifier.py
  class EmailNotifier (line 24) | class EmailNotifier(Notifier):
  method __init__ (line 29) | def __init__(self, notification_spec: TerminatorSpec):
  method create_notification (line 37) | def create_notification(self) -> None:
  method send_notification (line 68) | def send_notification(self) -> None:
  method _authenticate_and_send_office365 (line 89) | def _authenticate_and_send_office365(self) -> None:
  method _authenticate_and_send_simple_smtp (line 118) | def _authenticate_and_send_simple_smtp(self) -> None:
  method _validate_email_notification (line 181) | def _validate_email_notification(self) -> None:
  method _get_importance (line 204) | def _get_importance(self, importance: str) -> Any:
  method _create_graph_api_email_body (line 229) | def _create_graph_api_email_body(self) -> Any:
  method _set_graph_api_recipients (line 293) | def _set_graph_api_recipients(self, recipient_type: str) -> list:
FILE: lakehouse_engine/terminators/notifiers/exceptions.py
  class NotifierNotFoundException (line 4) | class NotifierNotFoundException(Exception):
  class NotifierConfigException (line 10) | class NotifierConfigException(Exception):
  class NotifierTemplateNotFoundException (line 16) | class NotifierTemplateNotFoundException(Exception):
  class NotifierTemplateConfigException (line 22) | class NotifierTemplateConfigException(Exception):
FILE: lakehouse_engine/terminators/notifiers/notification_templates.py
  class NotificationsTemplates (line 4) | class NotificationsTemplates(object):
FILE: lakehouse_engine/terminators/sensor_terminator.py
  class SensorTerminator (line 11) | class SensorTerminator(object):
  method update_sensor_status (line 17) | def update_sensor_status(
FILE: lakehouse_engine/terminators/spark_terminator.py
  class SparkTerminator (line 7) | class SparkTerminator(object):
  method terminate_spark (line 13) | def terminate_spark(cls) -> None:
FILE: lakehouse_engine/terminators/terminator_factory.py
  class TerminatorFactory (line 13) | class TerminatorFactory(object):
  method execute_terminator (line 19) | def execute_terminator(
FILE: lakehouse_engine/transformers/aggregators.py
  class Aggregators (line 11) | class Aggregators(object):
  method get_max_value (line 17) | def get_max_value(input_col: str, output_col: str = "latest") -> Calla...
FILE: lakehouse_engine/transformers/column_creators.py
  class ColumnCreators (line 15) | class ColumnCreators(object):
  method with_row_id (line 21) | def with_row_id(
  method with_auto_increment_id (line 47) | def with_auto_increment_id(
  method with_literals (line 86) | def with_literals(
FILE: lakehouse_engine/transformers/column_reshapers.py
  class ColumnReshapers (line 24) | class ColumnReshapers(object):
  method cast (line 30) | def cast(cls, cols: Dict[str, str]) -> Callable:
  method column_selector (line 54) | def column_selector(cls, cols: OrderedDict) -> Callable:
  method flatten_schema (line 72) | def flatten_schema(
  method explode_columns (line 115) | def explode_columns(
  method _get_columns (line 160) | def _get_columns(
  method with_expressions (line 181) | def with_expressions(cls, cols_and_exprs: Dict[str, str]) -> Callable:
  method rename (line 207) | def rename(cls, cols: Dict[str, str], escape_col_names: bool = True) -...
  method from_avro (line 238) | def from_avro(
  method from_avro_with_registry (line 285) | def from_avro_with_registry(
  method from_json (line 349) | def from_json(
  method to_json (line 412) | def to_json(
  method _explode_arrays (line 441) | def _explode_arrays(cls, df: DataFrame, cols_to_explode: List[str]) ->...
  method _explode_maps (line 460) | def _explode_maps(cls, df: DataFrame, cols_to_explode: List[str]) -> D...
FILE: lakehouse_engine/transformers/condensers.py
  class Condensers (line 15) | class Condensers(object):
  method condense_record_mode_cdc (line 21) | def condense_record_mode_cdc(
  method group_and_rank (line 91) | def group_and_rank(
FILE: lakehouse_engine/transformers/custom_transformers.py
  class CustomTransformers (line 8) | class CustomTransformers(object):
  method custom_transformation (line 12) | def custom_transformation(custom_transformer: Callable) -> Callable:
  method sql_transformation (line 43) | def sql_transformation(sql: str) -> Callable:
FILE: lakehouse_engine/transformers/data_maskers.py
  class DataMaskers (line 12) | class DataMaskers(object):
  method hash_masker (line 18) | def hash_masker(
  method column_dropper (line 55) | def column_dropper(cls, cols: List[str]) -> Callable:
FILE: lakehouse_engine/transformers/date_transformers.py
  class DateTransformers (line 12) | class DateTransformers(object):
  method add_current_date (line 18) | def add_current_date(output_col: str) -> Callable:
  method convert_to_date (line 38) | def convert_to_date(
  method convert_to_timestamp (line 66) | def convert_to_timestamp(
  method format_date (line 95) | def format_date(cols: List[str], target_format: Optional[str] = None) ...
  method get_date_hierarchy (line 121) | def get_date_hierarchy(cols: List[str], formats: Optional[dict] = None...
FILE: lakehouse_engine/transformers/exceptions.py
  class WrongArgumentsException (line 4) | class WrongArgumentsException(Exception):
  class UnsupportedStreamingTransformerException (line 10) | class UnsupportedStreamingTransformerException(Exception):
FILE: lakehouse_engine/transformers/filters.py
  class Filters (line 12) | class Filters(object):
  method incremental_filter (line 18) | def incremental_filter(
  method expression_filter (line 89) | def expression_filter(exp: str) -> Callable:
  method column_filter_exp (line 107) | def column_filter_exp(exp: List[str]) -> Callable:
  method drop_duplicate_rows (line 125) | def drop_duplicate_rows(
FILE: lakehouse_engine/transformers/joiners.py
  class Joiners (line 14) | class Joiners(object):
  method join (line 20) | def join(
FILE: lakehouse_engine/transformers/null_handlers.py
  class NullHandlers (line 10) | class NullHandlers(object):
  method replace_nulls (line 16) | def replace_nulls(
FILE: lakehouse_engine/transformers/optimizers.py
  class Optimizers (line 11) | class Optimizers(object):
  method cache (line 17) | def cache(cls) -> Callable:
  method persist (line 34) | def persist(cls, storage_level: str = None) -> Callable:
  method unpersist (line 58) | def unpersist(cls, blocking: bool = False) -> Callable:
FILE: lakehouse_engine/transformers/regex_transformers.py
  class RegexTransformers (line 11) | class RegexTransformers(object):
  method with_regex_value (line 17) | def with_regex_value(
FILE: lakehouse_engine/transformers/repartitioners.py
  class Repartitioners (line 11) | class Repartitioners(object):
  method coalesce (line 17) | def coalesce(cls, num_partitions: int) -> Callable:
  method repartition (line 35) | def repartition(
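
Example (transformers): every transformer above returns a Callable that TransformerFactory later applies to a DataFrame. A sketch with the fully visible sql_transformation(sql: str) -> Callable; how the input DataFrame is referenced inside the SQL is an engine convention not shown in this index, so the query below is purely illustrative.

    from lakehouse_engine.transformers.custom_transformers import CustomTransformers

    transformer = CustomTransformers.sql_transformation(
        "SELECT * FROM orders WHERE amount > 0"  # hypothetical SQL
    )
    transformed_df = transformer(df)  # df: an existing Spark DataFrame
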
(line 24) | class TransformerFactory(object): method get_transformer (line 80) | def get_transformer(spec: TransformerSpec, data: OrderedDict = None) -... method _get_spec_args_copy (line 129) | def _get_spec_args_copy(spec_args: dict) -> dict: FILE: lakehouse_engine/transformers/unions.py class Unions (line 11) | class Unions(object): method union (line 17) | def union( method union_by_name (line 42) | def union_by_name( FILE: lakehouse_engine/transformers/watermarker.py class Watermarker (line 10) | class Watermarker(object): method with_watermark (line 16) | def with_watermark(watermarker_column: str, watermarker_time: str) -> ... FILE: lakehouse_engine/utils/acon_utils.py function validate_manager_list (line 17) | def validate_manager_list(acon: dict) -> list: function validate_and_resolve_acon (line 38) | def validate_and_resolve_acon(acon: dict, execution_point: str = "") -> ... function validate_readers (line 62) | def validate_readers(acon: dict) -> None: function validate_writers (line 82) | def validate_writers(acon: dict) -> None: function validate_managers (line 99) | def validate_managers(acon: dict, error_list: list = None) -> None: function validate_mandatory_parameters (line 155) | def validate_mandatory_parameters(acon: dict, expected_params: dict) -> ... function validate_parameter_types (line 173) | def validate_parameter_types(acon: dict, expected_params: dict) -> list: function resolve_dq_functions (line 207) | def resolve_dq_functions(acon: dict, execution_point: str) -> dict: FILE: lakehouse_engine/utils/configs/config_utils.py class ConfigUtils (line 13) | class ConfigUtils(object): method get_acon (line 29) | def get_acon( method get_config (line 52) | def get_config(package: str = "lakehouse_engine.configs") -> Any: method get_config_from_file (line 68) | def get_config_from_file(config_file_path: str) -> Any: method get_engine_version (line 83) | def get_engine_version(cls) -> str: method read_json_acon (line 97) | def read_json_acon(path: str, disable_dbfs_retry: bool = False) -> Any: method read_sql (line 110) | def read_sql(path: str, disable_dbfs_retry: bool = False) -> Any: method remove_sensitive_info (line 123) | def remove_sensitive_info(cls, dict_to_replace: dict | list) -> dict |... FILE: lakehouse_engine/utils/databricks_utils.py class DatabricksUtils (line 15) | class DatabricksUtils(object): method is_serverless_workload (line 21) | def is_serverless_workload() -> bool: method get_db_utils (line 33) | def get_db_utils(spark: SparkSession) -> Any: method get_databricks_job_information (line 56) | def get_databricks_job_information(spark: SparkSession) -> Tuple[str, ... 
method _get_dp_name (line 80) | def _get_dp_name(job_name: str) -> str: method get_spark_conf_values (line 96) | def get_spark_conf_values(usage_stats: dict, spark_confs: dict) -> None: method get_usage_context_for_serverless (line 137) | def get_usage_context_for_serverless(cls, usage_stats: dict) -> None:
FILE: lakehouse_engine/utils/dq_utils.py class DQUtils (line 15) | class DQUtils: method import_dq_rules_from_table (line 19) | def import_dq_rules_from_table( method validate_dq_functions (line 117) | def validate_dq_functions( class PrismaUtils (line 166) | class PrismaUtils: method build_prisma_dq_spec (line 170) | def build_prisma_dq_spec(spec: dict, execution_point: str) -> dict: method validate_rule_id_duplication (line 240) | def validate_rule_id_duplication(
FILE: lakehouse_engine/utils/engine_usage_stats.py class EngineUsageStats (line 15) | class EngineUsageStats(object): method store_engine_usage (line 21) | def store_engine_usage( method _should_collect_usage (line 75) | def _should_collect_usage(cls, collect_engine_usage: str) -> bool: method _prepare_usage_stats (line 84) | def _prepare_usage_stats(cls, acon: dict, spark_confs: dict) -> dict: method _select_usage_path (line 93) | def _select_usage_path( method _add_metadata_to_stats (line 103) | def _add_metadata_to_stats(
FILE: lakehouse_engine/utils/expectations_utils.py function validate_result (line 6) | def validate_result( function _get_example_unexpected_index_list (line 36) | def _get_example_unexpected_index_list(expectation_configuration: Any) -... function _get_test_unexpected_index_list (line 68) | def _get_test_unexpected_index_list(metric_name: str, metrics: Dict) -> ...
FILE: lakehouse_engine/utils/extraction/jdbc_extraction_utils.py class JDBCExtractionType (line 14) | class JDBCExtractionType(Enum): class JDBCExtraction (line 22) | class JDBCExtraction(object): class JDBCExtractionUtils (line 101) | class JDBCExtractionUtils(object): method __init__ (line 104) | def __init__(self, jdbc_extraction: Any): method get_additional_spark_options (line 115) | def get_additional_spark_options( method get_predicates (line 163) | def get_predicates(self, predicates_query: str) -> List: method get_spark_jdbc_options (line 207) | def get_spark_jdbc_options(self) -> Tuple[dict, dict]: method get_spark_jdbc_optimal_upper_bound (line 248) | def get_spark_jdbc_optimal_upper_bound(self) -> Any: method _get_extraction_partition_opts (line 302) | def _get_extraction_partition_opts( method _get_max_timestamp (line 322) | def _get_max_timestamp(self, max_timestamp_query: str) -> str: method _get_delta_query (line 361) | def _get_delta_query(self) -> Tuple[str, str]: method _get_init_query (line 366) | def _get_init_query(self) -> Tuple[str, str]:
FILE: lakehouse_engine/utils/extraction/sap_b4_extraction_utils.py class ADSOTypes (line 18) | class ADSOTypes(Enum): class SAPB4Extraction (line 27) | class SAPB4Extraction(JDBCExtraction): class SAPB4ExtractionUtils (line 81) | class SAPB4ExtractionUtils(JDBCExtractionUtils): method __init__ (line 84) | def __init__(self, sap_b4_extraction: SAPB4Extraction): method get_data_target (line 104) | def get_data_target(input_spec_opt: dict) -> str: method _get_init_query (line 126) | def _get_init_query(self) -> Tuple[str, str]: method _get_init_extraction_query (line 143) | def _get_init_extraction_query(self) -> str: method _get_delta_query (line 166) | def _get_delta_query(self) -> Tuple[str, str]: method _get_req_status_tbl_filter (line 261) | def _get_req_status_tbl_filter(self) -> Any:
FILE: lakehouse_engine/utils/extraction/sap_bw_extraction_utils.py class SAPBWExtraction (line 18) | class SAPBWExtraction(JDBCExtraction): class SAPBWExtractionUtils (line 78) | class SAPBWExtractionUtils(JDBCExtractionUtils): method __init__ (line 81) | def __init__(self, sap_bw_extraction: SAPBWExtraction): method get_changelog_table (line 99) | def get_changelog_table(self) -> str: method get_odsobject (line 175) | def get_odsobject(input_spec_opt: dict) -> str: method get_logsys_cond (line 194) | def get_logsys_cond(self) -> str: method _get_init_query (line 208) | def _get_init_query(self) -> Tuple[str, str]: method _get_init_extraction_query (line 236) | def _get_init_extraction_query(self) -> str: method _get_init_extraction_query_act_req_timestamp (line 265) | def _get_init_extraction_query_act_req_timestamp(self) -> str: method _get_delta_query (line 287) | def _get_delta_query(self) -> Tuple[str, str]:
FILE: lakehouse_engine/utils/extraction/sftp_extraction_utils.py class SFTPInputFormat (line 19) | class SFTPInputFormat(Enum): class SFTPExtractionFilter (line 28) | class SFTPExtractionFilter(Enum): class SFTPExtractionUtils (line 38) | class SFTPExtractionUtils(object): method get_files_list (line 44) | def get_files_list( method get_sftp_client (line 103) | def get_sftp_client( method validate_format (line 228) | def validate_format(cls, files_format: str) -> str: method validate_location (line 252) | def validate_location(cls, location: str) -> str: method _file_has_pattern (line 264) | def _file_has_pattern(cls, item: SFTPAttributes, options_args: dict) -... method _file_in_date_interval (line 287) | def _file_in_date_interval( method _get_earliest_latest_file (line 345) | def _get_earliest_latest_file( method _get_folder_items (line 389) | def _get_folder_items( method _get_host_keys (line 421) | def _get_host_keys(cls, pkey: str, key_type: str) -> PKey: method _is_compressed (line 442) | def _is_compressed(cls, filename: str) -> Any: method _validate_date (line 454) | def _validate_date(cls, date_text: str) -> datetime:
FILE: lakehouse_engine/utils/file_utils.py function get_file_names_without_file_type (line 8) | def get_file_names_without_file_type( function get_directory_path (line 33) | def get_directory_path(path: str) -> str:
FILE: lakehouse_engine/utils/gab_utils.py class GABUtils (line 18) | class GABUtils(object): method logger (line 23) | def logger( method _escape_quote (line 90) | def _escape_quote(cls, to_escape: str) -> str: method get_json_column_as_dict (line 99) | def get_json_column_as_dict( method extract_columns_from_mapping (line 125) | def extract_columns_from_mapping( method _extract_column_with_alias (line 182) | def _extract_column_with_alias( method _get_column_format_without_alias (line 211) | def _get_column_format_without_alias( method get_cadence_configuration_at_end_date (line 236) | def get_cadence_configuration_at_end_date(cls, end_date: datetime) -> ...
method get_reconciliation_cadences (line 283) | def get_reconciliation_cadences( method _get_cadences_to_execute (line 307) | def _get_cadences_to_execute( method _sort_cadences_to_execute (line 337) | def _sort_cadences_to_execute( method _get_configured_cadences_by_snapshot (line 367) | def _get_configured_cadences_by_snapshot( method _generate_reconciliation_by_snapshot (line 410) | def _generate_reconciliation_by_snapshot( method _add_cadence_snapshot_to_cadence_snapshot_config (line 454) | def _add_cadence_snapshot_to_cadence_snapshot_config( method format_datetime_to_default (line 474) | def format_datetime_to_default(cls, date_to_format: datetime) -> str: class GABPartitionUtils (line 483) | class GABPartitionUtils(object): method get_years (line 489) | def get_years(cls, start_date: str, end_date: str) -> list[str]: method get_partition_condition (line 509) | def get_partition_condition(cls, start_date: str, end_date: str) -> str: method _get_multiple_years_partition (line 526) | def _get_multiple_years_partition( method _get_single_year_partition (line 588) | def _get_single_year_partition(cls, start_date: str, end_date: str) ->... method _extract_date_part_from_date (line 641) | def _extract_date_part_from_date(cls, part: str, date: str) -> str: FILE: lakehouse_engine/utils/logging_handler.py class FilterSensitiveData (line 32) | class FilterSensitiveData(logging.Filter): method filter (line 35) | def filter(self, record: logging.LogRecord) -> bool: # noqa: A003 class LoggingHandler (line 53) | class LoggingHandler(object): method __init__ (line 56) | def __init__(self, class_name: str): method get_logger (line 72) | def get_logger(self) -> logging.Logger: FILE: lakehouse_engine/utils/rest_api.py class RestMethods (line 16) | class RestMethods(Enum): class RestStatusCodes (line 24) | class RestStatusCodes(Enum): class RESTApiException (line 31) | class RESTApiException(requests.RequestException): method __init__ (line 34) | def __init__(self, message: str) -> None: function get_basic_auth (line 43) | def get_basic_auth(username: str, password: str) -> requests.auth.HTTPBa... function get_configured_session (line 56) | def get_configured_session( function execute_api_request (line 97) | def execute_api_request( FILE: lakehouse_engine/utils/schema_utils.py class SchemaUtils (line 15) | class SchemaUtils(object): method from_file (line 21) | def from_file(file_path: str, disable_dbfs_retry: bool = False) -> Str... method from_file_to_dict (line 37) | def from_file_to_dict(file_path: str, disable_dbfs_retry: bool = False... method from_dict (line 51) | def from_dict(struct_type: dict) -> StructType: method from_table_schema (line 64) | def from_table_schema(table: str) -> StructType: method from_input_spec (line 76) | def from_input_spec(cls, input_spec: InputSpec) -> Optional[StructType]: method _get_prefix_alias (line 110) | def _get_prefix_alias(num_chars: int, prefix: str, shorten_names: bool... method schema_flattener (line 121) | def schema_flattener( FILE: lakehouse_engine/utils/sharepoint_utils.py class SharepointUtils (line 30) | class SharepointUtils(object): method __init__ (line 33) | def __init__( method _get_token (line 98) | def _get_token(self) -> None: method _create_app (line 107) | def _create_app(self) -> None: method _make_request (line 129) | def _make_request( method _parse_json (line 185) | def _parse_json(self, response: requests.Response, context: str) -> Di... 
method _get_site_id (line 219) | def _get_site_id(self) -> str: method _get_drive_id (line 261) | def _get_drive_id(self) -> str: method check_if_endpoint_exists (line 302) | def check_if_endpoint_exists( method check_if_local_path_exists (line 344) | def check_if_local_path_exists(self, local_path: str) -> None: method save_to_staging_area (line 358) | def save_to_staging_area(self, sp_file: SharepointFile) -> str: method download_file_streaming (line 390) | def download_file_streaming(self, sp_file: SharepointFile) -> str: method write_bytes_to_local_file (line 427) | def write_bytes_to_local_file(self, sp_file: SharepointFile) -> str: method write_to_local_path (line 457) | def write_to_local_path(self, df: DataFrame) -> None: method _rename_local_file (line 486) | def _rename_local_file(self, local_path: str, file_name: str) -> None: method write_to_sharepoint (line 504) | def write_to_sharepoint(self) -> None: method delete_local_path (line 566) | def delete_local_path(self) -> None: method staging_area (line 581) | def staging_area(self) -> Generator[str, None, None]: method list_items_in_path (line 598) | def list_items_in_path(self, path: str) -> list[Any]: method get_file_metadata (line 664) | def get_file_metadata(self, file_path: str) -> SharepointFile: method archive_sharepoint_file (line 723) | def archive_sharepoint_file( method _rename_sharepoint_file (line 767) | def _rename_sharepoint_file(self, sp_file: SharepointFile) -> str: method _move_file_in_sharepoint (line 827) | def _move_file_in_sharepoint(self, sp_file: SharepointFile, to_path: s... method _create_folder_in_sharepoint (line 900) | def _create_folder_in_sharepoint(self, folder_path: str) -> None: FILE: lakehouse_engine/utils/spark_utils.py class SparkUtils (line 8) | class SparkUtils(object): method create_temp_view (line 12) | def create_temp_view( FILE: lakehouse_engine/utils/sql_parser_utils.py class SQLParserUtils (line 6) | class SQLParserUtils(object): method split_sql_commands (line 9) | def split_sql_commands( method _split_sql_commands (line 34) | def _split_sql_commands(self) -> list[str]: method _get_substring (line 109) | def _get_substring(self, first_char: int = None, last_char: int = None... method _validate_command_is_closed (line 121) | def _validate_command_is_closed(self, index: int, dependencies: int) -... method _character_validation (line 149) | def _character_validation(self, value: str | list) -> bool: method _add_new_command (line 165) | def _add_new_command(self, sql_command: str) -> None: method _update_value (line 173) | def _update_value(self, value: int, operation: str, condition: bool = ... 
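The SQLParserUtils entries above describe a statement splitter that must respect quoted strings and nested constructs (note the helpers _validate_command_is_closed and _character_validation). As a rough illustration of the idea only, not the engine's actual implementation, a toy splitter that breaks on top-level semicolons outside single-quoted strings might look like this:

```python
# Illustrative sketch: split a multi-statement SQL string on semicolons
# that are not inside single-quoted string literals. The real
# SQLParserUtils.split_sql_commands tracks more state than this toy version.
def split_sql_commands(sql: str, delimiter: str = ";") -> list[str]:
    commands, current, in_string = [], [], False
    for char in sql:
        if char == "'":
            in_string = not in_string  # toggle on quote boundaries
        if char == delimiter and not in_string:
            commands.append("".join(current).strip())
            current = []
        else:
            current.append(char)
    if current and "".join(current).strip():
        commands.append("".join(current).strip())
    return [c for c in commands if c]

print(split_sql_commands("CREATE TABLE t (id INT); INSERT INTO t VALUES (1)"))
# ['CREATE TABLE t (id INT)', 'INSERT INTO t VALUES (1)']
```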
FILE: lakehouse_engine/utils/storage/dbfs_storage.py class DBFSStorage (line 11) | class DBFSStorage(FileStorage): method get_file_payload (line 18) | def get_file_payload(cls, url: ParseResult) -> Any: method write_payload_to_file (line 36) | def write_payload_to_file(cls, url: ParseResult, content: str) -> None:
FILE: lakehouse_engine/utils/storage/file_storage.py class FileStorage (line 8) | class FileStorage(ABC): method get_file_payload (line 13) | def get_file_payload(cls, url: ParseResult) -> Any: method write_payload_to_file (line 26) | def write_payload_to_file(cls, url: ParseResult, content: str) -> None:
FILE: lakehouse_engine/utils/storage/file_storage_functions.py class FileStorageFunctions (line 15) | class FileStorageFunctions(ABC): # noqa: B024 method read_json (line 19) | def read_json(cls, path: str, disable_dbfs_retry: bool = False) -> Any: method read_sql (line 50) | def read_sql(cls, path: str, disable_dbfs_retry: bool = False) -> Any: method write_payload (line 81) | def write_payload( method is_boto3_configured (line 108) | def is_boto3_configured() -> bool:
FILE: lakehouse_engine/utils/storage/local_fs_storage.py class LocalFSStorage (line 11) | class LocalFSStorage(FileStorage): method get_file_payload (line 17) | def get_file_payload(cls, url: ParseResult) -> TextIO: method write_payload_to_file (line 30) | def write_payload_to_file(cls, url: ParseResult, content: str) -> None:
FILE: lakehouse_engine/utils/storage/s3_storage.py class S3Storage (line 12) | class S3Storage(FileStorage): method get_file_payload (line 18) | def get_file_payload(cls, url: ParseResult) -> Any: method write_payload_to_file (line 36) | def write_payload_to_file(cls, url: ParseResult, content: str) -> None:
FILE: lakehouse_engine_usage/managerhelper/operations-script.js constant TABLE_OPERATIONS (line 94) | const TABLE_OPERATIONS = { constant FILE_OPERATIONS (line 231) | const FILE_OPERATIONS = { function initializeTabs (line 320) | function initializeTabs() { function switchTab (line 333) | function switchTab(tabId) { function initializeEventListeners (line 355) | function initializeEventListeners() { function handleTableOperationChange (line 378) | function handleTableOperationChange() { function handleFileOperationChange (line 393) | function handleFileOperationChange() { function showNoOperationSelected (line 408) | function showNoOperationSelected(container) { function renderDynamicFields (line 423) | function renderDynamicFields(container, operationDef, type) { function renderField (line 448) | function renderField(field, type) { function validateField (line 495) | function validateField(input) { function clearFieldValidation (line 538) | function clearFieldValidation(input) { function updateAddButtonState (line 555) | function updateAddButtonState() { function addCurrentOperation (line 565) | function addCurrentOperation() { function removeOperation (line 630) | function removeOperation(id) { function clearAllOperations (line 641) | function clearAllOperations() { function renderOperationsList (line 657) | function renderOperationsList() { function updateGenerateButtonState (line 699) | function updateGenerateButtonState() { function generateJSON (line 711) | function generateJSON() { function displayJSON (line 747) | function displayJSON(config) { function highlightJSON (line 757) | function highlightJSON() { function formatJSON (line 773) | function formatJSON() { function validateJSON (line 793) | function validateJSON() { function showValidationResult (line 834) | function showValidationResult(isValid, message) { function copyToClipboard (line 843) | async function copyToClipboard() { function downloadJSON (line 869) | function downloadJSON() { function enableActionButtons (line 903) | function enableActionButtons() { function showLoading (line 911) | function showLoading() { function hideLoading (line 918) | function hideLoading() { function showToast (line 927) | function showToast(message, type = 'success') { function saveToLocalStorage (line 946) | function saveToLocalStorage() { function loadFromLocalStorage (line 959) | function loadFromLocalStorage() {
FILE: samples/tpch_load_and_analysis_tutorial.py function is_a_super_vip (line 40) | def is_a_super_vip(df: DataFrame) -> DataFrame:
FILE: tests/conftest.py function patch_databricks_utils_job_info (line 23) | def patch_databricks_utils_job_info() -> Generator: function pytest_addoption (line 33) | def pytest_addoption(parser: Any) -> Any: function spark_driver_memory (line 43) | def spark_driver_memory(request: Any) -> Any: function prepare_exec_env (line 49) | def prepare_exec_env(spark_driver_memory: str) -> None: function before_each_test (line 59) | def before_each_test() -> Generator: function test_session_closure (line 66) | def test_session_closure(request: Any) -> None:
FILE: tests/feature/custom_expectations/test_custom_expectations.py function test_custom_expectation (line 171) | def test_custom_expectation(scenario: dict, caplog: Any) -> None: function _clean_folders (line 218) | def _clean_folders(expectation_name: str) -> None: function _generate_acon (line 228) | def _generate_acon( function _generate_dataframe (line 268) | def _generate_dataframe(load_type: str, expectation_name: str) -> DataFr... function _get_result_and_control_dfs (line 305) | def _get_result_and_control_dfs(
FILE: tests/feature/custom_expectations/test_expectation_validity.py function test_expectation_validity (line 35) | def test_expectation_validity(expectation: str) -> None: function _run_diagnostics (line 58) | def _run_diagnostics(expectation_name: str) -> tuple: function _process_diagnostics_output (line 87) | def _process_diagnostics_output(diagnostics_output: str) -> None: function _validate_metric_name_structure (line 104) | def _validate_metric_name_structure(metric_name: str) -> int:
FILE: tests/feature/data_loader_custom_transformer/test_data_loader_custom_transformer_calculate_kpi.py function yet_another_kpi_calculator (line 26) | def yet_another_kpi_calculator(df: DataFrame) -> DataFrame: function get_test_acon (line 47) | def get_test_acon() -> dict: function test_calculate_kpi_and_merge (line 93) | def test_calculate_kpi_and_merge(scenario: str) -> None:
FILE: tests/feature/data_loader_custom_transformer/test_data_loader_custom_transformer_delta_load.py function multiply_by_100 (line 26) | def multiply_by_100(df: DataFrame) -> DataFrame: function get_test_acon (line 39) | def get_test_acon() -> dict: function test_delta_load (line 140) | def test_delta_load(scenario: str) -> None: function _create_table (line 191) | def _create_table(table_name: str, location: str) -> None:
FILE: tests/feature/data_loader_custom_transformer/test_data_loader_custom_transformer_sql_transformation.py function get_test_acon (line 31) | def get_test_acon() -> dict: function test_sql_transformation_and_merge (line 78) | def test_sql_transformation_and_merge(scenario: str) -> None:
FILE: tests/feature/delta_load/test_delta_load_group_and_rank.py function test_delta_load_group_and_rank (line 37) | def test_delta_load_group_and_rank(scenario: List[str]) -> None: function execute_loads (line 81) | def execute_loads(scenario: List[str], iteration: int) -> None: function _create_table (line 122) | def _create_table(scenario: List[str]) -> None:
FILE: tests/feature/delta_load/test_delta_load_merge_options.py function test_delta_load_merge_options (line 31) | def test_delta_load_merge_options(scenario: List[str]) -> None: function execute_loads (line 72) | def execute_loads(scenario: List[str]) -> None:
FILE: tests/feature/delta_load/test_delta_load_record_mode_cdc.py function test_batch_delta_load (line 35) | def test_batch_delta_load(scenario: List[str]) -> None: function test_file_by_file (line 87) | def test_file_by_file(scenario: str) -> None: function test_backfill (line 157) | def test_backfill(scenario: str) -> None: function test_direct_silver_load (line 214) | def test_direct_silver_load(scenario: str) -> None: function _create_table (line 273) | def _create_table(table_name: str, location: str) -> None:
FILE: tests/feature/test_append_load.py function test_permissive_jdbc_append_load (line 27) | def test_permissive_jdbc_append_load(scenario: str) -> None: function test_failfast_append_load (line 70) | def test_failfast_append_load(scenario: str) -> None: function test_streaming_dropmalformed (line 98) | def test_streaming_dropmalformed(scenario: str) -> None: function test_streaming_with_terminators (line 140) | def test_streaming_with_terminators(scenario: str, caplog: Any) -> None: function _append_data_into_source (line 178) | def _append_data_into_source(scenario: str) -> None:
FILE: tests/feature/test_data_quality.py function test_load_with_dq_validator (line 80) | def test_load_with_dq_validator(scenario: dict) -> None: function test_load_with_dq_validator_table (line 243) | def test_load_with_dq_validator_table(scenario: dict) -> None: function test_validator_dq_spec (line 528) | def test_validator_dq_spec(scenario: dict, caplog: Any) -> None: function test_chunked_result_sink (line 816) | def test_chunked_result_sink(scenario: dict, caplog: Any) -> None: function _test_result_structure (line 905) | def _test_result_structure(df: DataFrame) -> None: function _prepare_validation_df (line 922) | def _prepare_validation_df(df: DataFrame) -> DataFrame:
FILE: tests/feature/test_dq_validator.py function test_dq_rule_id_uniqueness (line 101) | def test_dq_rule_id_uniqueness(scenario: dict, caplog: Any) -> None: function test_dq_validator (line 360) | def test_dq_validator(scenario: dict, caplog: Any) -> None: function test_dq_validator_two_runs (line 507) | def test_dq_validator_two_runs(scenario: dict, caplog: Any) -> None: function _clean_folders (line 585) | def _clean_folders() -> None: function _create_table (line 596) | def _create_table(table_name: str) -> None: function _execute_load (line 622) | def _execute_load(load_type: str) -> None: function _generate_acon (line 644) | def _generate_acon( function _generate_dataframe (line 706) | def _generate_dataframe(load_type: str) -> DataFrame: function _get_result_and_control_dfs (line 734) | def _get_result_and_control_dfs(
FILE: tests/feature/test_engine_usage_stats.py function custom_transformation (line 33) | def custom_transformation(df: DataFrame) -> DataFrame: function _get_test_acon (line 45) | def _get_test_acon(scenario_name: str) -> dict: function test_load_data (line 111) | def test_load_data(scenario: str) -> None: function test_table_manager (line 139) | def test_table_manager(scenario: str) -> None: function test_dq_validator (line 167) | def test_dq_validator(scenario: str) -> None: function _prepare_and_compare_dfs (line 224) | def _prepare_and_compare_dfs(scenario: str) -> None: function _prepare_df_comparison (line 252) | def _prepare_df_comparison(df: DataFrame) -> str:
FILE: tests/feature/test_extract_from_sap_b4.py function test_extract_aq_dso (line 175) | def test_extract_aq_dso(scenario: dict) -> None: function test_extract_cl_dso (line 195) | def test_extract_cl_dso(scenario: dict) -> None: function _execute_and_validate (line 214) | def _execute_and_validate(scenario: dict, extra_params: dict) -> None: function _execute_load (line 244) | def _execute_load( function _get_test_acon (line 284) | def _get_test_acon( function _prepare_files (line 362) | def _prepare_files(scenario: str, extra_params: dict) -> None: function _load_test_table (line 397) | def _load_test_table( function _validate (line 437) | def _validate(scenario: str, extra_params: dict, min_timestamp: bool) ->...
FILE: tests/feature/test_extract_from_sap_bw.py function test_extract_dso (line 210) | def test_extract_dso(scenario: dict, caplog: LogCaptureFixture) -> None: function test_extract_write_optimised_dso (line 232) | def test_extract_write_optimised_dso(scenario: dict, caplog: LogCaptureF... function _execute_and_validate (line 253) | def _execute_and_validate( function _execute_load (line 305) | def _execute_load( function _get_test_acon (line 345) | def _get_test_acon( function _prepare_files (line 446) | def _prepare_files(scenario: str, extra_params: dict) -> None: function _load_test_table (line 491) | def _load_test_table( function _validate (line 531) | def _validate(scenario: str, extra_params: dict, min_timestamp: bool) ->... function test_changelog_table_name_derivation (line 578) | def test_changelog_table_name_derivation(scenario: dict) -> None:
FILE: tests/feature/test_file_manager.py function test_file_manager (line 17) | def test_file_manager(caplog: Any) -> None: function _test_file_manager_copy (line 52) | def _test_file_manager_copy(caplog: Any, s3_cli: Any) -> None: function _test_file_manager_delete (line 80) | def _test_file_manager_delete(caplog: Any, s3_cli: Any) -> None: function test_file_manager_restore_archive (line 113) | def test_file_manager_restore_archive(scenario: dict, caplog: Any) -> None: function _test_file_manager_restore_check (line 146) | def _test_file_manager_restore_check(caplog: Any, s3_cli: Any, s3_res: A... function _test_file_manager_restore_request (line 179) | def _test_file_manager_restore_request(caplog: Any, s3_cli: Any, s3_res:... function test_file_manager_restore_sync (line 220) | def test_file_manager_restore_sync(scenario: dict, caplog: Any) -> None: function _test_file_manager_restore_sync (line 253) | def _test_file_manager_restore_sync(caplog: Any, s3_cli: Any, s3_res: An... function _test_file_manager_restore_sync_retrieval_tier_exception (line 299) | def _test_file_manager_restore_sync_retrieval_tier_exception(caplog: Any...
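Most test functions listed in this index accept a `caplog` argument, pytest's built-in fixture for capturing log records, which lets tests assert on the engine's log output. A minimal, self-contained sketch of that pattern (the logger name and message below are invented for illustration):

```python
# Sketch of the caplog assertion pattern the test signatures above suggest.
import logging

def copy_files(logger_name: str = "FileManager") -> None:
    # stand-in for an engine operation that logs its outcome
    logging.getLogger(logger_name).info("Copy operation finished.")

def test_copy_logs_success(caplog) -> None:
    with caplog.at_level(logging.INFO):
        copy_files()
    assert "Copy operation finished." in caplog.text
```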
FILE: tests/feature/test_file_manager_dbfs.py class FileInfoFixture (line 23) | class FileInfoFixture: method isDir (line 30) | def isDir(self) -> bool: method isFile (line 38) | def isFile(self) -> bool: class DBUtilsFixture (line 47) | class DBUtilsFixture: method __init__ (line 50) | def __init__(self) -> None: method cp (line 55) | def cp(src: str, dest: str, recurse: bool = False) -> None: method ls (line 71) | def ls(path: str) -> list: method mkdirs (line 84) | def mkdirs(path: str) -> None: method mv (line 93) | def mv(src: str, dest: str, recurse: bool = False) -> None: method put (line 109) | def put(path: str, content: str, overwrite: bool = False) -> None: method rm (line 125) | def rm(path: str, recurse: bool = False) -> None: function dbutils_fixture (line 141) | def dbutils_fixture() -> Iterator[None]: function test_file_manager_dbfs (line 152) | def test_file_manager_dbfs(_patch: Any, caplog: Any) -> None: function _list_objects (line 184) | def _list_objects(path: str, objects_list: list, dbutils: Any) -> list: function _test_file_manager_dbfs_copy (line 195) | def _test_file_manager_dbfs_copy(caplog: Any, dbutils: Any) -> None: function _test_file_manager_dbfs_delete (line 229) | def _test_file_manager_dbfs_delete(caplog: Any, dbutils: Any) -> None: function _test_file_manager_dbfs_move (line 261) | def _test_file_manager_dbfs_move(caplog: Any, dbutils: Any) -> None: FILE: tests/feature/test_file_manager_s3.py function test_get_caller_identity_with_default_credentials (line 18) | def test_get_caller_identity_with_default_credentials() -> None: function test_file_manager_s3 (line 24) | def test_file_manager_s3(caplog: Any) -> None: function _test_file_manager_s3_copy (line 60) | def _test_file_manager_s3_copy(caplog: Any, s3_cli: Any) -> None: function _test_file_manager_s3_delete (line 93) | def _test_file_manager_s3_delete(caplog: Any, s3_cli: Any) -> None: function test_file_manager_s3_restore_archive (line 128) | def test_file_manager_s3_restore_archive(scenario: dict, caplog: Any) ->... function _test_file_manager_s3_restore_check (line 162) | def _test_file_manager_s3_restore_check(caplog: Any, s3_cli: Any, s3_res... function _test_file_manager_s3_restore_request (line 195) | def _test_file_manager_s3_restore_request( function test_file_manager_s3_restore_sync (line 238) | def test_file_manager_s3_restore_sync(scenario: dict, caplog: Any) -> None: function _test_file_manager_s3_restore_sync (line 272) | def _test_file_manager_s3_restore_sync(caplog: Any, s3_cli: Any, s3_res:... function _test_file_manager_s3_restore_sync_retrieval_tier_exception (line 318) | def _test_file_manager_s3_restore_sync_retrieval_tier_exception(caplog: ... 
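The DBUtilsFixture class above (with cp, ls, mkdirs, mv, put and rm) suggests the DBFS file-manager tests run against an in-memory stand-in for Databricks dbutils rather than a real workspace. A stripped-down sketch of that test-double idea, with an assumed dict-backed storage layout:

```python
# Simplified in-memory double for dbutils.fs-style calls; the real
# DBUtilsFixture exposes more methods (ls, mkdirs, mv) and richer behavior.
class InMemoryDbUtilsFS:
    def __init__(self) -> None:
        self._files: dict[str, str] = {}  # path -> content

    def put(self, path: str, content: str, overwrite: bool = False) -> None:
        if path in self._files and not overwrite:
            raise FileExistsError(path)
        self._files[path] = content

    def cp(self, src: str, dest: str, recurse: bool = False) -> None:
        self._files[dest] = self._files[src]

    def rm(self, path: str, recurse: bool = False) -> None:
        self._files.pop(path, None)

fs = InMemoryDbUtilsFS()
fs.put("dbfs:/tmp/a.txt", "hello")
fs.cp("dbfs:/tmp/a.txt", "dbfs:/tmp/b.txt")
fs.rm("dbfs:/tmp/a.txt")
```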
FILE: tests/feature/test_full_load.py function test_batch_full_load (line 34) | def test_batch_full_load(scenario: List[str]) -> None:
FILE: tests/feature/test_gab.py function _create_gab_tables (line 42) | def _create_gab_tables() -> None: function _generate_calendar_test_dates (line 53) | def _generate_calendar_test_dates() -> list: function _transform_dates_list_to_dataframe (line 65) | def _transform_dates_list_to_dataframe(dates: list) -> DataFrame: function _feed_dim_calendar (line 79) | def _feed_dim_calendar(df: DataFrame) -> DataFrame: function _feed_table_with_test_data (line 138) | def _feed_table_with_test_data( function _create_and_load_source_data_for_use_case (line 192) | def _create_and_load_source_data_for_use_case(source_table: str) -> None: function _import_use_case_sql (line 208) | def _import_use_case_sql(use_case_name: str) -> None: function _setup_use_case (line 220) | def _setup_use_case(use_case_name: str) -> None: function _gab_setup (line 231) | def _gab_setup() -> None: function _run_setup_use_case (line 265) | def _run_setup_use_case(request: SubRequest) -> None: function test_gold_asset_builder (line 340) | def test_gold_asset_builder(scenario: dict, caplog: Any) -> None:
FILE: tests/feature/test_heartbeat.py function _create_heartbeat_table (line 36) | def _create_heartbeat_table(scenario_name: str, tables: dict) -> None: function _test_heartbeat_sensor_data_feed (line 53) | def _test_heartbeat_sensor_data_feed( function _test_execute_sensor_heartbeat (line 88) | def _test_execute_sensor_heartbeat( function _test_update_heartbeat_sensor_status (line 128) | def _test_update_heartbeat_sensor_status( function _trigger_heartbeat_sensor_jobs (line 183) | def _trigger_heartbeat_sensor_jobs( function test_heartbeat (line 287) | def test_heartbeat(scenario: dict) -> None:
FILE: tests/feature/test_jdbc_reader.py function test_jdbc_reader (line 56) | def test_jdbc_reader(scenario: List[str]) -> None:
FILE: tests/feature/test_materialize_cdf.py function test_streaming_with_cdf (line 30) | def test_streaming_with_cdf(scenario: str, caplog: Any) -> None:
FILE: tests/feature/test_notification.py function test_email_notification (line 133) | def test_email_notification(scenario: dict) -> None: function test_email_notification_facade (line 273) | def test_email_notification_facade(scenario: dict) -> None: function _parse_email_output (line 332) | def _parse_email_output(
FILE: tests/feature/test_reconciliation.py function test_reconciliation (line 306) | def test_reconciliation(scenario: str, caplog: Any) -> None: function test_nulls_and_zero_values_and_threshold (line 389) | def test_nulls_and_zero_values_and_threshold(
FILE: tests/feature/test_schema_evolution.py function prepare_tests (line 30) | def prepare_tests() -> Generator: function test_schema_evolution_delta_load (line 107) | def test_schema_evolution_delta_load(scenario: str) -> None: function test_schema_evolution_append_load (line 308) | def test_schema_evolution_append_load(scenario: str) -> None: function test_schema_evolution_full_load (line 463) | def test_schema_evolution_full_load(scenario: str) -> None: function _create_table (line 531) | def _create_table(table_name: str, location: str) -> None:
FILE: tests/feature/test_sensors.py function test_table_sensor (line 64) | def test_table_sensor(scenario: list) -> None: function test_if_sensor_already_exists (line 179) | def test_if_sensor_already_exists(scenario: dict) -> None: function test_jdbc_sensor (line 255) | def test_jdbc_sensor(scenario: str) -> None: function test_files_sensor (line 331) | def test_files_sensor() -> None: function test_update_sensor_status (line 366) | def test_update_sensor_status() -> None: function _insert_data_into_upstream_table (line 402) | def _insert_data_into_upstream_table( function _insert_files_sensor_test_data (line 428) | def _insert_files_sensor_test_data(files_location: str) -> StructType: function _insert_into_jdbc_table (line 452) | def _insert_into_jdbc_table(
FILE: tests/feature/test_sftp_reader.py function sftp_client (line 43) | def sftp_client(sftpserver: SFTPServer) -> Generator: function test_sftp_reader_csv (line 86) | def test_sftp_reader_csv( function test_sftp_reader_fwf (line 145) | def test_sftp_reader_fwf( function test_sftp_reader_gz_file (line 194) | def test_sftp_reader_gz_file( function test_sftp_reader_json (line 243) | def test_sftp_reader_json( function test_sftp_reader_mult_files (line 292) | def test_sftp_reader_mult_files( function test_sftp_reader_xml (line 348) | def test_sftp_reader_xml( function test_sftp_reader_zip_file (line 404) | def test_sftp_reader_zip_file( function test_sftp_server_available (line 442) | def test_sftp_server_available(sftpserver: SFTPServer) -> None: function _execute_and_validate (line 453) | def _execute_and_validate( function _get_test_acon (line 509) | def _get_test_acon( function remote_location (line 548) | def remote_location() -> dict: function rename_remote_files (line 589) | def rename_remote_files(sftp_client: SFTPClient) -> None:
FILE: tests/feature/test_sharepoint_reader.py function _read_bytes (line 114) | def _read_bytes(path_value: str) -> bytes: function _get_output_path_by_scenario (line 119) | def _get_output_path_by_scenario() -> Dict[str, str]: function _setup_sharepoint_reader_mocks_for_success (line 157) | def _setup_sharepoint_reader_mocks_for_success( function _assert_archive_calls_for_success (line 226) | def _assert_archive_calls_for_success( function _assert_sharepoint_reader_success_output (line 291) | def _assert_sharepoint_reader_success_output( function test_sharepoint_reader_success (line 338) | def test_sharepoint_reader_success( function test_sharepoint_reader_failures (line 391) | def test_sharepoint_reader_failures( function test_sharepoint_reader_exceptions (line 612) | def test_sharepoint_reader_exceptions(
FILE: tests/feature/test_sharepoint_writer.py function test_sharepoint_writer_exceptions (line 104) | def test_sharepoint_writer_exceptions( function test_sharepoint_writer (line 180) | def test_sharepoint_writer(
FILE: tests/feature/test_table_manager.py function test_table_manager (line 86) | def test_table_manager(scenarios: dict, caplog: Any) -> None:
FILE: tests/feature/test_writers.py function test_write_to_files (line 53) | def test_write_to_files(scenario: dict) -> None: function test_write_to_rest_api (line 85) | def test_write_to_rest_api(scenario: dict) -> None: function test_write_to_jdbc (line 112) | def test_write_to_jdbc(scenario: dict) -> None: function test_write_to_table (line 147) | def test_write_to_table(scenario: dict) -> None: function test_write_to_console (line 177) | def test_write_to_console(scenario: dict, capsys: Any) -> None: function test_write_to_dataframe (line 206) | def test_write_to_dataframe(scenario: dict, capsys: Any) -> None: function test_write_to_dataframe_checkpoints (line 259) | def test_write_to_dataframe_checkpoints(scenario: dict, capsys: Any) -> ...
function test_multiple_write_to_dataframe (line 320) | def test_multiple_write_to_dataframe(scenario: dict, capsys: Any) -> None: function test_write_to_dataframe_exception (line 367) | def test_write_to_dataframe_exception(scenario: dict, capsys: Any) -> None: function _generate_acon_from_source (line 418) | def _generate_acon_from_source(source: OrderedDict) -> dict: function _prepare_files (line 458) | def _prepare_files(iteration: int = 0) -> None: FILE: tests/feature/transformations/test_chain_transformations.py function test_chain_transformations (line 37) | def test_chain_transformations(scenario: dict, caplog: Any) -> None: function _prepare_files (line 100) | def _prepare_files() -> None: FILE: tests/feature/transformations/test_column_creators.py function test_column_creators (line 28) | def test_column_creators(scenario: str) -> None: FILE: tests/feature/transformations/test_column_reshapers.py function test_column_reshapers (line 34) | def test_column_reshapers(scenario: dict) -> None: FILE: tests/feature/transformations/test_data_maskers.py function test_data_maskers (line 29) | def test_data_maskers(scenario: str) -> None: FILE: tests/feature/transformations/test_date_transformers.py function test_date_transformers (line 29) | def test_date_transformers(scenario: str) -> None: FILE: tests/feature/transformations/test_drop_duplicate_rows.py function test_drop_duplicate_rows (line 31) | def test_drop_duplicate_rows(scenario: str) -> None: FILE: tests/feature/transformations/test_joiners.py function test_joiners (line 37) | def test_joiners(scenario: List[str]) -> None: FILE: tests/feature/transformations/test_multiple_transformations.py function test_multiple_transformations (line 28) | def test_multiple_transformations(scenario: str) -> None: FILE: tests/feature/transformations/test_null_handlers.py function test_replace_nulls (line 29) | def test_replace_nulls(scenario: str) -> None: FILE: tests/feature/transformations/test_optimizers.py function is_df_cached (line 15) | def is_df_cached(df: DataFrame) -> DataFrame: function is_df_not_cached (line 30) | def is_df_not_cached(df: DataFrame) -> DataFrame: function test_optimizer (line 46) | def test_optimizer(scenario: str) -> None: function _get_test_acon (line 58) | def _get_test_acon(read_type: str) -> dict: FILE: tests/feature/transformations/test_regex_transformers.py function test_regex_transformers (line 29) | def test_regex_transformers(scenario: str) -> None: FILE: tests/feature/transformations/test_unions.py function test_unions (line 45) | def test_unions(scenario: List[str]) -> None: function copy_data_files (line 116) | def copy_data_files(iteration: int) -> None: FILE: tests/feature/transformations/test_watermarker.py function test_drop_duplicates_with_watermark (line 33) | def test_drop_duplicates_with_watermark(scenario: dict) -> None: function test_joins_with_watermark (line 93) | def test_joins_with_watermark(scenario: dict) -> None: function _drop_and_create_table (line 159) | def _drop_and_create_table(table_name: str, location: str) -> None: FILE: tests/resources/feature/materialize_cdf/data/table/streaming_with_cdf.sql type test_db (line 1) | CREATE TABLE test_db.streaming_with_cdf (salesorder INT, item INT, date ... 
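The transformer signatures earlier in this index (e.g. Filters.expression_filter, DataMaskers.column_dropper, Optimizers.cache) consistently return Callable, which points to a compose-and-fold style: each transformer produces a DataFrame -> DataFrame function that the engine applies in sequence. A sketch of that convention, with simplified stand-ins for the real transformers:

```python
# Sketch of the "transformer returns a Callable" convention; these are
# simplified stand-ins, not the engine's actual implementations.
from functools import reduce
from typing import Callable
from pyspark.sql import DataFrame

def expression_filter(exp: str) -> Callable:
    """Return a transformer that keeps only rows matching a SQL expression."""
    def inner(df: DataFrame) -> DataFrame:
        return df.filter(exp)
    return inner

def drop_duplicate_rows() -> Callable:
    """Return a transformer that removes fully duplicated rows."""
    def inner(df: DataFrame) -> DataFrame:
        return df.dropDuplicates()
    return inner

def apply_transformers(df: DataFrame, transformers: list) -> DataFrame:
    """Fold a list of DataFrame -> DataFrame callables over the input."""
    return reduce(lambda acc, transform: transform(acc), transformers, df)

# usage (assuming an existing df):
# result = apply_transformers(df, [expression_filter("amount > 0"),
#                                  drop_duplicate_rows()])
```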
FILE: tests/resources/feature/table_manager/create/table/test_table_complex_default_scenario.sql type test_db (line 3) | CREATE TABLE test_db.DummyTableBronzeComplexDefaultScenario1 FILE: tests/resources/feature/table_manager/create/table/test_table_complex_different_delimiter_scenario.sql type test_db (line 3) | CREATE TABLE test_db.DummyTableBronzeComplexDifferentDelimiterScenario1 FILE: tests/resources/feature/table_manager/create/table/test_table_simple_split_scenario.sql type test_db (line 1) | CREATE TABLE test_db.DummyTableBronzeSimpleSplitScenario FILE: tests/resources/feature/table_manager/create/view/test_view_complex_default_scenario.sql type test_db (line 3) | CREATE VIEW test_db.DummyViewBronzeComplexDefaultScenario1 (id,col1,col2... type test_db (line 8) | CREATE VIEW test_db.DummyViewBronzeComplexDefaultScenario2 (id,col1,col2... FILE: tests/resources/feature/table_manager/create/view/test_view_complex_different_delimiter_scenario.sql type test_db (line 3) | CREATE VIEW test_db.DummyViewBronzeComplexDifferentDelimiterScenario1 (i... FILE: tests/resources/feature/table_manager/create/view/test_view_simple_split_scenario.sql type test_db (line 1) | CREATE VIEW test_db.DummyViewBronzeSimpleSplitScenario (id,col1,col2,col... FILE: tests/unit/test_acon_validation.py function test_manager_validation (line 93) | def test_manager_validation(scenario: dict) -> None: FILE: tests/unit/test_custom_configs.py function test_custom_config (line 13) | def test_custom_config() -> None: FILE: tests/unit/test_databricks_utils.py function test_get_usage_context_for_serverless (line 27) | def test_get_usage_context_for_serverless() -> None: FILE: tests/unit/test_failure_notification_creation.py function test_failure_notification_creation (line 43) | def test_failure_notification_creation(scenario: dict) -> None: function _parse_email_output (line 73) | def _parse_email_output(mail_content: str) -> str: FILE: tests/unit/test_heartbeat_acon_creation.py function _create_heartbeat_table (line 24) | def _create_heartbeat_table() -> None: function _select_all (line 36) | def _select_all(table: str) -> DataFrame: function _check_acon (line 45) | def _check_acon(heartbeat_table: str, acon: dict, acon_result_list: dict... 
function test_get_sensor_acon (line 219) | def test_get_sensor_acon(mock_get_db_utils: Mock, scenario: dict) -> None: FILE: tests/unit/test_heartbeat_anchor_job.py function _create_heartbeat_table (line 22) | def _create_heartbeat_table() -> None: function test_anchor_job (line 104) | def test_anchor_job(mock_run_job: Mock, scenario: dict) -> None: FILE: tests/unit/test_log_filter_sensitive_data.py function test_log_filter_sensitive_data (line 65) | def test_log_filter_sensitive_data(caplog: Any) -> None: FILE: tests/unit/test_notification_creation.py function test_notification_creation (line 74) | def test_notification_creation(scenario: dict) -> None: function test_office365_notification_creation (line 132) | def test_office365_notification_creation(scenario: TerminatorSpec) -> None: FILE: tests/unit/test_notification_factory.py function test_notification_factory (line 48) | def test_notification_factory(scenario: dict) -> None: FILE: tests/unit/test_prisma_dq_rule_id.py function test_prisma_manual_function_definition (line 130) | def test_prisma_manual_function_definition(scenario: dict) -> None: FILE: tests/unit/test_prisma_function_definition.py function test_prisma_manual_function_definition (line 102) | def test_prisma_manual_function_definition(scenario: dict) -> None: FILE: tests/unit/test_rest_api_functions.py function test_send_payload_to_rest_api_simple_params (line 22) | def test_send_payload_to_rest_api_simple_params(_: Any, caplog: Any) -> ... function test_send_payload_to_rest_api_with_file_params (line 54) | def test_send_payload_to_rest_api_with_file_params(_: Any, caplog: Any) ... FILE: tests/unit/test_sensor.py function test_create_sensor (line 82) | def test_create_sensor(scenario: dict, capsys: Any) -> None: function test_sensor_already_exists (line 234) | def test_sensor_already_exists(scenario: dict, capsys: Any) -> None: class TestExecuteSensor (line 263) | class TestExecuteSensor: method setup_class (line 273) | def setup_class(cls) -> None: method teardown_class (line 278) | def teardown_class(cls) -> None: method test_execute_stream_sensor (line 303) | def test_execute_stream_sensor(self, scenario: dict, capsys: Any) -> N... method test_execute_batch_sensor (line 355) | def test_execute_batch_sensor(self, scenario: dict, capsys: Any) -> None: method test_execute_sensor_raise_no_input_spec_format_implemented (line 432) | def test_execute_sensor_raise_no_input_spec_format_implemented( method test_execute_sensor_raise_no_new_data_exception (line 467) | def test_execute_sensor_raise_no_new_data_exception( FILE: tests/unit/test_sensor_manager.py function test_sensor_update_set (line 58) | def test_sensor_update_set(scenario: dict, capsys: Any) -> None: function test_sensor_data (line 96) | def test_sensor_data(scenario: dict, capsys: Any) -> None: function test_check_if_sensor_has_acquired_data (line 170) | def test_check_if_sensor_has_acquired_data(scenario: dict, capsys: Any) ... function control_table_fixture (line 193) | def control_table_fixture() -> DataFrame: function test_read_sensor_table_data (line 271) | def test_read_sensor_table_data( function test_has_new_data (line 330) | def test_has_new_data(scenario: dict, capsys: Any) -> None: function test_if_generate_filter_exp_preprocess_query (line 461) | def test_if_generate_filter_exp_preprocess_query(scenario: dict, capsys:... function test_generate_sensor_table_preprocess_query (line 521) | def test_generate_sensor_table_preprocess_query(scenario: dict, capsys: ... 
function dataframe_fixture (line 536) | def dataframe_fixture() -> DataFrame: function test_read_new_data (line 562) | def test_read_new_data( function test_generate_sensor_sap_logchain_query (line 680) | def test_generate_sensor_sap_logchain_query(scenario: dict, capsys: Any)... function _prepare_new_data_tests (line 728) | def _prepare_new_data_tests(return_empty_df: bool = False) -> DataFrame: FILE: tests/unit/test_sharepoint_csv_reader.py class DummySharepointOptions (line 14) | class DummySharepointOptions: method __init__ (line 22) | def __init__(self, local_options: Dict[str, Any]) -> None: class DummyInputSpec (line 27) | class DummyInputSpec: method __init__ (line 34) | def __init__(self, sharepoint_options: DummySharepointOptions) -> None: function create_csv_reader (line 39) | def create_csv_reader(local_options: Dict[str, Any]) -> SharepointCsvRea... function test_detect_delimiter_uses_user_provided_delimiter (line 55) | def test_detect_delimiter_uses_user_provided_delimiter() -> None: function test_detect_delimiter_autodetects_semicolon (line 66) | def test_detect_delimiter_autodetects_semicolon() -> None: function test_detect_delimiter_defaults_to_comma_on_decode_error (line 77) | def test_detect_delimiter_defaults_to_comma_on_decode_error() -> None: function test_resolve_csv_options_prefers_sep_over_delimiter (line 88) | def test_resolve_csv_options_prefers_sep_over_delimiter() -> None: function test_resolve_spark_csv_options_uses_delimiter_when_sep_missing (line 106) | def test_resolve_spark_csv_options_uses_delimiter_when_sep_missing() -> ... function test_resolve_spark_csv_options_autodetects_when_no_delimiter_provided (line 118) | def test_resolve_spark_csv_options_autodetects_when_no_delimiter_provide... function test_resolve_spark_csv_options_warns_when_expected_columns_names_mismatch (line 133) | def test_resolve_spark_csv_options_warns_when_expected_columns_names_mis... function test_resolve_spark_csv_options_warns_when_expected_columns_validation_fails (line 160) | def test_resolve_spark_csv_options_warns_when_expected_columns_validatio... FILE: tests/unit/test_spark_session.py function test_spark_session (line 9) | def test_spark_session() -> None: FILE: tests/unit/test_version.py function test_version (line 8) | def test_version() -> None: FILE: tests/utils/dataframe_helpers.py class DataframeHelpers (line 26) | class DataframeHelpers(object): method has_diff (line 32) | def has_diff( method read_from_file (line 73) | def read_from_file( method read_from_table (line 104) | def read_from_table(db_table: str, options: Optional[dict] = None) -> ... 
method read_from_jdbc (line 123) | def read_from_jdbc( method write_into_jdbc_table (line 145) | def write_into_jdbc_table( method create_empty_dataframe (line 174) | def create_empty_dataframe(struct_type: StructType) -> DataFrame: method create_dataframe (line 187) | def create_dataframe(data: list, schema: StructType) -> DataFrame: method create_delta_table (line 201) | def create_delta_table( FILE: tests/utils/dq_rules_table_utils.py function _create_dq_functions_source_table (line 7) | def _create_dq_functions_source_table( FILE: tests/utils/exec_env_helpers.py class ExecEnvHelpers (line 6) | class ExecEnvHelpers(object): method prepare_exec_env (line 10) | def prepare_exec_env(spark_driver_memory: str) -> None: method set_exec_env_config (line 30) | def set_exec_env_config(cls, key: str, value: str) -> None: method reset_default_spark_session_configs (line 35) | def reset_default_spark_session_configs(cls) -> None: FILE: tests/utils/local_storage.py class LocalStorage (line 13) | class LocalStorage(object): method copy_file (line 17) | def copy_file(from_path: str, to_path: str) -> None: method clean_folder (line 29) | def clean_folder(folder_path: str) -> None: method delete_file (line 38) | def delete_file(file_path: str) -> None: method read_file (line 48) | def read_file(file_path: str) -> str: method copy_dir (line 59) | def copy_dir(source: str, destination: str) -> None: FILE: tests/utils/mocks.py class MockRESTResponse (line 9) | class MockRESTResponse: method __init__ (line 12) | def __init__( method json (line 30) | def json(self) -> Optional[dict[str, Any]]: method __enter__ (line 37) | def __enter__(self) -> MockRESTResponse: method __exit__ (line 41) | def __exit__( FILE: tests/utils/smtp_server.py class SMTPHandler (line 12) | class SMTPHandler(Message): method __init__ (line 15) | def __init__(self) -> None: method handle_message (line 20) | def handle_message(self, message: Any) -> None: class SMTPServer (line 32) | class SMTPServer: method __init__ (line 37) | def __init__(self, host: str, port: int) -> None: method start (line 49) | def start(self) -> None: method stop (line 57) | def stop(self) -> None: method get_messages (line 63) | def get_messages(self) -> list: method clear_messages (line 67) | def clear_messages(self) -> None: method get_last_message (line 71) | def get_last_message(self) -> Any:
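DataframeHelpers.has_diff is the comparison primitive the test suite leans on; its full signature is truncated above, so the exact options are unknown. One common way to implement such a check, sketched here as an assumption rather than the engine's actual code, is a two-way exceptAll, which catches rows missing from either side as well as duplicate-count differences:

```python
# Hedged sketch of a has_diff-style content comparison between dataframes.
from pyspark.sql import DataFrame

def has_diff(df: DataFrame, another_df: DataFrame) -> bool:
    """Return True if the two dataframes differ in content (assumes same schema)."""
    left_only = df.exceptAll(another_df)    # rows in df missing from another_df
    right_only = another_df.exceptAll(df)   # rows in another_df missing from df
    return left_only.count() > 0 or right_only.count() > 0
```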