SYMBOL INDEX (60 symbols across 17 files) FILE: ch04/code/ControlStructures.py function get_random_int_op (line 22) | def get_random_int_op(minimum: int, maximum: int) -> int: function process_small_op (line 31) | def process_small_op(data: int): function process_medium_op (line 38) | def process_medium_op(data: int): function process_large_op (line 45) | def process_large_op(data: int): function conditional_pipeline (line 58) | def conditional_pipeline(): FILE: ch04/code/Lightweight Pipeline.py function add (line 22) | def add(a: float, b: float) -> float: function my_divmod (line 40) | def my_divmod( function calc_pipeline (line 80) | def calc_pipeline( FILE: ch04/code/RecommenderPipeline.py function recommender_pipeline (line 46) | def recommender_pipeline(): FILE: ch06/MLflow.py function evaluation_model (line 120) | def evaluation_model(y_test, y_pred): function train_knnmodel (line 142) | def train_knnmodel(parameters, inputs, tags, log=False): function train_mlpmodel (line 211) | def train_mlpmodel(parameters, inputs, tags, log=False): class PTG (line 286) | class PTG: method __init__ (line 287) | def __init__(self, thresholds_x0, thresholds_a, thresholds_b): method get_ptgmodel (line 292) | def get_ptgmodel(self, x, a, b, x0): method fit (line 296) | def fit(self, dfx, y): method predict (line 317) | def predict(self, dfx): function train_ptgmodel (line 328) | def train_ptgmodel(parameters, inputs, tags, log=False): FILE: ch2/query-endpoint.py function download_mnist (line 25) | def download_mnist(): function gen_image (line 29) | def gen_image(arr): FILE: ch2_seldon_examples/tf_mnist_no_seldon_pipeline.py function mnist_pipeline (line 29) | def mnist_pipeline(gcs_bucket=None, FILE: ch2_seldon_examples/train_pipeline.py function mnist_train_pipeline (line 11) | def mnist_train_pipeline(docker_org="index.docker.io/seldonio", FILE: ch9/ctscans/process-dicoms-into-vectors/src/program.py function create_3d_matrix (line 25) | def create_3d_matrix(path):
function upload_blob (line 53) | def upload_blob(bucket_name, source_file_name, destination_blob_name): FILE: ch9/ctscans/visualize-basis-vectors/src/program.py function read_mahout_drm (line 19) | def read_mahout_drm(path): function plot_3d_matrix (line 35) | def plot_3d_matrix(img3d, img_shape, ax_aspect, sag_aspect, cor_aspect): function plot_2_3d_matrices (line 51) | def plot_2_3d_matrices(img1, img2, aspect, slice, cmap): function upload_blob (line 61) | def upload_blob(bucket_name, source_file_name, destination_blob_name): function download_folder (line 77) | def download_folder(bucket_name='your-bucket-name', FILE: data-extraction/python-notebook/MailingListDataPrep.py function scrapeMailArchives (line 36) | def scrapeMailArchives(mailingList: str, year: int, month: int): function extract_links (line 69) | def extract_links(body): function extract_domains (line 75) | def extract_domains(links): function contains_python_stack_trace (line 91) | def contains_python_stack_trace(body): function contains_probably_java_stack_trace (line 95) | def contains_probably_java_stack_trace(body): function contains_exception_in_task (line 106) | def contains_exception_in_task(body): function makeDomainsAList (line 147) | def makeDomainsAList(d): function download_data (line 189) | def download_data(year: int) -> str: function download_tld_data (line 252) | def download_tld_data() -> str: function clean_data (line 278) | def clean_data(input_path: str) -> str: function prepare_features (line 313) | def prepare_features(input_path: str, tld_info_path: str): function my_pipeline_mini (line 430) | def my_pipeline_mini(year: int): function my_pipeline2 (line 466) | def my_pipeline2(year: int): function train_func (line 535) | def train_func(input_path: String): FILE: data-extraction/python-spark-notebook/SparkMailingListForKF.py function download_emails (line 88) | def download_emails(date): function extract_date_from_email_datefield (line 165) |
def extract_date_from_email_datefield(datefield): function is_ok (line 202) | def is_ok(post): FILE: data-extraction/python-spark/LaunchSparkJobs.py function local_pipeline (line 61) | def local_pipeline(): FILE: data-extraction/spark-hello-world/hello_world_pipeline.py function spark_hello_world_pipeline (line 11) | def spark_hello_world_pipeline(jar_location="gcs://....", tf_job_image="... FILE: data-extraction/tfx/TFDV.py function pipeline_with_dl (line 42) | def pipeline_with_dl(): function tfdv_pipeline (line 69) | def tfdv_pipeline(): function tfx_pipeline (line 145) | def tfx_pipeline(): FILE: feature-prep/spark/SparkMailingListFeaturePrep.py function extract_links (line 133) | def extract_links(body): function extract_domains (line 140) | def extract_domains(links): function contains_python_stack_trace (line 156) | def contains_python_stack_trace(body): function contains_probably_java_stack_trace (line 160) | def contains_probably_java_stack_trace(body): function contains_exception_in_task (line 171) | def contains_exception_in_task(body): FILE: feature-prep/tft/transform.py function preprocessing_fn (line 10) | def preprocessing_fn(inputs): FILE: recommender/Recommender_Kubeflow.py class DeepCollaborativeFiltering (line 233) | class DeepCollaborativeFiltering(Model): method __init__ (line 234) | def __init__(self, n_customers, n_products, n_factors, p_dropout=0.2): method rate (line 256) | def rate(self, customer_idxs, product_idxs):