| reponame (string, 2 distinct values) | filepath (string, 18 distinct values) | content (string, 18 distinct values) |
|---|---|---|
| sayakpaul/CI-CD-for-Model-Training | cloud_build_tfx.ipynb | from google.colab import auth<br>auth.authenticate_user()<br>GOOGLE_CLOUD_PROJECT = "fast-ai-exploration"<br>GOOGLE_CLOUD_REGION = "us-central1"<br>GCS_BUCKET_NAME = "vertex-tfx-mlops"<br>PIPELINE_NAME = "penguin-vertex-training"<br>DATA_ROOT = "gs://{}/data/{}".format(GCS_BUCKET_NAME, PIPELINE_NAME)<br>MODULE_ROOT = "gs://{}/pipeline_modu... |
| sayakpaul/CI-CD-for-Model-Training | cloud_function_trigger.ipynb | from google.colab import auth<br>auth.authenticate_user()<br>GOOGLE_CLOUD_PROJECT = "fast-ai-exploration"<br>GOOGLE_CLOUD_REGION = "us-central1"<br>GCS_BUCKET_NAME = "vertex-tfx-mlops"<br>PIPELINE_NAME = "penguin-vertex-training"<br>PIPELINE_ROOT = "gs://{}/pipeline_root/{}".format(GCS_BUCKET_NAME, PIPELINE_NAME)<br>PIPELINE_LOCATION = f"{... |
| sayakpaul/CI-CD-for-Model-Training | cloud_scheduler_trigger.ipynb | # only need if you are using Colab<br>from google.colab import auth<br>auth.authenticate_user()<br>GOOGLE_CLOUD_PROJECT = "gcp-ml-172005"<br>GOOGLE_CLOUD_REGION = "us-central1"<br>PIPELINE_NAME = "penguin-vertex-training"<br>PUBSUB_TOPIC = f"trigger-{PIPELINE_NAME}"<br>SCHEDULER_JOB_NAME = "MLOpsJob"<br>import json<br>data = '{"num_epochs": "3",... |
| sayakpaul/CI-CD-for-Model-Training | build/compile_pipeline.py | import argparse<br>from absl import logging<br>from create_pipeline import create_pipeline<br>from tfx.orchestration import data_types<br>from tfx.orchestration.kubeflow.v2 import kubeflow_v2_dag_runner<br>import os<br>import sys<br>SCRIPT_DIR = os.path.dirname(<br>os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__)... |
| sayakpaul/CI-CD-for-Model-Training | build/create_pipeline.py | from tfx.orchestration import data_types<br>from tfx import v1 as tfx<br>import os<br>import sys<br>SCRIPT_DIR = os.path.dirname(<br>os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__)))<br>)<br>sys.path.append(os.path.normpath(os.path.join(SCRIPT_DIR, "..")))<br>from utils import config, custom_components<br>def creat... |
| sayakpaul/CI-CD-for-Model-Training | build/penguin_trainer.py | # Copied from https://www.tensorflow.org/tfx/tutorials/tfx/penguin_simple and<br># slightly modified run_fn() to add distribution_strategy.<br>from typing import List<br>from absl import logging<br>import tensorflow as tf<br>from tensorflow import keras<br>from tensorflow_metadata.proto.v0 import schema_pb2<br>from tensorflow_transform.tf... |
| sayakpaul/CI-CD-for-Model-Training | cloud_function/main.py | # Copyright 2021 Google LLC<br>#<br># Licensed under the Apache License, Version 2.0 (the "License");<br># you may not use this file except in compliance with the License.<br># You may obtain a copy of the License at<br>#<br># http://www.apache.org/licenses/LICENSE-2.0<br>#<br># Unless required by applicable law or agreed to in writing, ... |
| sayakpaul/CI-CD-for-Model-Training | utils/config.py | import os<br># GCP<br>GCP_PROJECT = os.getenv("PROJECT")<br>GCP_REGION = os.getenv("REGION")<br># Data<br>DATA_ROOT = os.getenv("DATA_ROOT")<br># Training and serving<br>TFX_IMAGE_URI = os.getenv("TFX_IMAGE_URI")<br>MODULE_ROOT = os.getenv("MODULE_ROOT")<br>MODULE_FILE = os.path.join(MODULE_ROOT, "penguin_trainer.py")<br>SERVING_MODEL_DIR = os.g... |
| sayakpaul/CI-CD-for-Model-Training | utils/custom_components.py | """<br>Taken from:<br>* https://github.com/GoogleCloudPlatform/mlops-with-vertex-ai/blob/main/src/tfx_pipelines/components.py#L51<br>"""<br>from tfx.dsl.component.experimental.decorators import component<br>from tfx.dsl.component.experimental.annotations import (<br>InputArtifact,<br>OutputArtifact,<br>Parameter,<br>)<br>from tfx.t... |
| sayakpaul/Dual-Deployments-on-Vertex-AI | custom_components/firebase_publisher.py | """<br>Custom TFX component for Firebase upload.<br>Author: Chansung Park<br>"""<br>from tfx import types<br>from tfx.dsl.component.experimental.decorators import component<br>from tfx.dsl.component.experimental.annotations import Parameter<br>from tfx import v1 as tfx<br>from absl import logging<br>import firebase_admin<br>from firebase_admin im... |
| sayakpaul/Dual-Deployments-on-Vertex-AI | custom_components/flower_densenet_trainer.py | from typing import List<br>from absl import logging<br>from tensorflow import keras<br>from tfx import v1 as tfx<br>import tensorflow as tf<br>_IMAGE_FEATURES = {<br>"image": tf.io.FixedLenFeature([], tf.string),<br>"class": tf.io.FixedLenFeature([], tf.int64),<br>"one_hot_class": tf.io.VarLenFeature(tf.float32),<br>}<br>_CONCRETE_IN... |
| sayakpaul/Dual-Deployments-on-Vertex-AI | custom_components/flower_mobilenet_trainer.py | from typing import List<br>from absl import logging<br>from tensorflow import keras<br>from tfx import v1 as tfx<br>import tensorflow as tf<br>_IMAGE_FEATURES = {<br>"image": tf.io.FixedLenFeature([], tf.string),<br>"class": tf.io.FixedLenFeature([], tf.int64),<br>"one_hot_class": tf.io.VarLenFeature(tf.float32),<br>}<br>_INPUT_SHAPE... |
| sayakpaul/Dual-Deployments-on-Vertex-AI | custom_components/vertex_deployer.py | """<br>Custom TFX component for deploying a model to a Vertex AI Endpoint.<br>Author: Sayak Paul<br>Reference: https://github.com/GoogleCloudPlatform/mlops-with-vertex-ai/blob/main/build/utils.py#L97<br>"""<br>from tfx.dsl.component.experimental.decorators import component<br>from tfx.dsl.component.experimental.annotations import Param... |
| sayakpaul/Dual-Deployments-on-Vertex-AI | custom_components/vertex_uploader.py | """<br>Custom TFX component for importing a model into Vertex AI.<br>Author: Sayak Paul<br>Reference: https://github.com/GoogleCloudPlatform/mlops-with-vertex-ai/blob/main/src/tfx_pipelines/components.py#L74<br>"""<br>import os<br>import tensorflow as tf<br>from tfx.dsl.component.experimental.decorators import component<br>from tfx.dsl.comp... |
| sayakpaul/Dual-Deployments-on-Vertex-AI | notebooks/Custom_Model_TFX.ipynb | from google.colab import auth<br>auth.authenticate_user()<br>import tensorflow as tf<br>print('TensorFlow version: {}'.format(tf.__version__))<br>from tfx import v1 as tfx<br>print('TFX version: {}'.format(tfx.__version__))<br>import kfp<br>print('KFP version: {}'.format(kfp.__version__))<br>from google.cloud import aiplatform as vertex_ai<br>im... |
| sayakpaul/Dual-Deployments-on-Vertex-AI | notebooks/Dataset_Prep.ipynb | #@title GCS<br>#@markdown You should change these values as per your preferences. The copy operation can take ~5 minutes.<br>BUCKET_PATH = "gs://flowers-experimental" #@param {type:"string"}<br>REGION = "us-central1" #@param {type:"string"}<br>!gsutil mb -l {REGION} {BUCKET_PATH}<br>!gsutil -m cp -r flower_photos {BUCKET_PATH}<br>impor... |
| sayakpaul/Dual-Deployments-on-Vertex-AI | notebooks/Dual_Deployments_With_AutoML.ipynb | import os<br># The Google Cloud Notebook product has specific requirements<br>IS_GOOGLE_CLOUD_NOTEBOOK = os.path.exists("/opt/deeplearning/metadata/env_version")<br># Google Cloud Notebook requires dependencies to be installed with '--user'<br>USER_FLAG = ""<br>if IS_GOOGLE_CLOUD_NOTEBOOK:<br>USER_FLAG = "--user"<br># Automatically re... |
| sayakpaul/Dual-Deployments-on-Vertex-AI | notebooks/Model_Tests.ipynb | from io import BytesIO<br>from PIL import Image<br>import matplotlib.pyplot as plt<br>import numpy as np<br>import requests<br>import base64<br>from google.cloud.aiplatform.gapic.schema import predict<br>from google.cloud import aiplatform<br>import tensorflow as tf<br>def preprocess_image(image):<br>"""Preprocesses an image."""<br>image = np.... |
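
As a quick reference, below is a minimal sketch of how these records could be loaded and filtered with the Hugging Face `datasets` library. The dataset identifier and the `train` split name are placeholders, not the actual Hub id; each record is assumed to carry the three string fields shown in the table above (`reponame`, `filepath`, `content`).

```python
from datasets import load_dataset

# Placeholder id: replace with the actual Hub repo id of this dataset.
ds = load_dataset("username/tfx-vertex-code-snippets", split="train")

# The columns mirror the table above.
print(ds.column_names)  # ['reponame', 'filepath', 'content']

# Keep only files coming from one of the two source repositories.
ci_cd_files = ds.filter(
    lambda row: row["reponame"] == "sayakpaul/CI-CD-for-Model-Training"
)

# Print a short preview of each file.
for row in ci_cd_files:
    print(row["filepath"])
    print(row["content"][:200])  # first 200 characters of the file
    print("-" * 40)
```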