import numpy as np
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler, StandardScaler
# Import this module's functions
from functions import map_num_to_string, map_string_to_num, sparse_array
Loading the saved model and the fitted scalers
from pickle import load
# Load the trained conductivity model and the input/output scalers that
# were fitted and saved during training.
model = tf.keras.models.load_model('saved/predictor-conductivity-model')
# Load scalers. Use context managers so the pickle files are closed
# deterministically instead of leaking open file handles.
# NOTE(review): pickle.load is unsafe on untrusted data — acceptable here
# because these are project-local artifacts produced by our own training run.
with open('saved/X_scaler.pkl', 'rb') as scaler_file:
    X_scaler = load(scaler_file)
with open('saved/Y_scaler.pkl', 'rb') as scaler_file:
    Y_scaler = load(scaler_file)
Exported models can be stored as a TensorFlow SavedModel.
Export custom pipeline code as well if your model depends on custom code and functions.
More information is available on tensorflow.org.
# Export the Keras model in TensorFlow SavedModel format so it can be
# uploaded to cloud storage and served from a pre-built container.
tf.saved_model.save(model, 'deploy_cloud/')
INFO:tensorflow:Assets written to: deploy_cloud/assets
To start working with GCP, we need a new project to deploy the model.
This project is named Nanocomposite Conductivity, with the ID nanocomposite-conductivity on GCP.
The saved model files must be uploaded to cloud storage provided by Google.
To ease the process, you can use the web interface at console.cloud.google.com.
A new bucket was created with the path electrical-conductivity-nanocomposite/deploy_cloud.
Open the AI Platform website. You might need to enable the API first.
Then create a model. For this project, the model is named electrical_conductivity_nanocomposite.
Next, a version must be created.
We can choose a pre-built container which supports TensorFlow.
Browse the Model URI to where the model was uploaded in the bucket.
You can optionally add GPUs to accelerate each prediction node, which incurs additional costs.
We can quickly test the model at console.cloud.google.com once the version is created. Note that creating the version (the step above) takes a long time to finish.
Try this input (X features were already scaled):
{"instances":[ [1.0, 0.5, 0.10409017] ]}
We should expect $-0.778929830$, which is what the local deployment predicted. In my test case, it works smoothly.
To run the model remotely, we need to set the right permissions to access the model online.
To set up authentication, create a service account key and set an environment variable pointing to the path of the service account key file.
In the role field, select AI Platform > AI Platform Admin and Storage > Storage Object Admin.
Create a JSON key file and download it to your computer. Rename it to credentials.json if you want.
We need the oauth2client, googleapiclient, and google.api_core packages.
import googleapiclient.discovery
from google.api_core.client_options import ClientOptions
from oauth2client.client import GoogleCredentials
def predict_json(
    project, model, instances, CREDENTIALS_FILE, region, version=None
):
    """Send JSON data to a deployed model for prediction.

    Args:
        project (str): project where the Cloud ML Engine Model is deployed.
        model (str): model name.
        instances ([Mapping[str: Any]]): keys should be the names of tensors
            the deployed model expects as inputs; values should be datatypes
            convertible to tensors, or (potentially nested) lists of such
            datatypes.
        CREDENTIALS_FILE (str): path to the service-account JSON key file.
        region (str): regional endpoint; set to None for ml.googleapis.com.
        version (str, optional): version of the model to target.

    Returns:
        Mapping[str: any]: dictionary of prediction results defined by the
        model.

    Raises:
        RuntimeError: if the service response reports an error.
    """
    # Authenticate from the key file directly instead of relying on the
    # GOOGLE_APPLICATION_CREDENTIALS environment variable.
    credentials = GoogleCredentials.from_stream(CREDENTIALS_FILE)

    # Regional endpoints look like "<region>-ml.googleapis.com"; the global
    # endpoint is plain "ml.googleapis.com".
    endpoint_prefix = f"{region}-ml" if region else "ml"
    options = ClientOptions(
        api_endpoint=f"https://{endpoint_prefix}.googleapis.com"
    )
    service = googleapiclient.discovery.build(
        "ml", "v1", client_options=options, credentials=credentials
    )

    # Fully-qualified resource name; omitting the version targets the
    # model's default version.
    name = f"projects/{project}/models/{model}"
    if version is not None:
        name = f"{name}/versions/{version}"

    response = service.projects().predict(
        name=name, body={"instances": instances}
    ).execute()

    if "error" in response:
        raise RuntimeError(response["error"])
    return response["predictions"]
# Path to the service-account JSON key downloaded from GCP.
CREDENTIALS_FILE = "credentials.json"
# GCP project ID and the model name created on AI Platform above.
PROJECT_ID = "nanocomposite-conductivity"
MODEL_NAME = "electrical_conductivity_nanocomposite"
# These are the values we want a prediction for
# NOTE(review): the features are assumed to be already scaled with X_scaler —
# confirm against the training pipeline before reusing raw inputs here.
inputs_for_prediction = [[1.0, 0.5, 0.10409017]]
# Send the request to the us-central1 regional endpoint; no explicit
# version, so the model's default version is used.
predict_json(
    PROJECT_ID,
    MODEL_NAME,
    inputs_for_prediction,
    CREDENTIALS_FILE,
    "us-central1",
)
[[-0.77892983]]
To install oauth2client:
conda install oauth2client
To install googleapiclient:
conda install google-api-python-client