from ai_core_sdk.models import Artifact
from ai_core_sdk.ai_core_v2_client import AICoreV2Client

ai_core_client = AICoreV2Client(
    # `AI_API_URL`
    base_url = "<base url>" + "/v2",  # the current SAP AI Core API version is 2
    # `URL`
    auth_url = "<auth url>" + "/oauth/token",
    # `clientid`
    client_id = "<client id>",
    # `clientsecret`
    client_secret = "<client secret>"
)
response = ai_core_client.resource_groups.create("aribot")
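To confirm that the resource group has been provisioned before continuing, you can query it back. The snippet below is a minimal sketch assuming the SDK's resource_groups.get call; verify the exact signature against your ai_core_sdk version.

# Optional check -- assumes resource_groups.get is available in your SDK version
resource_group = ai_core_client.resource_groups.get("aribot")
print(resource_group.__dict__)  # the status should eventually report PROVISIONED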
import logging
import os

import boto3
from botocore.exceptions import ClientError


def upload_file_to_s3(file_name, bucket, object_name=None):
    """Upload a file to an S3 bucket

    :param file_name: File to upload
    :param bucket: Bucket to upload to
    :param object_name: S3 object name. If not specified then file_name is used
    :return: True if file was uploaded, else False
    """
    # If S3 object_name was not specified, use file_name
    if object_name is None:
        object_name = os.path.basename(file_name)

    # Upload the file
    s3_client = boto3.client('s3')
    try:
        s3_client.upload_file(file_name, bucket, object_name)
    except ClientError as e:
        logging.error(e)
        return False
    return True
model_filename = 'keras_model.h5'
res = upload_file_to_s3(model_filename, '<S3 BUCKET NAME>', f'aribot/model/{model_filename}')
aws s3 ls s3://<BUCKET NAME>/aribot/model/
response = ai_core_client.object_store_secrets.create(
    resource_group = 'aribot',
    type = "S3",
    name = "aribot-secret",
    path_prefix = "aribot",
    endpoint = "<fill up>",
    bucket = "<fill up>",
    region = "<fill up>",
    data = {
        "AWS_ACCESS_KEY_ID": "<fill up>",
        "AWS_SECRET_ACCESS_KEY": "<fill up>"
    }
)
print(response.__dict__)
response = ai_core_client.artifact.create(
    resource_group = 'aribot',
    name = "aribot-model",
    kind = Artifact.Kind.MODEL,
    url = "ai://aribot-secret/model",
    scenario_id = "aribot-clf",  # must match scenarios.ai.sap.com/id in the serving template below
    description = "Aribot model run 1"
)
print(response.__dict__)
artifact_id = response.__dict__['id']
docker login <YOUR_DOCKER_REGISTRY> -u <YOUR_DOCKER_USERNAME>
docker build -t docker.io/<YOUR_DOCKER_USERNAME>/aribot:0.0.1 .
docker push docker.io/<YOUR_DOCKER_USERNAME>/aribot:0.0.1
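The image built above is expected to contain the serving application that listens on port 9001 and implements the /v1/predict route called later for inference. The actual serving code is not reproduced here; the following is a minimal sketch only, assuming a Flask app, a 224x224 input size, that KFServing's storage initializer places the model under /mnt/models, and a hypothetical class_names list for mapping prediction indices to labels.

# app.py -- minimal serving sketch (assumptions: Flask, TensorFlow/Keras,
# model downloaded to /mnt/models by the storage initializer, 224x224 inputs,
# and a placeholder class_names list)
import base64
import io

import numpy as np
import tensorflow as tf
from flask import Flask, request, jsonify
from PIL import Image

app = Flask(__name__)
model = tf.keras.models.load_model('/mnt/models/keras_model.h5')
class_names = ['Adapter Pk2', '<other classes>']  # placeholder label list


@app.route('/v1/predict', methods=['POST'])
def predict():
    # Decode the base64 image sent under the 'imgData' key
    img_bytes = base64.b64decode(request.json['imgData'])
    img = Image.open(io.BytesIO(img_bytes)).convert('RGB').resize((224, 224))
    x = np.expand_dims(np.asarray(img) / 255.0, axis=0)

    probs = model.predict(x)[0]
    idx = int(np.argmax(probs))
    return jsonify({'Prediction': class_names[idx], 'Probability': str(probs[idx])})


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=9001)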
apiVersion: ai.sap.com/v1alpha1
kind: ServingTemplate
metadata:
  name: aribot-classifier-1 # Enter a unique name here
  annotations:
    scenarios.ai.sap.com/description: "Aribot image classification"
    scenarios.ai.sap.com/name: "aribot-image-clf"
    executables.ai.sap.com/description: "Aribot TensorFlow GPU serving executable"
    executables.ai.sap.com/name: "aribot-serve-executable"
  labels:
    scenarios.ai.sap.com/id: "aribot-clf"
    ai.sap.com/version: "1.0.0"
spec:
  inputs:
    artifacts:
      - name: modelArtifact
  template:
    apiVersion: "serving.kubeflow.org/v1beta1"
    metadata:
      labels: |
        ai.sap.com/resourcePlan: starter
    spec: |
      predictor:
        imagePullSecrets:
          - name: docker-registry-secret
        containers:
          - name: kfserving-container
            image: "docker.io/<YOUR_DOCKER_USERNAME>/aribot:0.0.1"
            ports:
              - containerPort: 9001
                protocol: TCP
            env:
              - name: STORAGE_URI
                value: "{{inputs.artifacts.modelArtifact}}"
response = ai_core_client.applications.create(
    application_name = "aribot-clf-app-1",
    revision = "HEAD",
    repository_url = "https://github.com/<GITHUB_USERNAME>/aicore-pipelines",
    path = "aribot"
)
print(response.__dict__)

response = ai_core_client.applications.get_status(application_name = 'aribot-clf-app-1')
print(response.__dict__)
print('*' * 80)
print(response.sync_ressources_status[0].__dict__)
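Once the application has synced the serving template, a configuration and a deployment still have to be created before there is a deployment URL to call. Those steps are not shown above; the sketch below uses the standard ai_core_sdk configuration and deployment calls, takes the scenario ID and executable ID from the serving template (the executable ID is the template's metadata.name), and reuses artifact_id from the artifact registration. The configuration name is hypothetical; double-check the import path and field names against your SDK version.

from ai_core_sdk.models import InputArtifactBinding  # if this import fails, try ai_api_client_sdk.models.input_artifact_binding

# Bind the registered model artifact to the serving template's modelArtifact input
config_resp = ai_core_client.configuration.create(
    name = "aribot-serve-config",               # hypothetical configuration name
    scenario_id = "aribot-clf",                  # scenarios.ai.sap.com/id from the serving template
    executable_id = "aribot-classifier-1",       # metadata.name of the serving template
    resource_group = "aribot",
    input_artifact_bindings = [
        InputArtifactBinding(key = "modelArtifact", artifact_id = artifact_id)
    ]
)
print(config_resp.__dict__)

# Start the deployment; its URL becomes usable once the status reaches RUNNING
deploy_resp = ai_core_client.deployment.create(
    resource_group = "aribot",
    configuration_id = config_resp.id
)
print(deploy_resp.__dict__)

# Check the deployment status and read its URL for the inference call below
dep = ai_core_client.deployment.get(deployment_id = deploy_resp.id, resource_group = "aribot")
print(dep.status, dep.deployment_url)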
img_filename = './raw_images/IMG_5694.JPG'  # test images are stored in the raw_images folder on your local machine


def get_img_byte_str():
    """Return the image as a base64-encoded string so it can be sent over the network."""
    from io import BytesIO
    import base64

    with open(img_filename, "rb") as fh:  # read the image as raw bytes
        buf = BytesIO(fh.read())
    return base64.b64encode(buf.getvalue()).decode()


base64_data = get_img_byte_str()
import requests

# URL
deployment_url = "<INSERT_YOUR_DEPLOYMENT_URL>" + "/v1/predict"  # append the prediction endpoint

# Preparing the input for inference
test_input = {'imgData': base64_data}

endpoint = f"{deployment_url}"  # endpoint implemented in the serving engine
headers = {
    "Authorization": ai_core_client.rest_client.get_token(),
    "ai-resource-group": "aribot",
    "Content-Type": "application/json"
}

response = requests.post(endpoint, headers=headers, json=test_input)
print('Inference result:', response.content)
Inference result: b'{"Prediction": "Adapter Pk2", "Probability": "1.0"}'