-
Notifications
You must be signed in to change notification settings - Fork 127
/
main.py
63 lines (48 loc) · 2.11 KB
/
main.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Function to be triggered by Pub/Sub."""
import os
import json
import logging
from kfp.v2.google.client import AIPlatformClient
from google.cloud import storage
import base64
def trigger_pipeline(event, context):
    """Trigger a Vertex AI pipeline run from a Pub/Sub push event.

    Args:
        event: Pub/Sub event payload; ``event["data"]`` must be a
            base64-encoded JSON object of pipeline parameter values.
        context: Pub/Sub event metadata (unused here, required by the
            Cloud Functions signature).

    Raises:
        ValueError: If a required environment variable is unset, or the
            compiled pipeline spec does not exist at the GCS location.
    """
    project = os.getenv("PROJECT")
    region = os.getenv("REGION")
    gcs_pipeline_file_location = os.getenv("GCS_PIPELINE_FILE_LOCATION")

    if not project:
        raise ValueError("Environment variable PROJECT is not set.")
    if not region:
        raise ValueError("Environment variable REGION is not set.")
    # NOTE: the original code repeated this check a second time after
    # creating the storage client; the duplicate was removed.
    if not gcs_pipeline_file_location:
        raise ValueError("Environment variable GCS_PIPELINE_FILE_LOCATION is not set.")

    # Verify the compiled pipeline spec exists before submitting the run,
    # so a bad location fails fast with a clear error.
    storage_client = storage.Client()
    path_parts = gcs_pipeline_file_location.replace("gs://", "").split("/")
    bucket_name = path_parts[0]
    blob_name = "/".join(path_parts[1:])
    bucket = storage_client.bucket(bucket_name)
    blob = storage.Blob(bucket=bucket, name=blob_name)
    if not blob.exists(storage_client):
        raise ValueError(f"{gcs_pipeline_file_location} does not exist.")

    # Pub/Sub delivers the message body base64-encoded.
    data = base64.b64decode(event["data"]).decode("utf-8")
    logging.info("Event data: %s", data)  # lazy %-formatting for logging
    parameter_values = json.loads(data)

    api_client = AIPlatformClient(project_id=project, region=region)
    response = api_client.create_run_from_job_spec(
        job_spec_path=gcs_pipeline_file_location, parameter_values=parameter_values
    )
    logging.info(response)