[gcp-cloud-run] WIP: implement GCP cloud run
ntorionbearstudio committed Jul 28, 2020
1 parent 944afef commit ad3891d
Showing 9 changed files with 8,133 additions and 0 deletions.
67 changes: 67 additions & 0 deletions technologies/job/gcp-cloud-run/default/context.yaml
@@ -0,0 +1,67 @@
id: default
label: Default
description: ""
recommended: true
trustLevel: experimental
endpoint:
features:
- type: TEXT
name: jsonKey
label: JSON key
required: true
helper: "Full JSON document"
job:
features:
- type: ENDPOINT
name: endpoint
label: Endpoint
required: true
- type: SELECT
name: project
label: Project
required: true
options:
script: ./jobForm.js
function: getProjects
dependsOn:
- endpoint
- type: SELECT
name: region
label: Region
required: true
options:
script: ./jobForm.js
function: getRegions
dependsOn:
- project
- type: TEXT
name: templatePath
label: Template path
required: true
helper: "A Cloud Storage path to the template from which to create the job. Must be valid Cloud Storage URL, beginning with 'gs://'"
dependsOn:
- region
- type: TEXT
name: jobName
label: Job name
required: true
dependsOn:
- region
- type: TEXT
name: jsonParameters
label: Parameters JSON
required: false
helper: "Parameters for job"
dependsOn:
- region
instance:
actions:
onStart:
script: ./instanceActions.js
function: start
getStatus:
script: ./instanceActions.js
function: getStatus
getLogs:
script: ./instanceActions.js
function: getLogs
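
The templatePath, jobName and jsonParameters features above appear to be carried over from the Dataflow template-launch form. Purely as an illustration of the jsonParameters field (none of these values are part of the commit), a template's parameters would be entered as a flat JSON object of string keys and values, e.g. for the public Word Count template:

{
  "inputFile": "gs://dataflow-samples/shakespeare/kinglear.txt",
  "output": "gs://my-bucket/wordcount/output"
}
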
119 changes: 119 additions & 0 deletions technologies/job/gcp-cloud-run/default/instanceActions.js
@@ -0,0 +1,119 @@
const { Response, JobStatus, Log, Stream } = require('@saagie/sdk');
const { google } = require('googleapis');
const dataflow = google.dataflow('v1b3');
const logging = google.logging('v2');
const run = google.run('v1');
const { getAuth, getErrorMessage } = require('../utils');
const { JOB_STATUS } = require('../job-states');

/**
 * Logic to start the external job instance on GCP Cloud Run.
 * @param {Object} params
 * @param {Object} params.job - Contains job data including featuresValues.
 */
exports.start = async ({ job }) => {
try {
const gcpKey = JSON.parse(job.featuresValues.endpoint.jsonKey);

const auth = getAuth(gcpKey);

// Create a Cloud Run (Knative) Service; the service name and container image are still hard-coded in this WIP.
const res = await run.namespaces.services.create({
auth,
parent: `namespaces/${job.featuresValues.project.id}`,
requestBody: {
apiVersion: 'serving.knative.dev/v1',
kind: 'Service',
metadata: {
name: 'test',
namespace: job.featuresValues.project.id,
},
spec: {
template: {
spec: {
containers: [
{
image: 'gcr.io/cloudrun/hello',
env: [],
}
],
},
},
traffic: [
{
percent: 100,
latestRevision: true,
},
],
}
}
});

console.log({ res });

return Response.success();
} catch (error) {
if (error.response && error.response.data) {
console.log(error.response.data);
console.log(error.response.data.error && error.response.data.error.errors);
}
return getErrorMessage(error, 'Failed to create GCP Cloud Run service');
}
};

/**
* Logic to retrieve the external job instance status.
* @param {Object} params
* @param {Object} params.job - Contains job data including featuresValues.
* @param {Object} params.instance - Contains instance data including the payload returned in the start function.
*/
exports.getStatus = async ({ job, instance }) => {
try {
const gcpKey = JSON.parse(job.featuresValues.endpoint.jsonKey);

const auth = getAuth(gcpKey);

// Still queries the Dataflow API, carried over from the Dataflow connector (start() does not yet return a matching payload).
const { data } = await dataflow.projects.locations.jobs.get({
auth,
projectId: job.featuresValues.project.id,
location: job.featuresValues.region.id,
jobId: instance.payload.newJob.id,
});

return Response.success(JOB_STATUS[data.currentState] || JobStatus.AWAITING);
} catch (error) {
return getErrorMessage(error, 'Failed to get status for GCP Dataflow job');
}
};

/**
* Logic to retrieve the external job instance logs.
* @param {Object} params
* @param {Object} params.job - Contains job data including featuresValues.
* @param {Object} params.instance - Contains instance data including the payload returned in the start function.
*/
exports.getLogs = async ({ job, instance }) => {
try {
const gcpKey = JSON.parse(job.featuresValues.endpoint.jsonKey);

const auth = getAuth(gcpKey);

const resLogging = await logging.entries.list({
requestBody: {
filter: `resource.type="dataflow_step" resource.labels.job_id="${instance.payload.newJob.id}" logName="projects/${job.featuresValues.project.id}/logs/dataflow.googleapis.com%2Fjob-message"`,
orderBy: "timestamp desc",
resourceNames: [`projects/${job.featuresValues.project.id}`]
},
auth
});

if (
resLogging
&& resLogging.data
&& resLogging.data.entries
&& resLogging.data.entries.length > 0
) {
return Response.success(resLogging.data.entries.reverse().map(({ timestamp, textPayload }) => Log(textPayload, Stream.STDOUT, timestamp)));
}

return Response.empty();
} catch (error) {
return getErrorMessage(error, 'Failed to get logs for GCP Dataflow job');
}
};
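
Both instanceActions.js and jobForm.js import getAuth and getErrorMessage from ../utils, one of the nine changed files not shown in this excerpt. A minimal sketch of what those helpers could look like, assuming a service-account JWT client from googleapis and the SDK's Response.error — the commit's actual implementation may differ:

const { Response } = require('@saagie/sdk');
const { google } = require('googleapis');

// Build an authenticated JWT client from the parsed service account key.
exports.getAuth = (gcpKey) => new google.auth.JWT(
  gcpKey.client_email,
  null,
  gcpKey.private_key,
  ['https://www.googleapis.com/auth/cloud-platform'],
);

// Wrap an API failure into an SDK error response, surfacing the API message when available.
exports.getErrorMessage = (error, mainErrorMessage) => {
  if (error && error.response && error.response.data && error.response.data.error) {
    return Response.error(`${mainErrorMessage}: ${error.response.data.error.message}`, { error });
  }
  return Response.error(mainErrorMessage, { error });
};
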
66 changes: 66 additions & 0 deletions technologies/job/gcp-cloud-run/default/jobForm.js
@@ -0,0 +1,66 @@
const { Response } = require('@saagie/sdk');
const { google } = require('googleapis');
const cloudfunctions = google.cloudfunctions('v1');
const cloudresourcemanager = google.cloudresourcemanager('v1');
const { getAuth, getErrorMessage } = require('../utils');

/**
* Function to retrieve projects for the authenticated user.
* @param {Object} entity - Contains entity data including featuresValues.
* @param {Object} entity.featuresValues - Contains all the values from the entity features declared in the context.yaml
*/
exports.getProjects = async ({ featuresValues }) => {
try {
// Parse the key inside the try block so an invalid JSON key yields a clean error response.
const gcpKey = JSON.parse(featuresValues.endpoint.jsonKey);

const auth = getAuth(gcpKey);

const { data: { projects } } = await cloudresourcemanager.projects.list({
auth,
});

if (!projects || !projects.length) {
return Response.empty('No projects available');
}

return Response.success(
projects.map(({ projectId, name }) => ({
id: projectId,
label: name,
})),
);
} catch (error) {
return getErrorMessage(error, "Can't retrieve projects");
}
};

/**
* Function to retrieve regions options for a defined project.
* @param {Object} entity - Contains entity data including featuresValues.
* @param {Object} entity.featuresValues - Contains all the values from the entity features declared in the context.yaml
*/
exports.getRegions = async ({ featuresValues }) => {
try {
const gcpKey = JSON.parse(featuresValues.endpoint.jsonKey);

const auth = getAuth(gcpKey);

const { data: { locations } } = await cloudfunctions.projects.locations.list({
auth,
name: `projects/${featuresValues.project.id}`,
});

if (!locations || !locations.length) {
return Response.empty('No regions available');
}

return Response.success(
locations.map(({ locationId }) => ({
id: locationId,
label: locationId,
})),
);
} catch (error) {
return getErrorMessage(error, "Can't retrieve regions");
}
};
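
The jsonKey endpoint value parsed by both form functions (JSON.parse(featuresValues.endpoint.jsonKey)) is presumably the full contents of a GCP service account key file. For reference only — values redacted, nothing here is part of the commit — such a key is a JSON document of the form:

{
  "type": "service_account",
  "project_id": "...",
  "private_key_id": "...",
  "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n",
  "client_email": "...@...iam.gserviceaccount.com",
  "client_id": "...",
  "token_uri": "https://oauth2.googleapis.com/token"
}
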
16 changes: 16 additions & 0 deletions technologies/job/gcp-cloud-run/job-states.js
@@ -0,0 +1,16 @@
const { JobStatus } = require('@saagie/sdk');

exports.JOB_STATUS = {
'JOB_STATE_STOPPED': JobStatus.KILLED,
'JOB_STATE_RUNNING': JobStatus.RUNNING,
'JOB_STATE_DONE': JobStatus.SUCCEEDED,
'JOB_STATE_FAILED': JobStatus.FAILED,
'JOB_STATE_CANCELLED': JobStatus.KILLED,
'JOB_STATE_UPDATED': JobStatus.KILLED,
'JOB_STATE_DRAINING': JobStatus.KILLING,
'JOB_STATE_DRAINED': JobStatus.KILLED,
'JOB_STATE_PENDING': JobStatus.QUEUED,
'JOB_STATE_CANCELLING': JobStatus.KILLING,
'JOB_STATE_QUEUED': JobStatus.QUEUED,
'JOB_STATE_UNKNOWN': JobStatus.AWAITING,
};
76 changes: 76 additions & 0 deletions technologies/job/gcp-cloud-run/metadata.yaml
@@ -0,0 +1,76 @@
version: v1
id: gcp-cloud-run
label: GCP Cloud Run
description: "GCP Cloud Run Saagie implementation"
available: true
type: EXTERNAL_JOB
icon: job

contexts:
- id: default
label: Default
description: ""
recommended: true
trustLevel: experimental
endpoint:
features:
- type: TEXT
name: jsonKey
label: JSON key
required: true
helper: "Full JSON document"
job:
features:
- type: ENDPOINT
name: endpoint
label: Endpoint
required: true
- type: SELECT
name: project
label: Project
required: true
options:
script: ./jobForm.js
function: getProjects
dependsOn:
- endpoint
- type: SELECT
name: region
label: Region
required: true
options:
script: ./jobForm.js
function: getRegions
dependsOn:
- project
- type: TEXT
name: templatePath
label: Template path
required: true
helper: "A Cloud Storage path to the template from which to create the job. Must be valid Cloud Storage URL, beginning with 'gs://'"
dependsOn:
- region
- type: TEXT
name: jobName
label: Job name
required: true
dependsOn:
- region
- type: TEXT
name: jsonParameters
label: Parameters JSON
required: false
helper: "Parameters for job"
dependsOn:
- region
instance:
actions:
onStart:
script: ./instanceActions.js
function: start
getStatus:
script: ./instanceActions.js
function: getStatus
getLogs:
script: ./instanceActions.js
function: getLogs