问题描述
我需要从Cloud Functions触发Cloud Dataflow管道,并且云函数必须用Java编写。云函数的触发器是Google Cloud Storage的对象创建(finalize/create)事件,即当文件上传到GCS存储桶中时,云函数必须触发Cloud Dataflow管道。
当我创建数据流管道(批处理)并执行管道时,它会创建一个数据流管道模板并创建一个数据流作业。
但是当我使用Java创建云函数并上传文件时,状态只是显示“确定”,但不会触发数据流管道。
云功能
package com.example;

import com.example.Example.GCSEvent;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpRequestinitializer;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.dataflow.Dataflow;
import com.google.api.services.dataflow.model.CreateJobFromTemplateRequest;
import com.google.api.services.dataflow.model.RuntimeEnvironment;
import com.google.auth.http.HttpCredentialsAdapter;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.cloud.functions.BackgroundFunction;
import com.google.cloud.functions.Context;

import java.io.IOException;
import java.security.GeneralSecurityException;
import java.util.HashMap;
import java.util.logging.Logger;
/**
 * Background Cloud Function triggered by a GCS object-finalize event.
 * Submits a Dataflow job from a pre-built template stored in GCS.
 */
public class Example implements BackgroundFunction<GCSEvent> {
    private static final Logger logger = Logger.getLogger(Example.class.getName());

    /**
     * Handles the GCS event and launches a Dataflow job from a template.
     *
     * @param event   the GCS object metadata that triggered the function
     * @param context event metadata (id, type, timestamp)
     * @throws IOException              on credential or HTTP failures
     * @throws GeneralSecurityException if the trusted transport cannot be built
     */
    @Override
    public void accept(GCSEvent event, Context context) throws IOException, GeneralSecurityException {
        logger.info("Event: " + context.eventId());
        logger.info("Event Type: " + context.eventType());

        HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
        JsonFactory jsonFactory = JacksonFactory.getDefaultInstance();
        // Application Default Credentials: uses the function's runtime service account.
        GoogleCredentials credentials = GoogleCredentials.getApplicationDefault();
        // FIX: correct type name is HttpRequestInitializer (capital I).
        HttpRequestInitializer requestInitializer = new HttpCredentialsAdapter(credentials);

        Dataflow dataflowService = new Dataflow.Builder(httpTransport, jsonFactory, requestInitializer)
                .setApplicationName("Google Dataflow function Demo")
                .build();

        String projectId = "my-project-id";

        RuntimeEnvironment runtimeEnvironment = new RuntimeEnvironment();
        // FIX: correct API method name is setBypassTempDirValidation (capital T).
        runtimeEnvironment.setBypassTempDirValidation(false);
        runtimeEnvironment.setTempLocation("gs://my-dataflow-job-bucket/tmp");

        CreateJobFromTemplateRequest createJobFromTemplateRequest = new CreateJobFromTemplateRequest();
        createJobFromTemplateRequest.setEnvironment(runtimeEnvironment);
        createJobFromTemplateRequest.setLocation("us-central1");
        createJobFromTemplateRequest.setGcsPath("gs://my-dataflow-job-bucket-staging/templates/cloud-dataflow-template");
        createJobFromTemplateRequest.setJobName("Dataflow-Cloud-Job");
        createJobFromTemplateRequest.setParameters(new HashMap<String, String>());
        createJobFromTemplateRequest.getParameters().put("inputFile", "gs://cloud-dataflow-bucket-input/*.txt");

        // BUG FIX: create(...) only builds the request object; without the
        // trailing execute() no HTTP call is made and no Dataflow job starts —
        // which is why the function logged "OK" but never triggered the pipeline.
        dataflowService.projects().templates().create(projectId, createJobFromTemplateRequest).execute();
        // BUG FIX: removed the unconditional UnsupportedOperationException that
        // aborted every invocation after (previously: before) the API call.
    }

    /** Payload of the GCS trigger event; field names must match the GCS JSON keys. */
    public static class GCSEvent {
        String bucket;
        String name;
        // BUG FIX: was "Metageneration"; the GCS event JSON key is lowercase
        // "metageneration", so the capitalized field never deserialized.
        String metageneration;
    }
}
pom.xml(云功能)
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven build for the Java 11 Cloud Function that submits a Dataflow job. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>cloudfunctions</groupId>
<artifactId>http-function</artifactId>
<version>1.0-SNAPSHOT</version>
<properties>
<!-- Cloud Functions Java runtime targets Java 11. -->
<maven.compiler.target>11</maven.compiler.target>
<maven.compiler.source>11</maven.compiler.source>
</properties>
<dependencies>
<!-- https://mvnrepository.com/artifact/com.google.auth/google-auth-library-credentials -->
<dependency>
<groupId>com.google.auth</groupId>
<artifactId>google-auth-library-credentials</artifactId>
<version>0.21.1</version>
</dependency>
<!-- Generated client for the Dataflow REST API (templates.create / launch). -->
<dependency>
<groupId>com.google.apis</groupId>
<artifactId>google-api-services-dataflow</artifactId>
<version>v1b3-rev207-1.20.0</version>
</dependency>
<!-- Provides BackgroundFunction / Context interfaces used by Example.java. -->
<dependency>
<groupId>com.google.cloud.functions</groupId>
<artifactId>functions-framework-api</artifactId>
<version>1.0.1</version>
</dependency>
<!-- GoogleCredentials.getApplicationDefault() and HttpCredentialsAdapter. -->
<dependency>
<groupId>com.google.auth</groupId>
<artifactId>google-auth-library-oauth2-http</artifactId>
<version>0.21.1</version>
</dependency>
</dependencies>
<!-- required for Java 11 functions in the inline editor -->
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
<configuration>
<excludes>
<exclude>.google/</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
</project>
云功能日志
我浏览了以下博客(仅供参考),它们通过云功能触发了来自云存储的数据流。但是代码是用Node.js或python编写的。 但是我的云函数必须用Java编写。
通过Node.js中的云功能触发数据流管道
https://dzone.com/articles/triggering-dataflow-pipelines-with-cloud-functions
使用python通过云函数触发数据流管道
https://medium.com/google-cloud/how-to-kick-off-a-dataflow-pipeline-via-cloud-functions-696927975d4e
对此非常感谢。
解决方法
// Working fix: use templates().launch(...).execute() so the HTTP call is
// actually sent. (Fragment of a method body — `writer`, `dataflowService`
// and `projectId` are defined in the enclosing function, outside this view.)
RuntimeEnvironment runtimeEnvironment = new RuntimeEnvironment();
runtimeEnvironment.setBypassTempDirValidation(false);
runtimeEnvironment.setTempLocation("gs://karthiksfirstbucket/temp1");
// LaunchTemplateParameters carries job name, runtime env and template params.
LaunchTemplateParameters launchTemplateParameters = new LaunchTemplateParameters();
launchTemplateParameters.setEnvironment(runtimeEnvironment);
// Timestamp suffix keeps job names unique across repeated triggers.
launchTemplateParameters.setJobName("newJob" + (new Date()).getTime());
Map<String,String> params = new HashMap<String,String>();
params.put("inputFile","gs://karthiksfirstbucket/sample.txt");
params.put("output","gs://karthiksfirstbucket/count1");
launchTemplateParameters.setParameters(params);
writer.write("4"); // NOTE(review): leftover debug output to the HTTP response — safe to remove.
// gcsPath points at Google's public Word_Count template for us-central1.
Dataflow.Projects.Templates.Launch launch = dataflowService.projects().templates().launch(projectId,launchTemplateParameters);
launch.setGcsPath("gs://dataflow-templates-us-central1/latest/Word_Count");
// execute() performs the REST call; without it no job would be created.
launch.execute();
上面的代码通过启动(launch)一个Dataflow模板来真正提交并运行数据流管道,其行为如下:
- 使用应用程序默认凭据(可以更改为用户凭据或服务凭据)
- region是默认区域(可以更改)。
- 为每个HTTP触发器创建一个作业(可以更改触发器)。
完整的代码可以在下面找到:
https://github.com/karthikeyan1127/Java_CloudFunction_DataFlow/blob/master/Hello.java