import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch8.ElasticsearchSink;
import org.apache.flink.streaming.connectors.elasticsearch8.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch8.RequestIndexer;

import org.apache.http.HttpHost;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Minimal example of writing a Flink DataStream of strings into Elasticsearch.
 *
 * <p>Fixes over the original snippet:
 * <ul>
 *   <li>{@code RuntimeContext} was referenced but never imported.</li>
 *   <li>{@code RequestIndexer.add(...)} takes action requests (e.g. {@code IndexRequest}),
 *       not a raw {@code Map} — the document must be wrapped via
 *       {@code Requests.indexRequest().source(...)}.</li>
 *   <li>{@code ElasticsearchSink.Builder} is constructed from a {@code List<HttpHost>},
 *       not a {@code RestClient}; the connector manages its own HTTP client internally,
 *       so the hand-built (and never-closed) {@code RestClient} was both wrong and a leak.</li>
 *   <li>The builder's bulk-flush setters are plain void setters, so they are called as
 *       separate statements rather than a fluent chain.</li>
 * </ul>
 */
public class FlinkSinkES8Example {
    public static void main(String[] args) throws Exception {
        // Create the Flink execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Hosts the sink connects to. The connector builds and owns its own REST client,
        // so no RestClient is created here.
        List<HttpHost> httpHosts = new ArrayList<>();
        httpHosts.add(new HttpHost("localhost", 9200));

        // Turns each stream element into an Elasticsearch index request.
        ElasticsearchSinkFunction<String> sinkFunction = new ElasticsearchSinkFunction<String>() {
            @Override
            public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
                // Build the document to index.
                Map<String, Object> document = new HashMap<>();
                document.put("message", element);
                // Wrap the document in an IndexRequest; RequestIndexer does not accept raw maps.
                indexer.add(Requests.indexRequest()
                        .index("flink-messages")
                        .source(document));
            }
        };

        // Configure the sink: flush every action immediately (demo setting) or every 5 s.
        ElasticsearchSink.Builder<String> sinkBuilder =
                new ElasticsearchSink.Builder<>(httpHosts, sinkFunction);
        sinkBuilder.setBulkFlushMaxActions(1);   // max buffered actions before a bulk flush
        sinkBuilder.setBulkFlushInterval(5000);  // max interval (ms) between bulk flushes

        // Wire the demo source into the sink and run the job.
        DataStream<String> dataStream = env.fromElements("Hello, world!", "This is a test message.");
        dataStream.addSink(sinkBuilder.build());
        env.execute("Flink Sink ES8 Example");
    }
}
版权声明:本文内容由阿里云实名注册用户自发贡献,版权归原作者所有,阿里云开发者社区不拥有其著作权,亦不承担相应法律责任。具体规则请查看《阿里云开发者社区用户服务协议》和《阿里云开发者社区知识产权保护指引》。如果您发现本社区中有涉嫌抄袭的内容,填写侵权投诉表单进行举报,一经查实,本社区将立刻删除涉嫌侵权内容。
实时计算Flink版是阿里云提供的全托管Serverless Flink云服务,基于 Apache Flink 构建的企业级、高性能实时大数据处理系统。提供全托管版 Flink 集群和引擎,提高作业开发运维效率。