/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package it.polimi.middleware.projects.flink;

import org.apache.flink.api.java.ExecutionEnvironment;

/**
 * Skeleton for a Flink Batch Job.
 *
 * <p>For a tutorial on how to write a Flink batch application, check the
 * tutorials and examples on the Flink Website.
 *
 * <p>To package your application into a JAR file for execution, change the main class
 * in the pom.xml file to this class (simply search for 'mainClass') and run
 * 'mvn clean package' on the command line.
 */
public class BatchJob {

	public static void main(String[] args) throws Exception {
		// set up the batch execution environment
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		/*
		 * Here, you can start creating your execution plan for Flink.
		 *
		 * Start with getting some data from the environment, like
		 * 	env.readTextFile(textPath);
		 *
		 * then, transform the resulting DataSet using operations
		 * like
		 * 	.filter()
		 * 	.flatMap()
		 * 	.join()
		 * 	.coGroup()
		 *
		 * and many more.
		 * Have a look at the programming guide for the Java API:
		 *
		 * http://flink.apache.org/docs/latest/apis/batch/index.html
		 *
		 * and the examples
		 *
		 * http://flink.apache.org/docs/latest/apis/batch/examples.html
		 *
		 */
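		// As a concrete illustration (not part of the original skeleton), the commented-out
		// sketch below builds a minimal word-count plan in the spirit of the comment above.
		// The input and output paths are hypothetical placeholders; uncommenting it would
		// also require importing org.apache.flink.api.java.DataSet,
		// org.apache.flink.api.common.functions.FlatMapFunction,
		// org.apache.flink.api.java.tuple.Tuple2 and org.apache.flink.util.Collector.
		//
		// DataSet<String> text = env.readTextFile("/path/to/input.txt");
		// DataSet<Tuple2<String, Integer>> counts = text
		// 		.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
		// 			@Override
		// 			public void flatMap(String line, Collector<Tuple2<String, Integer>> out) {
		// 				// emit (word, 1) for every word in the line
		// 				for (String word : line.toLowerCase().split("\\W+")) {
		// 					if (!word.isEmpty()) {
		// 						out.collect(new Tuple2<>(word, 1));
		// 					}
		// 				}
		// 			}
		// 		})
		// 		.groupBy(0)  // group by the word field of the tuple
		// 		.sum(1);     // sum the per-word counts
		// counts.writeAsText("/path/to/output"); // lazy sink, run by env.execute() below
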
		// execute program
		env.execute("Flink Batch Java API Skeleton");
	}
}