/*
 * Copyright (c) 2016 Uber Technologies, Inc. (hoodie-dev-group@uber.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *          http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.uber.hoodie.func;

import com.uber.hoodie.WriteStatus;
import com.uber.hoodie.common.model.HoodieRecord;
import com.uber.hoodie.common.model.HoodieRecordPayload;
import com.uber.hoodie.common.model.HoodieTableMetadata;
import com.uber.hoodie.config.HoodieWriteConfig;

import org.apache.spark.api.java.function.Function2;

import java.util.Iterator;
import java.util.List;

/**
 * Spark {@link Function2} applied per partition to insert a sorted stream of
 * {@link HoodieRecord}s, producing the resulting {@link WriteStatus}es.
 *
 * <p>NOTE(review): the generic bounds below were reconstructed — the checked-in
 * text had all {@code <...>} type parameters stripped (extraction garbling) and
 * did not compile. The signature follows from the {@code call} body, which
 * delegates to {@code LazyInsertIterable}.
 *
 * @param <T> payload type carried by the records being inserted
 */
public class InsertMapFunction<T extends HoodieRecordPayload>
    implements Function2<Integer, Iterator<HoodieRecord<T>>, Iterator<List<WriteStatus>>> {

  // Immutable per-job context captured at construction; the function itself is stateless
  // across invocations, so these are final.
  private final String commitTime;
  private final HoodieWriteConfig config;
  private final HoodieTableMetadata metadata;

  /**
   * @param commitTime commit instant the inserted records will be attributed to
   * @param config     write configuration forwarded to the insert handle
   * @param metadata   table metadata forwarded to the insert handle
   */
  public InsertMapFunction(String commitTime, HoodieWriteConfig config,
      HoodieTableMetadata metadata) {
    this.commitTime = commitTime;
    this.config = config;
    this.metadata = metadata;
  }

  /**
   * Lazily writes the sorted records for one Spark partition.
   *
   * @param partition       Spark partition index (unused by the insert path)
   * @param sortedRecordItr records for this partition, pre-sorted by the caller
   * @return a lazy iterator of per-file write statuses
   */
  @Override
  public Iterator<List<WriteStatus>> call(Integer partition,
      Iterator<HoodieRecord<T>> sortedRecordItr) throws Exception {
    return new LazyInsertIterable<>(sortedRecordItr, config, commitTime, metadata);
  }
}