/*
 * Copyright 2014 Michal Harish, [email protected]
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.amient.kafka.hadoop;

import io.amient.kafka.hadoop.io.KafkaInputFormat;
import io.amient.kafka.hadoop.io.MultiOutputFormat;
import io.amient.kafka.hadoop.testutils.MyJsonTimestampExtractor;
import io.amient.kafka.hadoop.testutils.SystemTestBase;
import kafka.producer.KeyedMessage;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

import java.io.IOException;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

public class TimestampExtractorSystemTest extends SystemTestBase {

    @Test
    public void canUseTimestampInPartitions() throws IOException, ClassNotFoundException, InterruptedException {

        // produce some JSON test data; different keys route the messages to different
        // Kafka partitions, which show up in the output file names asserted below
        String message5 = "{\"version\":5,\"timestamp\":1402944501425,\"id\": 1}";
        simpleProducer.send(new KeyedMessage<>("topic02", "1", message5));
        String message1 = "{\"version\":1,\"timestamp\":1402945801425,\"id\": 2}";
        simpleProducer.send(new KeyedMessage<>("topic02", "2", message1));
        String message6 = "{\"version\":6,\"timestamp\":1402948801425,\"id\": 1}";
        simpleProducer.send(new KeyedMessage<>("topic02", "1", message6));
        // a null payload is expected to be skipped when a timestamp extractor is configured
        simpleProducer.send(new KeyedMessage<>("topic02", "1", (String) null));

        // configure inputs, the timestamp extractor and the output path format
        KafkaInputFormat.configureKafkaTopics(conf, "topic02");
        KafkaInputFormat.configureZkConnection(conf, zkConnect);
        HadoopJobMapper.configureTimestampExtractor(conf, MyJsonTimestampExtractor.class.getName());
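        // in the path format below, {T} expands to the topic name while the quoted
        // yyyy-MM-dd/HH parts are rendered from each message's extracted timestamp
        // (see the t=topic02/d=2014-06-16/h=18..20 directories asserted further down)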
        MultiOutputFormat.configurePathFormat(conf, "'t={T}/d='yyyy-MM-dd'/h='HH");

        Path outDir = runSimpleJob("topic02", "canUseTimestampInPartitions");

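        // each expected file name appears to follow <topic>-<partition>-<startOffset>;
        // the hourly directory is derived from the timestamp embedded in the JSON payload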
        Path h18 = new Path(outDir, "t=topic02/d=2014-06-16/h=18/topic02-1-0000000000000000000");
        assertTrue(localFileSystem.exists(h18));
        assertEquals(String.format("%s%n", message5), readFullyAsString(h18, 100));

        Path h19 = new Path(outDir, "t=topic02/d=2014-06-16/h=19/topic02-0-0000000000000000000");
        assertTrue(localFileSystem.exists(h19));
        assertEquals(String.format("%s%n", message1), readFullyAsString(h19, 100));

        Path h20 = new Path(outDir, "t=topic02/d=2014-06-16/h=20/topic02-1-0000000000000000000");
        assertTrue(localFileSystem.exists(h20));
        assertEquals(String.format("%s%n", message6), readFullyAsString(h20, 100));
    }

}
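
/*
 * A minimal sketch (not part of the original commit) of the extraction logic that a
 * JSON timestamp extractor such as MyJsonTimestampExtractor might use. Assumptions:
 * the real class lives in io.amient.kafka.hadoop.testutils and is wired in via
 * HadoopJobMapper.configureTimestampExtractor(..) above; Jackson is available on the
 * test classpath; the helper below only illustrates reading the "timestamp" field and
 * does not claim to match the project's actual extractor interface.
 */
class JsonTimestampExtractionSketch {

    private static final com.fasterxml.jackson.databind.ObjectMapper MAPPER =
            new com.fasterxml.jackson.databind.ObjectMapper();

    /**
     * Returns the epoch-millis "timestamp" field of a JSON payload, or null when the
     * payload is missing or carries no timestamp - a null result would mean "skip the
     * message", mirroring the null-payload case exercised by the test above.
     */
    static Long extractTimestamp(byte[] payload) throws java.io.IOException {
        if (payload == null || payload.length == 0) return null;
        com.fasterxml.jackson.databind.JsonNode json = MAPPER.readTree(payload);
        return json.hasNonNull("timestamp") ? json.get("timestamp").asLong() : null;
    }
}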