/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.falcon.regression.SLA;

import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.Frequency;
import org.apache.falcon.regression.Entities.FeedMerlin;
import org.apache.falcon.regression.core.bundle.Bundle;
import org.apache.falcon.regression.core.helpers.ColoHelper;
import org.apache.falcon.regression.core.response.ServiceResponse;
import org.apache.falcon.regression.core.util.HadoopUtil;
import org.apache.falcon.regression.core.util.InstanceUtil;
import org.apache.falcon.regression.core.util.TimeUtil;
import org.apache.falcon.regression.core.util.AssertUtil;
import org.apache.falcon.regression.core.util.BundleUtil;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.falcon.resource.SchedulableEntityInstance;
import org.apache.falcon.resource.SchedulableEntityInstanceResult;
import org.apache.hadoop.fs.FileSystem;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.io.IOException;
import java.util.List;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.ArrayList;
import java.util.Collections;


/**
 * Feed SLA monitoring tests.
 * The test assumes the following properties are set in the server's startup.properties:
 * *.feed.sla.statusCheck.frequency.seconds=60
 * *.feed.sla.lookAheadWindow.millis=60000
 */
@Test(groups = { "distributed", "embedded" })
public class FeedSLAMonitoringTest extends BaseTestClass {

    private ColoHelper cluster = servers.get(0);
    private FileSystem clusterFS = serverFS.get(0);
    private String baseTestHDFSDir = cleanAndGetTestDir();
    private String feedInputPath = baseTestHDFSDir + "/input" + MINUTE_DATE_PATTERN;
    private List<String> slaFeedNames;
    private List<Frequency> slaFeedFrequencies;
    private String clusterName;
    private static final Logger LOGGER = Logger.getLogger(FeedSLAMonitoringTest.class);

    private String startTime;
    private String endTime;
    private String slaStartTime;
    private String slaEndTime;
    private int noOfFeeds;
    private int statusCheckFrequency;

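    // Sorts SchedulableEntityInstance objects by their natural ordering so that the expected
    // and actual instance lists can be compared deterministically.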
    private static final Comparator<SchedulableEntityInstance> DEPENDENCY_COMPARATOR =
        new Comparator<SchedulableEntityInstance>() {
            @Override
            public int compare(SchedulableEntityInstance o1, SchedulableEntityInstance o2) {
                return o1.compareTo(o2);
            }
        };

    /**
     * Submits three feeds with different frequencies and SLA values.
     * @throws Exception on setup failure
     */
    @BeforeMethod(alwaysRun = true)
    public void setup() throws Exception {

        bundles[0] = BundleUtil.readELBundle();
        bundles[0] = new Bundle(bundles[0], cluster);
        bundles[0].generateUniqueBundle(this);
        bundles[0].setInputFeedDataPath(feedInputPath);
        clusterName = bundles[0].getClusterNames().get(0);
        ServiceResponse response =
            prism.getClusterHelper().submitEntity(bundles[0].getClusters().get(0));
        AssertUtil.assertSucceeded(response);

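        // The validity window starts 10 minutes in the past, so the earliest feed instances
        // are already overdue once SLA monitoring picks them up.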
        startTime = TimeUtil.getTimeWrtSystemTime(-10);
        endTime = TimeUtil.addMinsToTime(startTime, 20);
        noOfFeeds = 3;

        LOGGER.info("Time range between : " + startTime + " and " + endTime);
        final String oldFeedName = bundles[0].getInputFeedNameFromBundle();
        slaFeedFrequencies = Arrays.asList(new Frequency("1", Frequency.TimeUnit.minutes),
            new Frequency("2", Frequency.TimeUnit.minutes),
            new Frequency("4", Frequency.TimeUnit.minutes));

        slaFeedNames = Arrays.asList(oldFeedName + "-1", oldFeedName + "-2", oldFeedName + "-3");

        //Submit 3 feeds with different frequencies and sla values.
        for (int bIndex = 0; bIndex < noOfFeeds; ++bIndex) {
            final FeedMerlin ipFeed = new FeedMerlin(bundles[0].getInputFeedFromBundle());

            ipFeed.setValidity(startTime, endTime);
            ipFeed.setAvailabilityFlag("_SUCCESS");

            //set slaLow and slaHigh
            ipFeed.setSla(new Frequency("1", Frequency.TimeUnit.minutes),
                new Frequency("2", Frequency.TimeUnit.minutes));
            ipFeed.setName(slaFeedNames.get(bIndex));
            ipFeed.setFrequency(slaFeedFrequencies.get(bIndex));

            LOGGER.info("Feed is : " + ipFeed.toString());

            AssertUtil.assertSucceeded(prism.getFeedHelper().submitEntity(ipFeed.toString()));
        }
    }

    @AfterMethod(alwaysRun = true)
    public void tearDown() throws IOException {
        cleanTestsDirs();
        removeTestClassEntities();
    }

    /**
     * Submits three feeds, queries the slaAlert API for a given time range and validates its output.
     * It also checks the SLA status after a feed is deleted and after data is created with and
     * without the _SUCCESS availability flag.
     * @throws Exception on test failure
     */
    @Test
    public void feedSLATest() throws Exception {
        /** TEST : Check sla response for a given time range. */

        statusCheckFrequency = 60; // seconds; matches *.feed.sla.statusCheck.frequency.seconds

        // Map of instanceDate and corresponding list of SchedulableEntityInstance
        Map<String, List<SchedulableEntityInstance>> instanceEntityMap = new HashMap<>();

        slaStartTime = startTime;
        slaEndTime = TimeUtil.addMinsToTime(slaStartTime, 10);
        DateTime slaStartDate = TimeUtil.oozieDateToDate(slaStartTime);
        DateTime slaEndDate = TimeUtil.oozieDateToDate(slaEndTime);

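        // Build the expected alert list: with no data on HDFS yet, every instance of every feed
        // in [slaStartTime, slaEndTime] should be reported as "Missed SLA High".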
        List<SchedulableEntityInstance> expectedInstances = new ArrayList<>();
        SchedulableEntityInstance expectedSchedulableEntityInstance;

        for (int index = 0; index < noOfFeeds; ++index) {

            DateTime dt = new DateTime(slaStartDate);
            while (!dt.isAfter(slaEndDate)) {

                expectedSchedulableEntityInstance = new SchedulableEntityInstance(slaFeedNames.get(index),
                    clusterName, dt.toDate(), EntityType.FEED);
                expectedSchedulableEntityInstance.setTags("Missed SLA High");
                expectedInstances.add(expectedSchedulableEntityInstance);

                if (!instanceEntityMap.containsKey(dt.toString())) {
                    instanceEntityMap.put(dt.toString(), new ArrayList<SchedulableEntityInstance>());
                }
                instanceEntityMap.get(dt.toString()).add(expectedSchedulableEntityInstance);
                dt = dt.plusMinutes(slaFeedFrequencies.get(index).getFrequencyAsInt());
            }
        }

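        // Wait one status-check cycle so the server's SLA monitoring service refreshes its view
        // before the slaAlert API is queried.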
        TimeUtil.sleepSeconds(statusCheckFrequency);

        SchedulableEntityInstanceResult response = prism.getFeedHelper().getSlaAlert(
            "?start=" + slaStartTime + "&end=" + slaEndTime).getSlaResult();

        LOGGER.info(response.getMessage());

        validateInstances(response, expectedInstances);

        /** TEST : Create missing dependencies with _SUCCESS directory and check sla response. */

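        // Pick one pending instance date and create its data directory, initially without the
        // _SUCCESS availability flag.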
        String dateEntry = (String) instanceEntityMap.keySet().toArray()[1];
        LOGGER.info(dateEntry + "/" + instanceEntityMap.get(dateEntry));
        List<String> dataDates = InstanceUtil.getMinuteDatesToPath(dateEntry, dateEntry, 0);

        HadoopUtil.createFolders(clusterFS, baseTestHDFSDir + "/input/", dataDates);

        //sla response for feeds when the _SUCCESS flag is missing from the data path
        response = prism.getFeedHelper().getSlaAlert("?start=" + slaStartTime + "&end=" + slaEndTime).getSlaResult();

        //Response does not change because the SLA check looks for the _SUCCESS availability flag
        validateInstances(response, expectedInstances);

        //Create the _SUCCESS folder (availability flag) and drop the now-satisfied instances
        HadoopUtil.recreateDir(clusterFS, baseTestHDFSDir + "/input/" + dataDates.get(0) + "/_SUCCESS");
        for (SchedulableEntityInstance instance : instanceEntityMap.get(dateEntry)) {
            expectedInstances.remove(instance);
        }
        instanceEntityMap.remove(dateEntry);

        TimeUtil.sleepSeconds(statusCheckFrequency);

        //sla response for feeds when the _SUCCESS flag is available in the data path
        response = prism.getFeedHelper().getSlaAlert("?start=" + slaStartTime + "&end=" + slaEndTime).getSlaResult();
        validateInstances(response, expectedInstances);

        /** TEST : Delete feed and check sla response. */
        String deletedFeed = slaFeedNames.get(0);
        prism.getFeedHelper().deleteByName(deletedFeed, null);

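        // Once a feed is deleted, its pending instances should disappear from the slaAlert
        // response, so drop them from the expected list as well.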
        for (Map.Entry<String, List<SchedulableEntityInstance>> entry : instanceEntityMap.entrySet()) {
            LOGGER.info(entry.getKey() + "/" + entry.getValue());
            for (SchedulableEntityInstance instance : entry.getValue()) {
                if (instance.getEntityName().equals(deletedFeed)) {
                    expectedInstances.remove(instance);
                }
            }
        }

        TimeUtil.sleepSeconds(statusCheckFrequency);
        response = prism.getFeedHelper().getSlaAlert("?start=" + slaStartTime + "&end=" + slaEndTime).getSlaResult();
        validateInstances(response, expectedInstances);
    }

    /**
     * Validates the actual slaAlert response against the expected instances.
     * @param response SchedulableEntityInstanceResult response
     * @param expectedInstances list of expected instances
     */
    private static void validateInstances(SchedulableEntityInstanceResult response,
                                          List<SchedulableEntityInstance> expectedInstances) {

        List<SchedulableEntityInstance> actualInstances = Arrays.asList(response.getInstances());

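        // Normalise the tags on the actual instances to the expected value so that the comparison
        // effectively checks only entity name, cluster, instance time and entity type.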
        for (SchedulableEntityInstance instance : actualInstances) {
            instance.setTags("Missed SLA High");
        }

        Collections.sort(expectedInstances, DEPENDENCY_COMPARATOR);
        Collections.sort(actualInstances, DEPENDENCY_COMPARATOR);

        Assert.assertEquals(actualInstances, expectedInstances, "Instances mismatch for slaAlert response");
    }
}