1 /*******************************************************************************
2 * ============LICENSE_START==================================================
4 * * ===========================================================================
5 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
6 * * ===========================================================================
7 * * Licensed under the Apache License, Version 2.0 (the "License");
8 * * you may not use this file except in compliance with the License.
9 * * You may obtain a copy of the License at
11 * * http://www.apache.org/licenses/LICENSE-2.0
13 * * Unless required by applicable law or agreed to in writing, software
14 * * distributed under the License is distributed on an "AS IS" BASIS,
15 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * * See the License for the specific language governing permissions and
17 * * limitations under the License.
18 * * ============LICENSE_END====================================================
20 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
22 ******************************************************************************/
23 package org.onap.dmaap.datarouter.node;
25 import com.att.eelf.configuration.EELFLogger;
26 import com.att.eelf.configuration.EELFManager;
27 import java.io.BufferedReader;
29 import java.io.FileReader;
30 import java.io.FileWriter;
31 import java.io.Writer;
32 import java.nio.file.Files;
33 import java.nio.file.Paths;
34 import java.util.Arrays;
35 import java.util.TimerTask;
36 import java.util.regex.Matcher;
37 import java.util.regex.Pattern;
40 * Cleanup of old log files.
42 * Periodically scan the log directory for log files that are older than the log file retention interval, and delete
43 them. In a future release, this class will also be responsible for uploading event logs to the log server to
44 * support the log query APIs.
47 public class LogManager extends TimerTask {
// Class logger.
48 private EELFLogger logger = EELFManager.getInstance().getLogger(LogManager.class);
// Node configuration (log directory, retention interval, event log interval); set in the constructor.
49 private NodeConfigManager config;
// Reusable matcher for node log file names ("node.log." + 8 digits); compiled in the constructor.
50 private Matcher isnodelog;
// Reusable matcher for event log file names ("events-" + 12 digits + ".log"); compiled in the constructor.
51 private Matcher iseventlog;
// Background thread that delivers spooled event logs to the log server.
52 private Uploader worker;
// Spool directory for queued uploads: logdir + "/.spool" (created in the constructor).
53 private String uploaddir;
// Directory containing this node's log files (config.getLogDir()).
54 private String logdir;
// Worker thread that delivers queued log files to the central log server.
// Implements DeliveryQueueHelper so it can drive its own DeliveryQueue.
56 private class Uploader extends Thread implements DeliveryQueueHelper {
57 private EELFLogger logger = EELFManager.getInstance().getLogger(Uploader.class);
// DeliveryQueueHelper retry/fairness tuning callbacks.
// NOTE(review): the bodies of these getters are not visible in this chunk —
// presumably they return fixed values or delegate to config; confirm against the full file.
59 public long getInitFailureTimer() {
63 public long getWaitForFileProcessFailureTimer() {
67 public double getFailureBackoff() {
71 public long getMaxFailureTimer() {
75 public long getExpirationTimer() {
79 public int getFairFileLimit() {
83 public long getFairTimeLimit() {
// Every spooled log file is delivered to the configured event log URL,
// regardless of destination info or file id.
87 public String getDestURL(DestInfo destinationInfo, String fileid) {
88 return (config.getEventLogUrl());
// Remaining DeliveryQueueHelper callbacks (bodies not visible in this chunk).
91 public void handleUnreachable(DestInfo destinationInfo) {
94 public boolean handleRedirection(DestInfo destinationInfo, String location, String fileid) {
98 public boolean isFollowRedirects() {
102 public String getFeedId(String subid) {
// Delivery queue that ships files from the upload spool directory.
106 private DeliveryQueue dq;
// Uploader constructor fragment (the constructor header is not visible in this chunk):
// build a synthetic "LogUpload" destination over the spool directory, authenticating
// as this node with its configured credentials; no per-subscription URL or redirects.
109 dq = new DeliveryQueue(this,
110 new DestInfo.DestInfoBuilder().setName("LogUpload").setSpool(uploaddir).setSubid(null).setLogdata(null)
111 .setUrl(null).setAuthuser(config.getMyName()).setAuthentication(config.getMyAuth())
112 .setMetaonly(false).setUse100(false).setPrivilegedSubscriber(false).setFollowRedirects(false)
113 .setDecompress(false).createDestInfo());
115 setName("Log Uploader");
// Pause the uploader thread between work; an interruption is logged, not rethrown.
// NOTE(review): the blocking call itself (presumably wait()) is not visible in this chunk.
119 private synchronized void snooze() {
122 } catch (Exception e) {
123 logger.error("InterruptedException", e);
// Wake the uploader thread.
// NOTE(review): the notify call is not visible in this chunk.
127 private synchronized void poke() {
// Scan the log directory: queue completed event logs for upload by hard-linking
// them into the spool directory, and delete managed logs older than the
// retention threshold. (Several interior lines are elided in this chunk.)
139 private void scan() {
// Files last modified before this instant are past retention and eligible for deletion.
140 long threshold = System.currentTimeMillis() - config.getLogRetention();
141 File dir = new File(logdir);
142 String[] fns = dir.list();
// Default resume point: sorts before every real event log name, so with no
// saved state all completed event logs are considered un-queued.
144 String lastqueued = "events-000000000000.log";
// Name (basename only) of the event log currently being written — never queued.
145 String curlog = StatusLog.getCurLogFile();
146 curlog = curlog.substring(curlog.lastIndexOf('/') + 1);
// Metadata template shared (via hard links below) by every file queued this pass.
148 Writer w = new FileWriter(uploaddir + "/.meta");
149 w.write("POST\tlogdata\nContent-Type\ttext/plain\n");
// Restore the resume point: name of the last event log already queued for upload.
151 BufferedReader br = new BufferedReader(new FileReader(uploaddir + "/.lastqueued"));
152 lastqueued = br.readLine();
// Best-effort: a missing/unreadable .lastqueued leaves the default resume point.
154 } catch (Exception e) {
155 logger.error("Exception", e);
157 for (String fn : fns) {
// Only node logs and event logs are managed; other directory entries are ignored.
158 if (!isnodelog.reset(fn).matches()) {
159 if (!iseventlog.reset(fn).matches()) {
// Queue event logs strictly newer (by name) than the last queued one and
// strictly older than the active log.
162 if (lastqueued.compareTo(fn) < 0 && curlog.compareTo(fn) > 0) {
165 String pid = config.getPublishId();
// Hard-link the log file (as the publish id) and the shared .meta template
// (as its ".M" companion) into the spool directory for the uploader.
166 Files.createLink(Paths.get(uploaddir + "/" + pid), Paths.get(logdir + "/" + fn));
167 Files.createLink(Paths.get(uploaddir + "/" + pid + ".M"), Paths.get(uploaddir + "/.meta"));
168 } catch (Exception e) {
169 logger.error("Exception", e);
// Retention cleanup: delete managed logs older than the threshold.
173 File f = new File(dir, fn);
174 if (f.lastModified() < threshold) {
// Persist the resume point and discard the shared .meta template for this pass.
178 try (Writer w = new FileWriter(uploaddir + "/.lastqueued")) {
179 (new File(uploaddir + "/.meta")).delete();
180 w.write(lastqueued + "\n");
181 } catch (Exception e) {
182 logger.error("Exception", e);
188 * Construct a log manager.
190 * The log manager will check for expired log files every 5 minutes at 20 seconds after the 5 minute boundary.
191 * (Actually, the interval is the event log rollover interval, which defaults to 5 minutes).
193 public LogManager(NodeConfigManager config) {
194 this.config = config;
// Compile the filename patterns once; scan() reuses them via reset(fn).
196 isnodelog = Pattern.compile("node\\.log\\.\\d{8}").matcher("");
197 iseventlog = Pattern.compile("events-\\d{12}\\.log").matcher("");
198 } catch (Exception e) {
199 logger.error("Exception", e);
// The upload spool directory lives under the log directory; ensure it exists.
201 logdir = config.getLogDir();
202 uploaddir = logdir + "/.spool";
203 (new File(uploaddir)).mkdirs();
// Schedule this TimerTask at the event log rollover interval (30000 ms fallback),
// first firing 20 seconds past the next interval boundary.
204 long now = System.currentTimeMillis();
205 long intvl = StatusLog.parseInterval(config.getEventLogInterval(), 30000);
206 long when = now - now % intvl + intvl + 20000L;
207 config.getTimer().scheduleAtFixedRate(this, when - now, intvl);
208 worker = new Uploader();
212 * Trigger check for expired log files and log files to upload