/*******************************************************************************
 * ============LICENSE_START==================================================
 * * ===========================================================================
 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 * * ===========================================================================
 * * Licensed under the Apache License, Version 2.0 (the "License");
 * * you may not use this file except in compliance with the License.
 * * You may obtain a copy of the License at
 * *
 * *      http://www.apache.org/licenses/LICENSE-2.0
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS,
 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * * See the License for the specific language governing permissions and
 * * limitations under the License.
 * * ============LICENSE_END====================================================
 * *
 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
 * *
 ******************************************************************************/
package org.onap.dmaap.datarouter.node;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.TimerTask;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Cleanup of old log files.
 * <p>
 * Periodically scan the log directory for log files that are older than the log file retention interval, and delete
 * them.  In a future release, this class will also be responsible for uploading event logs to the log server to
 * support the log query APIs.
 */
public class LogManager extends TimerTask {

    private NodeConfigManager config;
    private Matcher isnodelog;
    private Matcher iseventlog;
    private Uploader worker;
    private String uploaddir;
    private String logdir;
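
    /*
     * Background thread that uploads queued log files.  It reuses the node's own delivery
     * machinery (a DeliveryQueue driven through DeliveryQueueHelper), so log upload gets the
     * same retry and backoff handling as ordinary file delivery.
     */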
    private class Uploader extends Thread implements DeliveryQueueHelper {

        private DeliveryQueue dq;

        // Retry policy handed to the DeliveryQueue; the constants below are assumed defaults.
        public long getInitFailureTimer() { return (10000L); }
        public double getFailureBackoff() { return (2.0); }
        public long getMaxFailureTimer() { return (150000L); }
        public long getExpirationTimer() { return (604800000L); }
        public int getFairFileLimit() { return (10000); }
        public long getFairTimeLimit() { return (86400000L); }

        public String getDestURL(DestInfo dest, String fileid) {
            return (config.getEventLogUrl());
        }

        public void handleUnreachable(DestInfo dest) { /* rely on the queue's normal retry */ }
        public boolean handleRedirection(DestInfo dest, String location, String fileid) { return (false); }
        public boolean isFollowRedirects() { return (false); }
        public String getFeedId(String subid) { return (null); }

        public Uploader() {
            dq = new DeliveryQueue(this,
                new DestInfo("LogUpload", uploaddir, null, null, null, config.getMyName(), config.getMyAuth(), false,
                    false)); // trailing boolean assumed to complete the DestInfo constructor
            setName("Log Uploader");
            setDaemon(true); // assumed: do not block JVM shutdown
            start();
        }

        private synchronized void snooze() {
            try {
                wait(10000); // assumed 10-second snooze between upload passes
            } catch (Exception e) {
                // interrupted while waiting; just rescan
            }
        }

        private synchronized void poke() {
            notify();
        }
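
        // Minimal worker loop (assumed): queue newly rolled-over logs, let the delivery queue
        // push them, then wait until poked or until the snooze times out.
        public void run() {
            while (true) {
                scan();
                dq.run();
                snooze();
            }
        }

        /*
         * One pass over the log directory: event logs that rolled over since the last scan are
         * queued for upload by hard-linking them into the spool directory under the node's
         * publish ID, next to a ".M" link to the shared ".meta" upload metadata; any log file
         * older than the retention threshold is deleted.
         */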
        private void scan() {
            long threshold = System.currentTimeMillis() - config.getLogRetention();
            File dir = new File(logdir);
            String[] fns = dir.list();
            if (fns == null) {
                return; // log directory missing or unreadable
            }
            Arrays.sort(fns);
            String lastqueued = "events-000000000000.log";
            String curlog = StatusLog.getCurLogFile();
            curlog = curlog.substring(curlog.lastIndexOf('/') + 1);
            try {
                // Shared metadata for every queued file: upload it as a text/plain POST of log data.
                Writer w = new FileWriter(uploaddir + "/.meta");
                w.write("POST\tlogdata\nContent-Type\ttext/plain\n");
                w.close();
                BufferedReader br = new BufferedReader(new FileReader(uploaddir + "/.lastqueued"));
                lastqueued = br.readLine();
                br.close();
            } catch (Exception e) {
                // No .lastqueued yet (for example, on first run); keep the default marker.
            }
            for (String fn : fns) {
                if (!isnodelog.reset(fn).matches()) {
                    if (!iseventlog.reset(fn).matches()) {
                        continue;
                    }
                    // Queue event logs newer than the last queued file but older than the current log.
                    if (lastqueued.compareTo(fn) < 0 && curlog.compareTo(fn) > 0) {
                        lastqueued = fn;
                        try {
                            String pid = config.getPublishId();
                            Files.createLink(Paths.get(uploaddir + "/" + pid), Paths.get(logdir + "/" + fn));
                            Files.createLink(Paths.get(uploaddir + "/" + pid + ".M"), Paths.get(uploaddir + "/.meta"));
                        } catch (Exception e) {
                            // Could not link into the spool (already queued or spool unavailable); skip it.
                        }
                    }
                }
                File f = new File(dir, fn);
                if (f.lastModified() < threshold) {
                    f.delete();
                }
            }
            try (Writer w = new FileWriter(uploaddir + "/.lastqueued")) {
                (new File(uploaddir + "/.meta")).delete();
                w.write(lastqueued + "\n");
            } catch (Exception e) {
                // Best effort; a failure here only means some files may be re-queued on the next pass.
            }
        }
    }
    /**
     * Construct a log manager.
     * <p>
     * The log manager will check for expired log files every 5 minutes, at 20 seconds after the 5-minute boundary.
     * (More precisely, the interval is the event log rollover interval, which defaults to 5 minutes.)
     */
    public LogManager(NodeConfigManager config) {
        this.config = config;
        try {
            isnodelog = Pattern.compile("node\\.log\\.\\d{8}").matcher("");
            iseventlog = Pattern.compile("events-\\d{12}\\.log").matcher("");
        } catch (Exception e) {
            // Cannot happen: both patterns are constant and valid.
        }
        logdir = config.getLogDir();
        uploaddir = logdir + "/.spool";
        (new File(uploaddir)).mkdirs(); // make sure the spool directory exists
        long now = System.currentTimeMillis();
        long intvl = StatusLog.parseInterval(config.getEventLogInterval(), 300000);
        long when = now - now % intvl + intvl + 20000L;
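        // Example: with the default 5-minute interval (intvl = 300000) and now = 12:07:15,
        // "when" is 12:10:20 -- the next interval boundary plus 20 seconds -- so the first run
        // is delayed by (when - now) and the task then repeats every intvl milliseconds.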
        config.getTimer().scheduleAtFixedRate(this, when - now, intvl);
        worker = new Uploader();
    }
    /**
     * Trigger check for expired log files and log files to upload.
     */
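    public void run() {
        // Minimal trigger body (assumed): each timer firing just wakes the uploader thread,
        // which then rescans the log directory and queues, uploads, and deletes as needed.
        worker.poke();
    }
}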