1 /*******************************************************************************
2 * ============LICENSE_START==================================================
4 * * ===========================================================================
5 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
6 * * ===========================================================================
7 * * Licensed under the Apache License, Version 2.0 (the "License");
8 * * you may not use this file except in compliance with the License.
9 * * You may obtain a copy of the License at
11 * * http://www.apache.org/licenses/LICENSE-2.0
13 * * Unless required by applicable law or agreed to in writing, software
14 * * distributed under the License is distributed on an "AS IS" BASIS,
15 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * * See the License for the specific language governing permissions and
17 * * limitations under the License.
18 * * ============LICENSE_END====================================================
20 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
22 ******************************************************************************/
23 package org.onap.dmaap.datarouter.node;
26 import java.util.regex.*;
28 import java.nio.file.*;
32 * Cleanup of old log files.
34 * Periodically scan the log directory for log files that are older than
35 * the log file retention interval, and delete them. In a future release,
36 * this class will also be responsible for uploading event logs to the
37 * log server to support the log query APIs.
// NOTE(review): every line below carries a leading integer -- line-number residue
// from whatever tool extracted this chunk; gaps in those integers mean original
// source lines are missing from this view. Code tokens are left untouched here.
40 public class LogManager extends TimerTask {
// Source of all tunables: log directory, retention window, event-log rollover
// interval, upload URL, and node identity/credentials (see constructor and scan()).
41 private NodeConfigManager config;
// Reusable matcher for node log file names; pattern "node\.log\.\d{8}" is
// compiled once in the constructor and retargeted per file via reset(fn).
42 private Matcher isnodelog;
// Reusable matcher for event log file names ("events-NNNNNNNNNNNN.log").
43 private Matcher iseventlog;
// Background thread that ships spooled log files to the log server.
44 private Uploader worker;
// Spool directory (logdir + "/.spool") holding hard links of files queued for upload.
45 private String uploaddir;
// Directory scanned for expired log files and event logs to upload.
46 private String logdir;
// Delivery worker: implements DeliveryQueueHelper so a DeliveryQueue can drain
// the spool directory to the log server.
// NOTE(review): non-static inner class -- it deliberately pins the enclosing
// LogManager, since it reads config and uploaddir from it.
48 private class Uploader extends Thread implements DeliveryQueueHelper {
// DeliveryQueueHelper tuning getters. Their return statements fall in lines
// missing from this extract (internal numbering jumps 49->53->57->...), so the
// actual timer/backoff/limit values cannot be documented from here -- TODO
// confirm the constants against the full file in version control.
49 public long getInitFailureTimer() {
53 public double getFailureBackoff() {
57 public long getMaxFailureTimer() {
61 public long getExpirationTimer() {
65 public int getFairFileLimit() {
69 public long getFairTimeLimit() {
// Every spooled file is delivered to the configured event-log URL,
// regardless of the destination or file id passed in.
73 public String getDestURL(DestInfo dest, String fileid) {
74 return (config.getEventLogUrl());
// Remaining helper callbacks; bodies are in missing lines, so their
// return values (redirect policy, feed id mapping) are unverifiable here.
77 public void handleUnreachable(DestInfo dest) {
80 public boolean handleRedirection(DestInfo dest, String location, String fileid) {
84 public boolean isFollowRedirects() {
88 public String getFeedId(String subid) {
// Queue of spooled files awaiting delivery to the log server.
92 private DeliveryQueue dq;
// Constructor fragment: the queue's pseudo-destination is the spool directory
// itself, authenticated with this node's own name and credentials.
95 dq = new DeliveryQueue(this, new DestInfo("LogUpload", uploaddir, null, null, null, config.getMyName(), config.getMyAuth(), false, false));
97 setName("Log Uploader");
// Bounded wait helper; the wait() call and its timeout are in missing lines
// (102-103) -- presumably a timed wait() that poke() can cut short. TODO confirm.
101 private synchronized void snooze() {
104 } catch (Exception e) {
// Wakes the uploader; body is missing from this extract (presumably notify()).
108 private synchronized void poke() {
// Scans logdir: hard-links finished event logs into the spool directory for
// upload, and deletes any log file older than the retention window.
120 private void scan() {
// Files last modified before this instant have outlived the retention period.
121 long threshold = System.currentTimeMillis() - config.getLogRetention();
122 File dir = new File(logdir);
123 String[] fns = dir.list();
// Lexicographic low-water mark: event log names embed a zero-padded timestamp,
// so plain string comparison orders them chronologically.
125 String lastqueued = "events-000000000000.log";
126 String curlog = StatusLog.getCurLogFile();
// Strip the directory part; all comparisons below are against bare file names.
127 curlog = curlog.substring(curlog.lastIndexOf('/') + 1);
// Shared ".meta" companion describing how spooled files are POSTed.
// NOTE(review): the close() for this Writer falls in lines missing from this
// extract -- verify it is closed on all paths (ideally try-with-resources).
129 Writer w = new FileWriter(uploaddir + "/.meta");
130 w.write("POST\tlogdata\nContent-Type\ttext/plain\n");
// Resume point persisted by a previous scan; if the file is absent, the
// catch below leaves the default low-water mark in place.
132 BufferedReader br = new BufferedReader(new FileReader(uploaddir + "/.lastqueued"));
133 lastqueued = br.readLine();
135 } catch (Exception e) {
137 for (String fn : fns) {
138 if (!isnodelog.reset(fn).matches()) {
139 if (!iseventlog.reset(fn).matches()) {
// Queue only completed event logs: strictly newer than the last one queued
// and strictly older than the log currently being written.
142 if (lastqueued.compareTo(fn) < 0 && curlog.compareTo(fn) > 0) {
145 String pid = config.getPublishId();
// Hard-link the log (and the shared .meta) into the spool directory so the
// uploader can deliver and remove it without disturbing the original file.
146 Files.createLink(Paths.get(uploaddir + "/" + pid), Paths.get(logdir + "/" + fn));
147 Files.createLink(Paths.get(uploaddir + "/" + pid + ".M"), Paths.get(uploaddir + "/.meta"));
// NOTE(review): catch body is in missing lines -- confirm the failure is at
// least logged rather than silently swallowed.
148 } catch (Exception e) {
// Retention cleanup applies to both node and event logs.
152 File f = new File(dir, fn);
153 if (f.lastModified() < threshold) {
158 (new File(uploaddir + "/.meta")).delete();
// Persist the new low-water mark for the next scan.
// NOTE(review): as with the .meta Writer above, the close() is outside this
// extract -- verify the file is flushed/closed on all paths.
159 Writer w = new FileWriter(uploaddir + "/.lastqueued");
160 w.write(lastqueued + "\n");
162 } catch (Exception e) {
168 * Construct a log manager
170 * The log manager will check for expired log files every 5 minutes
171 * at 20 seconds after the 5 minute boundary. (Actually, the
172 * interval is the event log rollover interval, which
173 * defaults to 5 minutes).
// Wires up matchers, the spool directory, the periodic scan, and the uploader
// thread. (Closing lines of this constructor fall outside this extract.)
175 public LogManager(NodeConfigManager config) {
176 this.config = config;
// Compile the file-name patterns once; matcher("") creates reusable matchers
// that scan() retargets per file name via reset(fn).
178 isnodelog = Pattern.compile("node\\.log\\.\\d{8}").matcher("");
179 iseventlog = Pattern.compile("events-\\d{12}\\.log").matcher("");
// NOTE(review): catch body is in missing lines -- Pattern.compile on these
// literals cannot fail, but confirm the catch does not hide anything else.
180 } catch (Exception e) {
182 logdir = config.getLogDir();
// Spool directory for files queued to the uploader; created eagerly.
183 uploaddir = logdir + "/.spool";
184 (new File(uploaddir)).mkdirs();
185 long now = System.currentTimeMillis();
// Rollover interval, defaulting to 300000 ms (5 min); the first run is
// scheduled 20 s after the next interval boundary, matching the class javadoc.
186 long intvl = StatusLog.parseInterval(config.getEventLogInterval(), 300000);
187 long when = now - now % intvl + intvl + 20000L;
188 config.getTimer().scheduleAtFixedRate(this, when - now, intvl);
189 worker = new Uploader();
193 * Trigger check for expired log files and log files to upload