1 /*******************************************************************************
\r
2 * ============LICENSE_START==================================================
\r
4 * * ===========================================================================
\r
5 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
\r
6 * * ===========================================================================
\r
7 * * Licensed under the Apache License, Version 2.0 (the "License");
\r
8 * * you may not use this file except in compliance with the License.
\r
9 * * You may obtain a copy of the License at
\r
11 * * http://www.apache.org/licenses/LICENSE-2.0
\r
13 * * Unless required by applicable law or agreed to in writing, software
\r
14 * * distributed under the License is distributed on an "AS IS" BASIS,
\r
15 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
\r
16 * * See the License for the specific language governing permissions and
\r
17 * * limitations under the License.
\r
18 * * ============LICENSE_END====================================================
\r
20 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
\r
22 ******************************************************************************/
\r
23 package com.att.research.datarouter.node;
\r
import java.io.*;
import java.nio.file.*;
import java.util.*;
import java.util.regex.*;
\r
32 * Cleanup of old log files.
\r
 * Periodically scan the log directory for log files that are older than
 * the log file retention interval, and delete them.  In a future release,
 * this class will also be responsible for uploading event logs to the
 * log server to support the log query APIs.
\r
40 public class LogManager extends TimerTask {
\r
41 private NodeConfigManager config;
\r
42 private Matcher isnodelog;
\r
43 private Matcher iseventlog;
\r
44 private Uploader worker;
\r
45 private String uploaddir;
\r
46 private String logdir;
\r
47 private class Uploader extends Thread implements DeliveryQueueHelper {
\r
48 public long getInitFailureTimer() { return(10000L); }
\r
49 public double getFailureBackoff() { return(2.0); }
\r
50 public long getMaxFailureTimer() { return(150000L); }
\r
51 public long getExpirationTimer() { return(604800000L); }
\r
52 public int getFairFileLimit() { return(10000); }
\r
53 public long getFairTimeLimit() { return(86400000); }
\r
54 public String getDestURL(DestInfo dest, String fileid) {
\r
55 return(config.getEventLogUrl());
\r
57 public void handleUnreachable(DestInfo dest) {}
\r
58 public boolean handleRedirection(DestInfo dest, String location, String fileid) { return(false); }
\r
59 public boolean isFollowRedirects() { return(false); }
\r
60 public String getFeedId(String subid) { return(null); }
\r
61 private DeliveryQueue dq;
\r
63 dq = new DeliveryQueue(this, new DestInfo("LogUpload", uploaddir, null, null, null, config.getMyName(), config.getMyAuth(), false, false));
\r
65 setName("Log Uploader");
\r
68 private synchronized void snooze() {
\r
71 } catch (Exception e) {
\r
74 private synchronized void poke() {
\r
84 private void scan() {
\r
85 long threshold = System.currentTimeMillis() - config.getLogRetention();
\r
86 File dir = new File(logdir);
\r
87 String[] fns = dir.list();
\r
89 String lastqueued = "events-000000000000.log";
\r
90 String curlog = StatusLog.getCurLogFile();
\r
91 curlog = curlog.substring(curlog.lastIndexOf('/') + 1);
\r
93 Writer w = new FileWriter(uploaddir + "/.meta");
\r
94 w.write("POST\tlogdata\nContent-Type\ttext/plain\n");
\r
96 BufferedReader br = new BufferedReader(new FileReader(uploaddir + "/.lastqueued"));
\r
97 lastqueued = br.readLine();
\r
99 } catch (Exception e) {
\r
101 for (String fn: fns) {
\r
102 if (!isnodelog.reset(fn).matches()) {
\r
103 if (!iseventlog.reset(fn).matches()) {
\r
106 if (lastqueued.compareTo(fn) < 0 && curlog.compareTo(fn) > 0) {
\r
109 String pid = config.getPublishId();
\r
110 Files.createLink(Paths.get(uploaddir + "/" + pid), Paths.get(logdir + "/" + fn));
\r
111 Files.createLink(Paths.get(uploaddir + "/" + pid + ".M"), Paths.get(uploaddir + "/.meta"));
\r
112 } catch (Exception e) {
\r
116 File f = new File(dir, fn);
\r
117 if (f.lastModified() < threshold) {
\r
122 (new File(uploaddir + "/.meta")).delete();
\r
123 Writer w = new FileWriter(uploaddir + "/.lastqueued");
\r
124 w.write(lastqueued + "\n");
\r
126 } catch (Exception e) {
\r
131 * Construct a log manager
\r
133 * The log manager will check for expired log files every 5 minutes
\r
134 * at 20 seconds after the 5 minute boundary. (Actually, the
\r
135 * interval is the event log rollover interval, which
\r
136 * defaults to 5 minutes).
\r
138 public LogManager(NodeConfigManager config) {
\r
139 this.config = config;
\r
141 isnodelog = Pattern.compile("node\\.log\\.\\d{8}").matcher("");
\r
142 iseventlog = Pattern.compile("events-\\d{12}\\.log").matcher("");
\r
143 } catch (Exception e) {}
\r
144 logdir = config.getLogDir();
\r
145 uploaddir = logdir + "/.spool";
\r
146 (new File(uploaddir)).mkdirs();
\r
147 long now = System.currentTimeMillis();
\r
148 long intvl = StatusLog.parseInterval(config.getEventLogInterval(), 300000);
\r
149 long when = now - now % intvl + intvl + 20000L;
\r
150 config.getTimer().scheduleAtFixedRate(this, when - now, intvl);
\r
151 worker = new Uploader();
\r
154 * Trigger check for expired log files and log files to upload
\r
156 public void run() {
\r