@Component
public class UniversalEventAdapter implements GenericAdapter {
- private final Logger LOGGER = LoggerFactory.getLogger(this.getClass());
+ private static final Logger metricsLogger = LoggerFactory.getLogger("metricsLogger");
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
private String enterpriseId;
@Value("${defaultEnterpriseId}")
if (VESAdapterInitializer.getMappingFiles().containsKey(enterpriseId)) {
configFileData = VESAdapterInitializer.getMappingFiles().get(enterpriseId);
- LOGGER.debug("Using Mapping file as Mapping file is available for Enterprise Id:{}",enterpriseId);
+ debugLogger.debug("Using Mapping file as Mapping file is available for Enterprise Id:{}",enterpriseId);
} else {
configFileData = VESAdapterInitializer.getMappingFiles().get(defaultEnterpriseId);
- LOGGER.debug("Using Default Mapping file as Mapping file is not available for Enterprise Id:{}",enterpriseId);
+ debugLogger.debug("Using Default Mapping file as Mapping file is not available for Enterprise Id:{}",enterpriseId);
}
Smooks smooksTemp = new Smooks(new ByteArrayInputStream(configFileData.getBytes(StandardCharsets.UTF_8)));
eventToSmooksMapping.put(eventType, smooksTemp);
VesEvent vesEvent = SmooksUtils.getTransformedObjectForInput(smooksTemp,incomingJsonString);
- LOGGER.debug("Incoming json transformed to VES format successfully");
+ debugLogger.info("Incoming json transformed to VES format successfully");
ObjectMapper objectMapper = new ObjectMapper();
result = objectMapper.writeValueAsString(vesEvent);
- LOGGER.debug("Serialized VES json");
+ debugLogger.info("Serialized VES json");
} catch (JsonProcessingException exception) {
throw new VesException("Unable to convert pojo to VES format, Reason :{}", exception);
} catch (SAXException | IOException exception) {
//Invalid Mapping file
- LOGGER.error("Dropping this Trap :{},due to error Occured :Reason:", incomingJsonString, exception);
+ errorLogger.error("Dropping this Trap :{},Reason:", incomingJsonString, exception);
} catch (JsonSyntaxException exception) {
// Invalid Trap
- LOGGER.error("Dropping this Invalid json Trap :{}, Reason:", incomingJsonString, exception);
+ errorLogger.error("Dropping this Invalid json Trap :{}, Reason:", incomingJsonString, exception);
}catch (JsonParseException exception) {
// Invalid Trap
- LOGGER.error("Dropping this Invalid json Trap :{}, Reason:", incomingJsonString, exception);
+ errorLogger.error("Dropping this Invalid json Trap :{}, Reason:", incomingJsonString, exception);
}
catch (RuntimeException exception) {
- LOGGER.error("Dropping this Trap :{},Reason:", incomingJsonString, exception);
+ errorLogger.error("Dropping this Trap :{},Reason:", incomingJsonString, exception);
}
return result;
public void destroy() {
for (Smooks smooks : eventToSmooksMapping.values())
smooks.close();
- LOGGER.debug("All Smooks objects closed");
+ debugLogger.debug("All Smooks objects closed");
}
}
@Component
public abstract class DMaaPMRBaseConfig {
- protected static final Logger LOG = LoggerFactory.getLogger(DMaaPMRBaseConfig.class);
+
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
protected String hostName;
protected Integer portNumber;
final String errorMessage =
"Unsupported protocol selection. Only HTTP and HTTPS are currently supported for DMaaP MR";
- throw new DMaapException(errorMessage, LOG, new IllegalArgumentException(errorMessage));
+ throw new DMaapException(errorMessage, errorLogger, new IllegalArgumentException(errorMessage));
}
return normalizedProtocolString;
}
final String errorMessage =
"Unsupported content type selection. Only application/json is currently supported for DMaaP MR";
- throw new DMaapException(errorMessage, LOG, new IllegalArgumentException(errorMessage));
+ throw new DMaapException(errorMessage, errorLogger, new IllegalArgumentException(errorMessage));
}
return normalizedContentType;
}
@RestController
public class VesController {
- private final Logger LOGGER = LoggerFactory.getLogger(this.getClass());
+ private static final Logger metricsLogger = LoggerFactory.getLogger("metricsLogger");
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
@Autowired
private VesService vesService;
@RequestMapping("/start")
public String start() {
- LOGGER.info("UniversalVesAdapter Application starting...");
+ metricsLogger.info("UniversalVesAdapter Application starting...");
try {
vesService.start();
} catch (MapperConfigException e) {
- LOGGER.error("Config error:{}",e.getMessage(),e.getCause());
+ errorLogger.error("Config error:{}",e.getMessage(),e.getCause());
}
return "Application started";
}
@RequestMapping("/reload")
public void reloadMappingFileFromDB() {
- LOGGER.debug("Reload of Mapping File is started");
+ debugLogger.debug("Reload of Mapping File is started");
vESAdapterInitializer.fetchMappingFile();
- LOGGER.debug("Reload of Mapping File is completed");
+ debugLogger.debug("Reload of Mapping File is completed");
}
@RequestMapping("/healthcheck")
public String healthcheck() {
- LOGGER.debug("UniversalVesAdapter Application is up & running...");
+ metricsLogger.info("UniversalVesAdapter Application is up & running...");
return "UniversalVesAdapter Application is up & running...";
}
public String stop() {
vesService.stop();
- LOGGER.debug("UniversalVesAdapter Application is stopping...");
+ metricsLogger.info("UniversalVesAdapter Application is stopping...");
return "Application will be stopped soon";
}
}
@ComponentScan
public abstract class BaseDMaaPMRComponent {
- private static final Logger LOG = LoggerFactory.getLogger(BaseDMaaPMRComponent.class);
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
private static final ObjectMapper objectMapper = new ObjectMapper();
public BaseDMaaPMRComponent() {}
.setPath(dmaapUriPathPrefix + topicName).build();
} catch (URISyntaxException e) {
final String errorMessage = format("Error while creating publisher URI: %s", e);
- throw new DMaapException(errorMessage, LOG, e);
+ throw new DMaapException(errorMessage, errorLogger, e);
}
- LOG.info("Created DMaaP MR Publisher URI: {}", publisherURI);
+ debugLogger.info("Created DMaaP MR Publisher URI: {}", publisherURI);
return publisherURI;
}
} catch (URISyntaxException e) {
final String errorMessage = format("Error while creating subscriber URI: %s", e);
- throw new DMaapException(errorMessage, LOG, e);
+ throw new DMaapException(errorMessage, errorLogger, e);
}
- LOG.info("Created DMaaP MR Subscriber URI: {}", subscriberURI);
+ debugLogger.info("Created DMaaP MR Subscriber URI: {}", subscriberURI);
return subscriberURI;
}
try {
publisherQueue.addRecoverableMessages(messages);
- LOG.debug("Messages Added to Recovery Queue. Messages Size: {}, Recovery Queue Remaining Size: {}",
+ debugLogger.debug("Messages Added to Recovery Queue. Messages Size: {}, Recovery Queue Remaining Size: {}",
messages.size(), publisherQueue.getBatchQueueRemainingSize());
} catch (IllegalStateException e) {
final String errorMessage = format("Unable to put messages in recovery queue. Messages will be lost. " +
"Recovery Queue might be full. Message Size: %d, Recovery Queue Remaining Capacity: %d",
messages.size(), publisherQueue.getRecoveryQueueRemainingSize());
- throw new DMaapException(errorMessage, LOG, e);
+ throw new DMaapException(errorMessage, errorLogger, e);
}
}
final String errorMessage =
format("Unable to convert publisher messages to Json. Messages: %s, Json Error: %s",
messages, e);
- throw new DMaapException(errorMessage, LOG, e);
+ throw new DMaapException(errorMessage, errorLogger, e);
} catch (IOException e) {
final String errorMessage =
format("IO Exception while converting publisher messages to Json. Messages: %s, Json Error: %s",
messages, e);
- throw new DMaapException(errorMessage, LOG, e);
+ throw new DMaapException(errorMessage, errorLogger, e);
}
}
final String errorMessage =
format("Unable to convert subscriber Json String to Messages. Subscriber Response String: %s," +
" Json Error: %s", messagesJsonString, e);
- throw new DMaapException(errorMessage, LOG, e);
+ throw new DMaapException(errorMessage, errorLogger, e);
}
}
@Component
public class Creator {
- private final Logger LOGGER = LoggerFactory.getLogger(Creator.class);
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
private DMaaPMRFactory dMaaPMRFactoryInstance;
private String dmaaphost;
private String publisherTopic;
this.publisherTopic = dmaapConfig.getPublisherTopic();
this.subcriberTopic = dmaapConfig.getSubscriberTopic();
this.dMaaPMRFactoryInstance = DMaaPMRFactory.create();
- LOGGER.info("The Hostname of DMaap is :" + dmaaphost);
+ debugLogger.info("The Hostname of DMaap is :" + dmaaphost);
}
try {
dMaaPMRPublisherConfig = new DMaaPMRPublisherConfig.Builder(dmaaphost, publisherTopic,dmaapConfig).build();
} catch (IOException e) {
- LOGGER.error("failed or interrupted I/O operations while creating publisher config:{}",e.getCause());
+ errorLogger.error("failed or interrupted I/O operations while creating publisher config:{}",e.getCause());
}
return dMaaPMRFactoryInstance.createPublisher(dMaaPMRPublisherConfig);
}
dMaaPMRSubscriberConfig = new DMaaPMRSubscriberConfig.Builder(dmaaphost, subcriberTopic, dmaapConfig).build();
} catch (IOException e) {
- LOGGER.error("failed or interrupted I/O operations while creating subcriber config:{}",e.getCause());
+ errorLogger.error("failed or interrupted I/O operations while creating subcriber config:{}",e.getCause());
}
return dMaaPMRFactoryInstance.createSubscriber(dMaaPMRSubscriberConfig);
public class DMaaPMRFactory {
- private static final Logger LOG = LoggerFactory.getLogger(DMaaPMRFactory.class);
-
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
+
private final Injector injector;
public DMaaPMRFactory(AbstractModule guiceModule) {
*/
public DMaaPMRPublisher createPublisher(@Nonnull DMaaPMRPublisherConfig publisherConfig) {
final DMaaPMRPublisherFactory publisherFactory = injector.getInstance(Key.get(DMaaPMRPublisherFactory.class));
- LOG.debug("Creating new DMaaP MR Publisher Instance with configuration: {}", publisherConfig);
+ debugLogger.debug("Creating new DMaaP MR Publisher Instance with configuration: {}", publisherConfig);
final DMaaPMRPublisher dMaaPMRPublisher = publisherFactory.create(publisherConfig);
- LOG.info("Created new DMaaP MR Publisher Instance. Publisher creation time: {}",
+ debugLogger.info("Created new DMaaP MR Publisher Instance. Publisher creation time: {}",
dMaaPMRPublisher.getPublisherCreationTime());
return dMaaPMRPublisher;
}
*/
public DMaaPMRSubscriber createSubscriber(@Nonnull DMaaPMRSubscriberConfig subscriberConfig) {
final DMaaPMRSubscriberFactory subscriberFactory = injector.getInstance(DMaaPMRSubscriberFactory.class);
- LOG.debug("Creating new DMaaP MR Subscriber Instance with configuration: {}", subscriberConfig);
+ debugLogger.debug("Creating new DMaaP MR Subscriber Instance with configuration: {}", subscriberConfig);
final DMaaPMRSubscriber dMaaPMRSubscriber = subscriberFactory.create(subscriberConfig);
- LOG.info("Created new DMaaP MR Subscriber Instance. Subscriber creation time: {}",
+ debugLogger.info("Created new DMaaP MR Subscriber Instance. Subscriber creation time: {}",
dMaaPMRSubscriber.getSubscriberCreationTime());
return dMaaPMRSubscriber;
}
*/
public static DMaaPMRFactory create() {
final DMaaPMRFactory dMaaPMRFactory = new DMaaPMRFactory(new AnalyticsDMaaPModule());
- LOG.info("Created new instance of DMaaP MR Factory");
+ debugLogger.info("Created new instance of DMaaP MR Factory");
return dMaaPMRFactory;
}
private int publisherMaxFlushRetries;
- private static final Logger LOG = LoggerFactory.getLogger(DMaaPMRPublisherImpl.class);
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
+ static
public int a =2;
private final DMaaPMRPublisherConfig publisherConfig;
// if messages size is less than batch queue size - just queue them for batch publishing
if (batchQueueRemainingSize > messages.size()) {
- LOG.debug("Adding messages to batch Queue. No flushing required. Messages Size:{}. Batch Queue Size:{}",
+ debugLogger.debug("Adding messages to batch Queue. No flushing required. Messages Size:{}. Batch Queue Size:{}",
messages.size(), batchQueueRemainingSize);
final int batchQueueSize = publisherQueue.addBatchMessages(messages);
return createPublisherAcceptedResponse(batchQueueSize);
// grab all already queued messages, append current messages and force publish them to DMaaP MR topic
final List<String> queueMessages = publisherQueue.getMessageForPublishing();
- LOG.debug("Batch Queue capacity exceeds messages size. Flushing of all pending messages to DMaaP MR " +
+ debugLogger.debug("Batch Queue capacity exceeds messages size. Flushing of all pending messages to DMaaP MR " +
"Publisher Topic.");
return forcePublish(Lists.newLinkedList(Iterables.concat(queueMessages, messages)));
}
@Override
public DMaaPMRPublisherResponse forcePublish(List<String> messages) {
- LOG.debug("Force publishing messages to DMaaP MR Topic. Messages Size: {}", messages.size());
+ debugLogger.debug("Force publishing messages to DMaaP MR Topic. Messages Size: {}", messages.size());
final String contentType = publisherConfig.getContentType();
final String userName =(publisherConfig.getUserName().equals("null")) ? null : publisherConfig.getUserName();
if (authHeader.isPresent()) {
postRequest.addHeader(HttpHeaders.AUTHORIZATION, authHeader.get());
} else {
- LOG.debug("DMaaP MR Publisher Authentication is disabled as username or password is not present.");
+ debugLogger.debug("DMaaP MR Publisher Authentication is disabled as username or password is not present.");
}
// Create post string entity
final String responseBody = responsePair.getRight();
// if messages were published successfully, return successful response
if (HTTPUtils.isSuccessfulResponseCode(responseCode)) {
- LOG.debug("DMaaP MR Messages published successfully. DMaaP Response Code: {}. DMaaP Response " +
+ debugLogger.debug("DMaaP MR Messages published successfully. DMaaP Response Code: {}. DMaaP Response " +
"Body: {}, Number of Messages published: {}",
responseCode, responseBody, messages.size());
} else {
- LOG.warn("Unable to publish messages to DMaaP MR Topic. DMaaP Response Code: {}, DMaaP Response " +
+ debugLogger.warn("Unable to publish messages to DMaaP MR Topic. DMaaP Response Code: {}, DMaaP Response " +
"Body: {}. Messages will be queued in recovery queue", responseCode, responseBody);
addMessagesToRecoveryQueue(publisherQueue, messages);
}
final String errorMessage = format("IO Exception while publishing messages to DMaaP Topic. " +
"Messages will be queued in recovery queue. Messages Size: %d", messages.size());
- throw new DMaapException(errorMessage, LOG, e);
+ throw new DMaapException(errorMessage, errorLogger, e);
}
}
final List<String> queueMessages = publisherQueue.getMessageForPublishing();
// If there are no message return 204 (No Content) response code
if (queueMessages.isEmpty()) {
- LOG.debug("No messages to publish to batch queue. Returning 204 status code");
+ debugLogger.debug("No messages to publish to batch queue. Returning 204 status code");
return createPublisherNoContentResponse();
} else {
// force publish messages in queue
flushResponseCode = flushResponse.getResponseCode();
if (!HTTPUtils.isSuccessfulResponseCode(flushResponseCode)) {
- LOG.warn("Unable to flush batch messages to publisher due to DMaaP MR invalid Response: {}. " +
+ debugLogger.warn("Unable to flush batch messages to publisher due to DMaaP MR invalid Response: {}. " +
"Retrial No: {} of Max {} Retries", flushResponseCode, retrialNumber,
publisherMaxFlushRetries);
!HTTPUtils.isSuccessfulResponseCode(flushResponseCode));
if (!HTTPUtils.isSuccessfulResponseCode(flushResponseCode)) {
- LOG.error("Unable to flush batch messages to publisher. Messages loss cannot be prevented");
+ errorLogger.error("Unable to flush batch messages to publisher. Messages loss cannot be prevented");
} else {
- LOG.info("Successfully published all batched messages to publisher.");
+ debugLogger.info("Successfully published all batched messages to publisher.");
}
// close http client
*/
public class DMaaPMRPublisherQueueImpl implements DMaaPMRPublisherQueue {
- private static final Logger LOG = LoggerFactory.getLogger(DMaaPMRPublisherQueueImpl.class);
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+
private final LinkedBlockingDeque<String> batchQueue;
private final LinkedBlockingDeque<String> recoveryQueue;
@Assisted("recoveryQueueSize") int recoveryQueueSize) {
batchQueue = new LinkedBlockingDeque<>(batchQueueSize);
recoveryQueue = new LinkedBlockingDeque<>(recoveryQueueSize);
- LOG.debug("Creating Instance of DMaaP Publisher Queue. BatchQueueSize: {}, RecoveryQueueSize: {}",
+ debugLogger.debug("Creating Instance of DMaaP Publisher Queue. BatchQueueSize: {}, RecoveryQueueSize: {}",
batchQueueSize, recoveryQueueSize);
}
// get messages from recovery queue if present
if (!recoveryQueue.isEmpty()) {
final int recoveryQueueSize = recoveryQueue.drainTo(recoveryMessageList);
- LOG.debug("Drained Recovery Queue elements for flushing: {}", recoveryQueueSize);
+ debugLogger.debug("Drained Recovery Queue elements for flushing: {}", recoveryQueueSize);
}
// get messages from batch queue if present
if (!batchQueue.isEmpty()) {
final int batchQueueSize = batchQueue.drainTo(batchMessagesList);
- LOG.debug("Drained Batch Queue elements for flushing: {}", batchQueueSize);
+ debugLogger.debug("Drained Batch Queue elements for flushing: {}", batchQueueSize);
}
// concat recovery and batch queue elements
*/
public class DMaaPMRSubscriberImpl extends BaseDMaaPMRComponent implements DMaaPMRSubscriber {
- private static final Logger LOG = LoggerFactory.getLogger(DMaaPMRSubscriberImpl.class);
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
private final DMaaPMRSubscriberConfig subscriberConfig;
private final CloseableHttpClient closeableHttpClient;
if (authHeader.isPresent()) {
getRequest.addHeader(HttpHeaders.AUTHORIZATION, authHeader.get());
} else {
- LOG.debug("DMaaP MR Subscriber Authentication is disabled as username or password is not present.");
+ debugLogger.debug("DMaaP MR Subscriber Authentication is disabled as username or password is not present.");
}
try {
responseMessage = "DMaaP Response Body had no messages";
}
} else {
- LOG.error("Unable to fetch messages to DMaaP MR Topic. DMaaP MR unsuccessful Response Code: {}, "
+ errorLogger.error("Unable to fetch messages to DMaaP MR Topic. DMaaP MR unsuccessful Response Code: {}, "
+ "DMaaP Response Body: {}", responseCode, responseBody);
}
final String errorMessage = format("IO Exception while fetching messages from DMaaP Topic. Exception %s",
e);
- throw new DMaapException(errorMessage, LOG, e);
+ throw new DMaapException(errorMessage, errorLogger, e);
}
}
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
@Component
public class DMaapService {
- private final Logger LOGGER = LoggerFactory.getLogger(this.getClass());
+ private static final Logger metricsLogger = LoggerFactory.getLogger("metricsLogger");
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
private static List<String> list = new LinkedList<String>();
@Autowired
private UniversalEventAdapter eventAdapter;
*/
public void fetchAndPublishInDMaaP(DMaaPMRSubscriber dMaaPMRSubscriber, DMaaPMRPublisher publisher, Creator creater)
throws InterruptedException {
- LOGGER.info("fetch and publish from and to Dmaap started");
-
+ metricsLogger.info("fetch and publish from and to Dmaap started");
int pollingInternalInt=dmaapConfig.getPollingInterval();
- LOGGER.info("The Polling Interval in Milli Second is :" +pollingInternalInt);
+ debugLogger.info("The Polling Interval in Milli Second is :{}", pollingInternalInt);
while (true) {
synchronized (this) {
for (String incomingJsonString : dMaaPMRSubscriber.fetchMessages().getFetchedMessages()) {
if (list.isEmpty()) {
Thread.sleep(pollingInternalInt);
}
- LOGGER.debug("number of messages to be converted :{}", list.size());
+ debugLogger.debug("number of messages to be converted :{}", list.size());
if (!list.isEmpty()) {
String val = ((LinkedList<String>) list).removeFirst();
if (vesEvent!=null && (!(vesEvent.isEmpty() || vesEvent.equals("")))) {
messages.add(vesEvent);
publisher.publish(messages);
- LOGGER.info("Message successfully published to DMaaP Topic");
+ metricsLogger.info("Message successfully published to DMaaP Topic");
}
}
outgoingJsonString = eventAdapter.transform(incomingJsonString, "snmp");
} catch (VesException exception) {
- LOGGER.error("Received exception : " + exception.getMessage(), exception);
- LOGGER.error("APPLICATION WILL BE SHUTDOWN UNTIL ABOVE ISSUE IS RESOLVED.");
+ errorLogger.error("Received exception : {}", exception.getMessage(), exception);
+ debugLogger.warn("APPLICATION WILL BE SHUTDOWN UNTIL ABOVE ISSUE IS RESOLVED.");
} catch (DMaapException e) {
- LOGGER.error("Received exception : ", e.getMessage());
+ errorLogger.error("Received exception : {}", e.getMessage());
}
}
return outgoingJsonString;
//AdapterInitializer
@Component
public class VESAdapterInitializer implements CommandLineRunner, Ordered {
- private static final Logger LOGGER = LoggerFactory.getLogger(VESAdapterInitializer.class);
+ private static final Logger metricsLogger = LoggerFactory.getLogger("metricsLogger");
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
@Autowired
private Creator creator;
public void run(String... args) throws Exception {
env = System.getenv();
for (Map.Entry<String, String> entry : env.entrySet()) {
- LOGGER.info(entry.getKey() + ":" + entry.getValue());
+ debugLogger.debug(entry.getKey() + ":" + entry.getValue());
}
if (env.containsKey("CONSUL_HOST") && env.containsKey("CONFIG_BINDING_SERVICE") && env.containsKey("HOSTNAME")) {
//TODO - Add logic to talk to Consul and CBS to get the configuration. For now, we will refer to configuration coming from docker env parameters
- LOGGER.info(">>>Dynamic configuration to be used");
+ debugLogger.info(">>>Dynamic configuration to be used");
if( (env.get("DMAAPHOST")==null ||
(env.get("MR_DEFAULT_PORT_NUMBER")==null ||
(env.get("JDBC_PASSWORD")==null )))))) {
- LOGGER.error("Some docker environment parameter is missing. Sample Usage is -\n sudo docker run -d -p 8085:8085/tcp --env URL_JDBC=jdbc:postgresql://10.53.172.129:5432/dummy --env JDBC_USERNAME=ngpuser --env JDBC_PASSWORD=root --env MR_DMAAPHOST=10.10.10.10 --env MR_DEFAULT_PORT_NUMBER=3904 --env CONSUL_HOST=10.53.172.109 --env HOSTNAME=mvp-dcaegen2-collectors-ves --env CONFIG_BINDING_SERVICE=config_binding_service -e DMAAPHOST='10.53.172.156' onap/org.onap.dcaegen2.services.mapper.vesadapter.universalvesadaptor:latest");
- System.exit(SpringApplication.exit(applicationContext, () -> {LOGGER.error("Application stoped due to missing default mapping file");return-1;}));
+ errorLogger.error("Some docker environment parameter is missing. Sample Usage is -\n sudo docker run -d -p 8085:8085/tcp --env URL_JDBC=jdbc:postgresql://10.53.172.129:5432/dummy --env JDBC_USERNAME=ngpuser --env JDBC_PASSWORD=root --env MR_DMAAPHOST=10.10.10.10 --env MR_DEFAULT_PORT_NUMBER=3904 --env CONSUL_HOST=10.53.172.109 --env HOSTNAME=mvp-dcaegen2-collectors-ves --env CONFIG_BINDING_SERVICE=config_binding_service -e DMAAPHOST='10.53.172.156' onap/org.onap.dcaegen2.services.mapper.vesadapter.universalvesadaptor:latest");
+ System.exit(SpringApplication.exit(applicationContext, () -> {errorLogger.error("Application stoped due to missing default mapping file");return-1;}));
}else {
} else {
- LOGGER.info(">>>Static configuration to be used");
+ debugLogger.info(">>>Static configuration to be used");
}
prepareDatabase();
fetchMappingFile();
- LOGGER.info("Triggering controller's start url ");
+ debugLogger.info("Triggering controller's start url ");
executecurl("http://localhost:"+serverPort+"/start");
}
private static String executecurl(String url) {
- LOGGER.info("Running curl command for url:"+url);
+ debugLogger.info("Running curl command for url:{}",url);
String[] command = { "curl", "-v", url };
ProcessBuilder process = new ProcessBuilder(command);
Process p;
builder.append(line);
}
result = builder.toString();
- LOGGER.info(result);
+ debugLogger.debug(result);
}
} catch (IOException e) {
- LOGGER.error("error", e);
+ errorLogger.error("error", e);
}
return result;
public void fetchMappingFile() {
try (Connection con = DriverManager.getConnection(dBurl, user, pwd);PreparedStatement pstmt = con.prepareStatement("SELECT * FROM mapping_file");ResultSet rs = pstmt.executeQuery()) {
- LOGGER.info("Retrieving data from DB");
+ debugLogger.info("Retrieving data from DB");
// parsing the column each time is a linear search
int column1Pos = rs.findColumn("enterpriseid");
int column2Pos = rs.findColumn("mappingfilecontents");
String data = new String(bytes, "UTF-8");
mappingFiles.put(column1, data);
}
- LOGGER.info("DB Initialization Completed, Total # Mappingfiles are" + mappingFiles.size());
+ debugLogger.info("DB Initialization Completed, Total # Mappingfiles are:{}" , mappingFiles.size());
} catch (Exception e) {
- LOGGER.error("Error occured due to :" + e.getMessage());
+ errorLogger.error("Error occured due to :{}", e.getMessage());
}
}
private void prepareDatabase() throws IOException {
- LOGGER.info("The Default Mapping file Location:" + defaultMappingFileLocation.trim());
+ debugLogger.info("The Default Mapping file Location:" + defaultMappingFileLocation.trim());
if (ClassLoader.getSystemResource(defaultMappingFileLocation.trim()) == null) {
- LOGGER.error(
+ errorLogger.error(
"Default mapping file " + defaultMappingFileLocation.trim() + " is missing");
System.exit(SpringApplication.exit(applicationContext, () -> {
- LOGGER.error("Application stoped due to missing default mapping file");
+ errorLogger.error("Application stoped due to missing default mapping file");
return -1;
}));
}
fileInputStream.read(bytesArray);
} catch (IOException e1) {
- LOGGER.error("Exception Occured while reading the default mapping file ,Cause: "
+ errorLogger.error("Exception Occured while reading the default mapping file ,Cause: "
+ e1.getMessage(), e1);
// exit on missing default mapping file
System.exit(SpringApplication.exit(applicationContext, () -> {
- LOGGER.error("Application stoped due to missing default mapping file");
+ errorLogger.error("Application stoped due to missing default mapping file");
return -1;
}));
}
+ ")\r\n" + "WITH (\r\n" + " OIDS = FALSE\r\n" + ")\r\n"
+ "TABLESPACE pg_default;")) {
- LOGGER.info("Postgresql Connection successful...");
- LOGGER.debug("Connection object:" + con.toString());
+ metricsLogger.info("Postgresql Connection successful...");
+ debugLogger.debug("Connection object:{}" , con.toString());
pstmt11.executeUpdate();
- LOGGER.info("CREATE TABLE IF NOT EXISTS executed successfully....");
+ debugLogger.info("CREATE TABLE IF NOT EXISTS executed successfully....");
if ((bytesArray.length > 0) && (!Arrays.toString(bytesArray).equals(""))) {
- LOGGER.debug("2Connection object:" + con.toString());
+
try (PreparedStatement pstmt = con.prepareStatement("INSERT INTO "
+ MappingFileTableName
+ "(enterpriseid, mappingfilecontents, mimetype, File_Name) VALUES (?, ?, ?, ?) ON CONFLICT (enterpriseid) DO NOTHING;")) {
pstmt.setString(4, file.getName());
pstmt.executeUpdate();
- LOGGER.info("Made sure that default mapping file is present in table");
+ debugLogger.info("Made sure that default mapping file is present in table");
}
} else {
- LOGGER.error(file.getName() + " is empty");
+ errorLogger.error(file.getName() + " is empty");
// exit on empty mapping file
System.exit(SpringApplication.exit(applicationContext, () -> {
- LOGGER.error("Application stoped beacuase default mapping file is empty..");
+ errorLogger.error("Application stoped beacuase default mapping file is empty..");
return -1;
}));
}
} catch (SQLException e) {
- LOGGER.error("Received exception : " + e.getMessage(), e);
+ errorLogger.error("Received exception : " + e.getMessage(), e);
// exit on SqlException
System.exit(SpringApplication.exit(applicationContext, () -> {
- LOGGER.error("Application Stoped due to ", e.getCause());
+ errorLogger.error("Application Stoped due to ", e.getCause());
return -1;
}));
}
@Component
public class VesService {
- private final Logger LOGGER = LoggerFactory.getLogger(VesService.class);
+ private static final Logger metricsLogger = LoggerFactory.getLogger("metricsLogger");
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
private boolean isRunning = true;
* method triggers universal VES adapter module.
*/
public void start() throws MapperConfigException {
- LOGGER.debug("Creating Subcriber and Publisher with creator.............");
+ debugLogger.info("Creating Subcriber and Publisher with creator.............");
DMaaPMRSubscriber subcriber = creator.getDMaaPMRSubscriber();
DMaaPMRPublisher publisher = creator.getDMaaPMRPublisher();
@Override
public void run() {
try {
- LOGGER.debug("starting subscriber & publisher thread:{}", Thread.currentThread().getName());
+ debugLogger.info("starting subscriber & publisher thread:{}", Thread.currentThread().getName());
dmaapService.fetchAndPublishInDMaaP(subcriber, publisher, creator);
} catch (InterruptedException e) {
- LOGGER.error("Exception in starting of subscriber & publisher thread:{}",e);
+ errorLogger.error("Exception in starting of subscriber & publisher thread", e);
Thread.currentThread().interrupt();
}
}
*/
public class MapperConfigUtils {
- private static final Logger LOGGER = LoggerFactory.getLogger(MapperConfigUtils.class);
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
private static Set<Entry> entries = new TreeSet<>((o1, o2) -> o1.getPriority().compareTo(o2.getPriority()));
private enum JoinOperator {
readMapperConfigFile(mappingFile);
checkIncomingJsonForMatchingDomain(incomingJsonString);
} catch (MapperConfigException e) {
- LOGGER.error("Exception in mapperConfigFile reading:{}",e);
+ errorLogger.error("Exception in mapperConfigFile reading", e);
}
}
throw new MapperConfigException("Unable to read config file for reason...\n " + exception.getMessage(),
exception);
}
- LOGGER.debug("Read config file content into :{}",config);
+ debugLogger.debug("Read config file content into :{}",config);
if (null != config) {
entries.addAll(config.getEntries());
public class SmooksUtils {
- private final static Logger LOGGER = LoggerFactory.getLogger(SmooksUtils.class);
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
/**
* converts the incoming json using passed smooks instance and return the pojo representation of VES event
*/
public static VesEvent getTransformedObjectForInput(Smooks smooks, String incomingJsonString) {
- LOGGER.info("Transforming incoming json " );
+ debugLogger.info("Transforming incoming json " );
ExecutionContext executionContext = smooks.createExecutionContext();
- LOGGER.info("Context created");
+ debugLogger.info("Context created");
Locale defaultLocale = Locale.getDefault();
Locale.setDefault(new Locale("en", "IE"));
Locale.setDefault(defaultLocale);
VesEvent vesEvent = (VesEvent) executionContext.getBeanContext().getBean("vesEvent");
- LOGGER.debug("consversion successful to VES Event");
+ debugLogger.debug("conversion successful to VES Event");
return vesEvent;
}
<?xml version="1.0" encoding="UTF-8"?>
-<configuration>
+<!-- /*
+* ============LICENSE_START=======================================================
+* ONAP : DCAE
+* ================================================================================
+* Copyright 2018 TechMahindra
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/ -->
- <property name="DEV_HOME" value="logs" />
+<configuration scan="true" scanPeriod="30 seconds">
- <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <layout class="ch.qos.logback.classic.PatternLayout">
+ <Pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</Pattern>
+ </layout>
+ </appender>
+
+ <property name="LOG_DIR" value="logs" />
+
+ <appender name="metricsAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${LOG_DIR}/metrics.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
+ <!-- rollover daily -->
+ <fileNamePattern>${LOG_DIR}/metrics-%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
+ <!-- each file should be at most 100MB, keep 2 days worth of history, but at most 200MB -->
+ <maxFileSize>100MB</maxFileSize>
+ <maxHistory>2</maxHistory>
+ <totalSizeCap>200MB</totalSizeCap>
+ </rollingPolicy>
<layout class="ch.qos.logback.classic.PatternLayout">
- <Pattern>
- %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n
- </Pattern>
+ <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSS+00:00}|%d{yyyy-MM-dd'T'HH:mm:ss.SSS+00:00}|NULL|UNKNOWN|%thread||universalvesadpter||||COMPLETE|0|OK||%-5level||||||||||||||%class{36}:%M:%L: %m%n</pattern>
</layout>
</appender>
-
- <appender name="FILE-AUDIT"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${DEV_HOME}/debug.log</file>
- <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
- <Pattern>
- %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n
- </Pattern>
- </encoder>
-
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+
+ <appender name="debugAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${LOG_DIR}/debug.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- rollover daily -->
- <fileNamePattern>${DEV_HOME}/archived/debug.%d{yyyy-MM-dd}.%i.log
- </fileNamePattern>
- <timeBasedFileNamingAndTriggeringPolicy
- class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
- <maxFileSize>10MB</maxFileSize>
- </timeBasedFileNamingAndTriggeringPolicy>
+ <fileNamePattern>${LOG_DIR}/debug-%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
+ <!-- each file should be at most 100MB, keep 2 days worth of history, but at most 200MB -->
+ <maxFileSize>100MB</maxFileSize>
+ <maxHistory>2</maxHistory>
+ <totalSizeCap>200MB</totalSizeCap>
</rollingPolicy>
-
+ <layout class="ch.qos.logback.classic.PatternLayout">
+ <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSS+00:00}|NULL|%-5level:%class{36}:%M:%L: %m%n</pattern>
+ </layout>
</appender>
-
- <appender name="FILE-ERROR"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>ERROR</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <file>${DEV_HOME}/DroppedEvents.log</file>
- <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
- <Pattern>
- %d{yyyy-MM-dd HH:mm:ss} %logger{36} - %msg%n
- </Pattern>
- </encoder>
-
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+
+ <appender name="errorAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${LOG_DIR}/error.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- rollover daily -->
- <fileNamePattern>${DEV_HOME}/archived/DroppedEvents.%d{yyyy-MM-dd}.%i.log
- </fileNamePattern>
- <timeBasedFileNamingAndTriggeringPolicy
- class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
- <maxFileSize>10MB</maxFileSize>
- </timeBasedFileNamingAndTriggeringPolicy>
+ <fileNamePattern>${LOG_DIR}/error-%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
+ <!-- each file should be at most 100MB, keep 2 days worth of history, but at most 200MB -->
+ <maxFileSize>100MB</maxFileSize>
+ <maxHistory>2</maxHistory>
+ <totalSizeCap>200MB</totalSizeCap>
</rollingPolicy>
-
+ <layout class="ch.qos.logback.classic.PatternLayout">
+ <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSS+00:00}|NULL|%thread|universalvesadpter||||ERROR|0|ERROR:|%class{36}:%M:%L: %m%n</pattern>
+ </layout>
</appender>
- <!-- Send logs to both console and file audit -->
- <logger name="org.onap.universalvesadapter" level="debug" additivity="false">
- <appender-ref ref="FILE-AUDIT" />
- <appender-ref ref="STDOUT" />
+ <logger name="metricsLogger" level="info" additivity="false">
+ <appender-ref ref="metricsAppender" />
</logger>
- <logger name="org.onap.universalvesadapter.adapter" level="debug" additivity="false">
- <appender-ref ref="FILE-AUDIT" />
- <appender-ref ref="FILE-ERROR" />
- <appender-ref ref="STDOUT" />
- </logger> -
- <!-- <logger name="org.onap.universalvesadapter.adapter" level="error" additivity="false">
- <appender-ref ref="FILE-ERROR" />
- </logger> -->
+ <logger name="debugLogger" level="debug" additivity="false">
+ <appender-ref ref="debugAppender" />
+ </logger>
+
+ <logger name="errorLogger" level="debug" additivity="false">
+ <appender-ref ref="errorAppender" />
+ <appender-ref ref="STDOUT" />
+ </logger>
+ <!--<logger name="com" level="INFO"/> -->
+ <root level="info">
+ <!-- <appender-ref ref="STDOUT" /> -->
+ <appender-ref ref="debugAppender" />
+ </root>
-
</configuration>
\ No newline at end of file
@Ignore
public class UniversalEventAdapterTest {
- private final Logger eLOGGER = LoggerFactory.getLogger(this.getClass());
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
@Mock
private VESAdapterInitializer vESAdapterInitializer;
assertNotNull(actualResult);
assertNotEquals("", actualResult);
} catch (VesException exception) {
- eLOGGER.error("Error occurred : ", exception );
+ errorLogger.error("Error occurred : ", exception );
}
}
+++ /dev/null
-/*
-* ============LICENSE_START=======================================================
-* ONAP : DCAE
-* ================================================================================
-* Copyright 2018 TechMahindra
-*=================================================================================
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-* ============LICENSE_END=========================================================
-*/
-/*package org.onap.universalvesadapter.service;
-
-import static org.junit.Assert.*;
-
-import org.junit.Test;
-
-public class ConfigFileServiceTest {
-
- @Test
- public void test() {
- fail("Not yet implemented");
- }
-
-}
-*/
\ No newline at end of file
+++ /dev/null
-/*
-* ============LICENSE_START=======================================================
-* ONAP : DCAE
-* ================================================================================
-* Copyright 2018 TechMahindra
-*=================================================================================
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-* ============LICENSE_END=========================================================
-
-package org.onap.universalvesadapter.service;
-
-import static org.junit.Assert.assertEquals;
-import java.net.URI;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.MockitoAnnotations;
-import org.onap.universalvesadapter.Application;
-import org.onap.universalvesadapter.exception.ConfigFileReadException;
-import org.onap.universalvesadapter.service.DiskRepoConfigFileService;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-import org.springframework.test.context.junit4.SpringRunner;
-import org.springframework.web.client.RestTemplate;
-
-@RunWith(SpringRunner.class)
-@SpringBootTest(classes = Application.class)
-public class DiskRepoConfigFileServiceTest {
-
- @Mock
- RestTemplate restTemplate;
-
- @InjectMocks
- @Autowired
- DiskRepoConfigFileService diskRepoConfigFileService;
-
- private final Logger eLOGGER = LoggerFactory.getLogger(this.getClass());
-
- @Before
- public void init() {
- MockitoAnnotations.initMocks(this);
- }
-
-
- @Test
- public void testReadConfigFile() {
-
- String result = "test file";
-
- ResponseEntity<String> fileDataEntity = new ResponseEntity<String>(result, HttpStatus.OK);
-
- Mockito.when(restTemplate.getForEntity(Mockito.any(URI.class), Mockito.any(Class.class))).thenReturn(fileDataEntity);
-
- try {
- String readConfigFile = diskRepoConfigFileService.readConfigFile("testCase.xml");
- assertEquals(result, readConfigFile);
- } catch (ConfigFileReadException exception) {
- eLOGGER.error("Error occurred : ", exception);
- }
-
- }
-
-}
-*/
\ No newline at end of file
public class MapperConfigUtilsTest {
- private final Logger eLOGGER = LoggerFactory.getLogger(this.getClass());
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
String mappingFileContent = "{" + " \"entries\" : ["
+ " {"
.checkIncomingJsonForMatchingDomain(inputJsonString);
assertEquals("snmp", checkIncomingJsonForMatchingDomain);
} catch (MapperConfigException exception) {
- eLOGGER.error("Error occurred : ", exception );
+ errorLogger.error("Error occurred : ", exception );
}
}
public class SmooksUtilsTest {
- private final Logger eLOGGER = LoggerFactory.getLogger(this.getClass());
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
@Test
public void testGetTransformedObjectForInput() {
vesEvent = SmooksUtils.getTransformedObjectForInput(smooks,
incomingJsonString.toString());
} catch (IOException | SAXException exception) {
- eLOGGER.error("Error occurred : ", exception );
+ errorLogger.error("Error occurred : ", exception );
}
assertEquals(vesEvent.getEvent().getCommonEventHeader().getDomain(), "UCSNMP-HEARTBEAT");
*/
@Repository
public class MappingFileDAOImpl implements MappingFileDAO {
- private final Logger LOGGER = LoggerFactory.getLogger(this.getClass());
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
+ private static final Logger metricsLogger = LoggerFactory.getLogger("metricsLogger");
@Value("${spring.datasource.url}")
String url;
private static Map<String, String> env;
@Override
public String uploadMappingFile(MultipartFile mappingFile, String enterpriseid) throws SQLException, IOException {
-
+ metricsLogger.info("Uploading mapping file");
env = System.getenv();
for (Map.Entry<String, String> entry : env.entrySet()) {
- LOGGER.info(entry.getKey() + ":" + entry.getValue());
+ debugLogger.info(entry.getKey() + ":" + entry.getValue());
}
if (env.containsKey("CONSUL_HOST") && env.containsKey("CONFIG_BINDING_SERVICE") && env.containsKey("HOSTNAME")) {
//TODO - Add logic to talk to Consul and CBS to get the configuration. For now, we will refer to configuration coming from docker env parameters
- LOGGER.info(">>>Dynamic configuration to be used");
+ debugLogger.info(">>>Dynamic configuration to be used");
if( (env.get("MR_DMAAPHOST")==null ||
(env.get("MR_DMAAPHOST")==null ||
(env.get("JDBC_PASSWORD")==null ))))))) {
- LOGGER.error("Some docker environment parameter is missing. Sample Usage is -\n sudo docker run -d -p 8085:8085/tcp --env URL_JDBC=jdbc:postgresql://10.53.172.129:5432/dummy --env JDBC_USERNAME=ngpuser --env JDBC_PASSWORD=root --env MR_DMAAPHOST=10.10.10.10 --env MR_DEFAULT_PORT_NUMBER=3904 --env CONSUL_HOST=10.53.172.109 --env HOSTNAME=mvp-dcaegen2-collectors-ves --env CONFIG_BINDING_SERVICE=config_binding_service -e DMAAPHOST='10.53.172.156' onap/org.onap.dcaegen2.services.mapper.vesadapter.universalvesadaptor:latest");
- System.exit(SpringApplication.exit(applicationContext, () -> {LOGGER.error("Application is stoped please provide the above environment parameter during docker run");return-1;}));
+ errorLogger.error("Some docker environment parameter is missing. Sample Usage is -\n sudo docker run -d -p 8085:8085/tcp --env URL_JDBC=jdbc:postgresql://10.53.172.129:5432/dummy --env JDBC_USERNAME=ngpuser --env JDBC_PASSWORD=root --env MR_DMAAPHOST=10.10.10.10 --env MR_DEFAULT_PORT_NUMBER=3904 --env CONSUL_HOST=10.53.172.109 --env HOSTNAME=mvp-dcaegen2-collectors-ves --env CONFIG_BINDING_SERVICE=config_binding_service -e DMAAPHOST='10.53.172.156' onap/org.onap.dcaegen2.services.mapper.vesadapter.universalvesadaptor:latest");
+ System.exit(SpringApplication.exit(applicationContext, () -> {errorLogger.error("Application is stopped; please provide the above environment parameters during docker run");return -1;}));
}else {
}
} else {
- LOGGER.info(">>>Static configuration to be used");
+ debugLogger.info(">>>Static configuration to be used");
}
try (Connection con = DriverManager.getConnection(url, user, pwd);
PreparedStatement pstmt = con.prepareStatement(
"INSERT INTO mapping_file(enterpriseid, mappingfilecontents, mimetype, File_Name) VALUES (?, ?, ?, ?)")) {
- LOGGER.debug("Connection established successfully");
+ metricsLogger.info("Connection established successfully");
pstmt.setString(1, enterpriseid);
pstmt.setBytes(2, mappingFile.getBytes());
pstmt.executeUpdate();
}catch (Exception e) {
- LOGGER.error("Error occured due to :" + e.getMessage());
+ errorLogger.error("Error occurred due to: {}", e.getMessage());
throw e;
}
return "Uploaded successfully";
*/
package org.onap.dcaegen2.services.mapper.snmpmapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class SnmpmapperApplication {
-
+ private static final Logger metricsLogger = LoggerFactory.getLogger("metricsLogger");
public static void main(String[] args) {
+ metricsLogger.info("Snmp Mapper main Startup");
SpringApplication.run(SnmpmapperApplication.class, args);
}
}
import org.apache.catalina.startup.ClassLoaderFactory.Repository;
import org.onap.dcaegen2.services.mapper.snmpmapper.entity.MappingFile;
import org.onap.dcaegen2.services.mapper.snmpmapper.service.MappingFileService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
@Controller
public class SnmpmapperController {
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
@Autowired
MappingFileService mappingFileService;
@RequestMapping(value = "uploadFile", method = RequestMethod.POST)
public String saveUploadedFileInDatabase(HttpServletRequest request, final @RequestParam MultipartFile[] mapper){
String result=mappingFileService.saveUploadedFileInDatabase(request, mapper);
+ debugLogger.debug("result of the mapping file upload:{}",result);
if(result.equals("success")) {
return "success.html";
}else
@Service
public class MappingFileServiceImpl implements MappingFileService {
- private final Logger LOGGER = LoggerFactory.getLogger(this.getClass());
-
+ private static final Logger debugLogger = LoggerFactory.getLogger("debugLogger");
+ private static final Logger errorLogger = LoggerFactory.getLogger("errorLogger");
static String enterpriseid;
static MappingFile mapping;
// Reading File Upload Form Input Parameters
enterpriseid = request.getParameter("eid");
- LOGGER.debug("EnterPrise ID recieved:{}",enterpriseid);
+ debugLogger.info("Enterprise ID received:{}",enterpriseid);
if ((mappingfile != null) && (mappingfile.length > 0)) {
if(aFile.isEmpty()) {
continue;
} else {
- LOGGER.debug("MappingFile Name = {} with enterprise id:{}", aFile.getOriginalFilename(),enterpriseid);
+ debugLogger.debug("MappingFile Name = {} with enterprise id:{}", aFile.getOriginalFilename(),enterpriseid);
if (!aFile.getOriginalFilename().equals("")) {
try {
mapping = new MappingFile();
mappingFileDAO.uploadMappingFile(aFile,enterpriseid);
} catch (SnmpMapperException snmpMapperException) {
- LOGGER.error(snmpMapperException.getMessage());
+ errorLogger.error(snmpMapperException.getMessage());
} catch (IOException e) {
- LOGGER.error("IOException occured:{}",e.getCause());
+ errorLogger.error("IOException occurred:{}",e.getCause());
return "failed";
} catch (SQLException e) {
- LOGGER.error("SQLException occured:{}",e.getCause());
+ errorLogger.error("SQLException occurred:{}",e.getCause());
return "failed";
}
}
}
- LOGGER.debug("File Is Successfully Uploaded & Saved In The Database\n");
+ debugLogger.info("File Is Successfully Uploaded & Saved In The Database\n");
}
} else {
return "failed";
<?xml version="1.0" encoding="UTF-8"?>
-<configuration>
+<!-- /*
+* ============LICENSE_START=======================================================
+* ONAP : DCAE
+* ================================================================================
+* Copyright 2018 TechMahindra
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/ -->
- <property name="DEV_HOME" value="logs" />
+<configuration scan="true" scanPeriod="30 seconds">
- <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <layout class="ch.qos.logback.classic.PatternLayout">
+ <Pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</Pattern>
+ </layout>
+ </appender>
+
+ <property name="LOG_DIR" value="logs" />
+
+ <appender name="metricsAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${LOG_DIR}/metrics.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
+ <!-- rollover daily -->
+ <fileNamePattern>${LOG_DIR}/metrics-%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
+ <!-- each file should be at most 100MB, keep 2 days worth of history, but at most 200MB -->
+ <maxFileSize>100MB</maxFileSize>
+ <maxHistory>2</maxHistory>
+ <totalSizeCap>200MB</totalSizeCap>
+ </rollingPolicy>
<layout class="ch.qos.logback.classic.PatternLayout">
- <Pattern>
- %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n
- </Pattern>
+ <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSS+00:00}|%d{yyyy-MM-dd'T'HH:mm:ss.SSS+00:00}|NULL|UNKNOWN|%thread||snmpmapper||||COMPLETE|0|OK||%-5level||||||||||||||%class{36}:%M:%L: %m%n</pattern>
</layout>
</appender>
-
- <appender name="FILE-AUDIT"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${DEV_HOME}/debug.log</file>
- <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
- <Pattern>
- %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n
- </Pattern>
- </encoder>
-
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+
+ <appender name="debugAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${LOG_DIR}/debug.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- rollover daily -->
- <fileNamePattern>${DEV_HOME}/archived/debug.%d{yyyy-MM-dd}.%i.log
- </fileNamePattern>
- <timeBasedFileNamingAndTriggeringPolicy
- class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
- <maxFileSize>10MB</maxFileSize>
- </timeBasedFileNamingAndTriggeringPolicy>
+ <fileNamePattern>${LOG_DIR}/debug-%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
+ <!-- each file should be at most 100MB, keep 2 days worth of history, but at most 200MB -->
+ <maxFileSize>100MB</maxFileSize>
+ <maxHistory>2</maxHistory>
+ <totalSizeCap>200MB</totalSizeCap>
</rollingPolicy>
-
+ <layout class="ch.qos.logback.classic.PatternLayout">
+ <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSS+00:00}|NULL|%-5level:%class{36}:%M:%L: %m%n</pattern>
+ </layout>
</appender>
-
- <appender name="FILE-ERROR"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>ERROR</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <file>${DEV_HOME}/error.log</file>
- <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
- <Pattern>
- %d{yyyy-MM-dd HH:mm:ss} %logger{36} - %msg%n
- </Pattern>
- </encoder>
-
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+
+ <appender name="errorAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${LOG_DIR}/error.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- rollover daily -->
- <fileNamePattern>${DEV_HOME}/archived/error.%d{yyyy-MM-dd}.%i.log
- </fileNamePattern>
- <timeBasedFileNamingAndTriggeringPolicy
- class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
- <maxFileSize>10MB</maxFileSize>
- </timeBasedFileNamingAndTriggeringPolicy>
+ <fileNamePattern>${LOG_DIR}/error-%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
+ <!-- each file should be at most 100MB, keep 2 days worth of history, but at most 200MB -->
+ <maxFileSize>100MB</maxFileSize>
+ <maxHistory>2</maxHistory>
+ <totalSizeCap>200MB</totalSizeCap>
</rollingPolicy>
-
+ <layout class="ch.qos.logback.classic.PatternLayout">
+ <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSS+00:00}|NULL|%thread|snmpmapper||||ERROR|0|ERROR:|%class{36}:%M:%L: %m%n</pattern>
+ </layout>
</appender>
- <!-- Send logs to both console and file audit -->
- <logger name="org.onap.dcaegen2.services.mapper.snmpmapper" level="debug" additivity="false">
- <appender-ref ref="FILE-AUDIT" />
- <appender-ref ref="STDOUT" />
+ <logger name="metricsLogger" level="info" additivity="false">
+ <appender-ref ref="metricsAppender" />
</logger>
- <logger name="org.onap.dcaegen2.services.mapper.snmpmapper" level="debug" additivity="false">
- <appender-ref ref="FILE-AUDIT" />
- <appender-ref ref="FILE-ERROR" />
- <appender-ref ref="STDOUT" />
- </logger> -
- <!-- <logger name="org.onap.universalvesadapter.adapter" level="error" additivity="false">
- <appender-ref ref="FILE-ERROR" />
- </logger> -->
+ <logger name="debugLogger" level="debug" additivity="false">
+ <appender-ref ref="debugAppender" />
+ </logger>
+
+ <logger name="errorLogger" level="debug" additivity="false">
+ <appender-ref ref="errorAppender" />
+ <appender-ref ref="STDOUT" />
+ </logger>
+ <!--<logger name="com" level="INFO"/> -->
+ <root level="info">
+ <!-- <appender-ref ref="STDOUT" /> -->
+ <appender-ref ref="debugAppender" />
+ </root>
-
</configuration>
\ No newline at end of file