apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port;
} catch (UnknownHostException e1) {
// TODO Auto-generated catch block
- e1.printStackTrace();
+ log.error("unable to get the localhost address");
}
try {
} catch (Exception e) {
log.error("Failed and go to Exception block for " + fGroup + " " + e.getMessage());
- e.printStackTrace();
}
}
});
log.error("Error occurs for " + e);
} catch (Exception e) {
log.error("Failed and go to Exception block for " + group + " " + e.getMessage());
- e.printStackTrace();
}
}
});
import com.att.dmf.mr.backends.Publisher;
import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.utils.Utils;
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
import com.att.eelf.configuration.EELFLogger;
- transferSetting( props, "sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+ transferSetting( props, "sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';");
transferSetting( props, "security.protocol", "SASL_PLAINTEXT");
transferSetting( props, "sasl.mechanism", "PLAIN");
transferSetting( props, "bootstrap.servers",kafkaConnUrl);
import com.att.dmf.mr.backends.kafka.LiveLockAvoidance;
import com.att.dmf.mr.constants.CambriaConstants;
import com.att.dmf.mr.utils.ConfigurationReader;
-
-
+import com.att.dmf.mr.utils.Utils;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
props.put("group.id", fakeGroupName);
props.put("enable.auto.commit", "false"); // 0.11
props.put("bootstrap.servers", fkafkaBrokers);
- props.put("sasl.jaas.config",
- "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+ props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';");
props.put("security.protocol", "SASL_PLAINTEXT");
props.put("sasl.mechanism", "PLAIN");
import com.att.dmf.mr.metabroker.Broker1;
import com.att.dmf.mr.metabroker.Topic;
import com.att.dmf.mr.utils.ConfigurationReader;
+import com.att.dmf.mr.utils.Utils;
//import org.apache.log4j.Logger;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
}
props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers );
- props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+ props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';");
props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
props.put("sasl.mechanism", "PLAIN");
}
- props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+ props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';");
props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
props.put("sasl.mechanism", "PLAIN");
props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers );
package com.att.dmf.mr.metrics.publisher;
import java.net.MalformedURLException;
+import java.nio.channels.NotYetConnectedException;
import java.util.Collection;
import java.util.TreeSet;
import java.util.UUID;
return new DMaaPCambriaConsumerImpl(hostSet, topic, consumerGroup,
consumerId, timeoutMs, limit, filter, apiKey, apiSecret);
} catch (MalformedURLException e) {
- throw new RuntimeException(e);
+ // surface the MalformedURLException as a NotYetConnectedException, preserving its stack trace
+ NotYetConnectedException exception = new NotYetConnectedException();
+ exception.setStackTrace(e.getStackTrace());
+ throw exception;
}
}
import java.io.IOException;
import java.io.OutputStream;
import java.net.MalformedURLException;
+import java.nio.channels.NotYetConnectedException;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
try {
return new DMaaPCambriaSimplerBatchPublisher(fUrls, fTopic, fMaxBatchSize, fMaxBatchAgeMs, fCompress);
} catch (MalformedURLException e) {
- throw new RuntimeException(e);
+ NotYetConnectedException exception = new NotYetConnectedException();
+ exception.setStackTrace(e.getStackTrace());
+ throw exception;
}
}
import java.io.IOException;
import org.apache.kafka.common.errors.TopicExistsException;
+import org.json.JSONException;
+import com.att.dmf.mr.CambriaApiException;
import com.att.dmf.mr.beans.DMaaPContext;
import com.att.nsa.configs.ConfigDbException;
/**
* @throws Exception
*/
void getApiKey(DMaaPContext dmaapContext, final String apiKey)
- throws Exception;
+ throws CambriaApiException, ConfigDbException, JSONException, IOException;
/**
* Fetching list of all the topics and returning in a templated form for
int status = HttpStatus.SC_NOT_FOUND;
String errorMsg = null;
- if (excp instanceof CambriaApiException) {
+ if (excp.getClass().toString().contains("CambriaApiException")) {
status = ((CambriaApiException) excp).getStatus();
JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
JSONObject errObject = new JSONObject(jsonTokener);
int status = HttpStatus.SC_NOT_FOUND;
String errorMsg = null;
- if (excp instanceof CambriaApiException) {
+ if (excp.getClass().toString().contains("CambriaApiException")) {
status = ((CambriaApiException) excp).getStatus();
JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
JSONObject errObject = new JSONObject(jsonTokener);
} catch (Exception excp) {
int status = HttpStatus.SC_NOT_FOUND;
String errorMsg = null;
- if (excp instanceof CambriaApiException) {
+ if (excp.getClass().toString().contains("CambriaApiException")) {
status = ((CambriaApiException) excp).getStatus();
JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
JSONObject errObject = new JSONObject(jsonTokener);
} catch (Exception excp) {
int status = HttpStatus.SC_NOT_FOUND;
String errorMsg = null;
- if (excp instanceof CambriaApiException) {
+ if (excp.getClass().toString().contains("CambriaApiException")) {
status = ((CambriaApiException) excp).getStatus();
JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
JSONObject errObject = new JSONObject(jsonTokener);
throw new CambriaApiException(errRes);
} catch (com.att.dmf.mr.metabroker.Broker1.TopicExistsException e) {
// TODO Auto-generated catch block
- e.printStackTrace();
+ LOGGER.error("Topic already exists: " + e.getMessage());
}
}
import org.apache.kafka.common.errors.TopicExistsException;
import org.json.JSONArray;
+import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.stereotype.Service;
+import com.att.dmf.mr.CambriaApiException;
import com.att.dmf.mr.beans.DMaaPContext;
import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;
import com.att.dmf.mr.metabroker.Topic;
/**
* @param dmaapContext
* @param apiKey
+ * @throws ConfigDbException
+ * @throws IOException
+ * @throws JSONException
* @throws Exception
*/
@Override
- public void getApiKey(DMaaPContext dmaapContext, String apiKey) throws Exception {
+ public void getApiKey(DMaaPContext dmaapContext, String apiKey) throws CambriaApiException, ConfigDbException, JSONException, IOException {
// TODO - We need to work on the templates and how data will be set in
// the template
LOGGER.info("Fetching detials of apikey: " + apiKey);
DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject());
} else {
LOGGER.info("Details of apikey [" + apiKey + "] not found. Returning response");
- throw new Exception("Key [" + apiKey + "] not found.");
+ throw new CambriaApiException(400,"Key [" + apiKey + "] not found.");
}
}
* @throws ServletException
* @throws KafkaConsumerCacheException
* @throws ConfigDbException
+ * @throws KeyExistsException
*/
@Autowired
public ConfigurationReader(@Qualifier("propertyReader") rrNvReadable settings,
*/
@Qualifier("dMaaPAuthenticatorImpl") DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager
)
- throws missingReqdSetting, invalidSettingValue, ServletException, KafkaConsumerCacheException, ConfigDbException {
+ throws missingReqdSetting, invalidSettingValue, ServletException, KafkaConsumerCacheException, ConfigDbException, KeyExistsException {
this.fMetrics = fMetrics;
this.zk = zk;
}
protected void servletSetup()
- throws rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException, ConfigDbException {
+ throws rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException, ConfigDbException, KeyExistsException {
try {
fMetrics.toJson();
if ( adminSecret != null && adminSecret.length () > 0 )
{
- try
- {
final NsaApiDb<NsaSimpleApiKey> adminDb = new BaseNsaApiDbImpl<NsaSimpleApiKey> ( new MemConfigDb(), new NsaSimpleApiKeyFactory() );
adminDb.createApiKey ( "admin", adminSecret );
fSecurityManager.addAuthenticator ( new DMaaPOriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
- }
- catch ( KeyExistsException e )
- {
- throw new RuntimeException ( "This key can't exist in a fresh in-memory DB!", e );
- }
}
// setup a backend
private static final EELFLogger log = EELFManager.getInstance().getLogger(Emailer.class);
public static final String kSetting_MailAuthUser = "mailLogin";
- public static final String kSetting_MailAuthPwd = "mailPassword";
public static final String kSetting_MailFromEmail = "mailFromEmail";
public static final String kSetting_MailFromName = "mailFromName";
public static final String kSetting_SmtpServer = "mailSmtpServer";
makeSetting ( prop, "mail.smtp.starttls.enable", kSetting_SmtpServerSsl, true );
final String un = getSetting ( kSetting_MailAuthUser, "" );
- final String pw = getSetting ( kSetting_MailAuthPwd, "" );
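+ // mail password is now resolved via AJSCPropertiesMap using CambriaConstants.msgRtr_prop instead of the removed kSetting_MailAuthPwd setting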
+ final String mailProp = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "mailPassword");
+ final String mailPwd = (mailProp != null) ? mailProp : "";
final Session session = Session.getInstance ( prop,
new javax.mail.Authenticator()
{
@Override
protected PasswordAuthentication getPasswordAuthentication()
{
- return new PasswordAuthentication ( un, pw );
+ return new PasswordAuthentication ( un, mailPwd );
}
}
);
*******************************************************************************/
package com.att.dmf.mr.utils;
+import java.io.IOException;
+import java.io.InputStream;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;
+import java.util.Properties;
import javax.servlet.http.HttpServletRequest;
+import com.att.dmf.mr.backends.kafka.KafkaPublisher;
import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
/**
* This is an utility class for various operations for formatting
* @author nilanjana.maity
private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS";
public static final String CAMBRIA_AUTH_HEADER = "X-CambriaAuth";
private static final String BATCH_ID_FORMAT = "000000";
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(Utils.class);
private Utils() {
super();
}
return list;
}
+
+ /**
+  * Reads the Kafka admin secret (property "key") from kafka.properties on the classpath.
+  */
+ public static String getKafkaproperty() {
+ Properties props = new Properties();
+ try (InputStream input = Utils.class.getResourceAsStream("/kafka.properties")) {
+ if (input == null) {
+ log.error("kafka.properties not found on the classpath");
+ } else {
+ props.load(input);
+ }
+ } catch (IOException e) {
+ log.error("failed to read kafka.properties: " + e.getMessage());
+ }
+ return props.getProperty("key");
+ }
}
--- /dev/null
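+# SASL admin secret consumed by Utils.getKafkaproperty()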
+key=admin_secret
\ No newline at end of file