[DMAAP-KAFKA] Kafka Upgrade 65/122465/6
author seanfos <sean.osullivan@est.tech>
Tue, 6 Jul 2021 15:44:29 +0000 (16:44 +0100)
committer seanfos <sean.osullivan@est.tech>
Tue, 13 Jul 2021 13:51:03 +0000 (14:51 +0100)
Signed-off-by: seanfos <sean.osullivan@est.tech>
Change-Id: Ic83906004ca15494187972c9d8fd6a437766de85
Issue-ID: DMAAP-209

25 files changed:
pom.xml
src/main/docker/Dockerfile
src/main/docker/broker-list.sh [deleted file]
src/main/docker/cadi.properties [deleted file]
src/main/docker/consumer.properties [deleted file]
src/main/docker/create-topics.sh [deleted file]
src/main/docker/download-kafka.sh [deleted file]
src/main/docker/include/etc/confluent/docker/ensure
src/main/docker/include/etc/confluent/docker/kafka.properties.template
src/main/docker/include/etc/confluent/docker/log4j.properties.template
src/main/docker/include/etc/confluent/docker/run
src/main/docker/kafka-run-class.sh [deleted file]
src/main/docker/kafka_server_jaas.conf [deleted file]
src/main/docker/mmagent.config [deleted file]
src/main/docker/producer.properties [deleted file]
src/main/docker/start-kafka.sh [deleted file]
src/main/docker/start-kafkaOrMirrorMaker.sh [deleted file]
src/main/docker/start-mirrormaker.sh [deleted file]
src/test/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProviderFactoryTest.java
src/test/java/org/onap/dmaap/commonauth/kafka/base/authorization/Cadi3AAFProviderTest.java
src/test/java/org/onap/dmaap/kafkaAuthorize/KafkaCustomAuthorizerTest.java
src/test/java/org/onap/dmaap/kafkaAuthorize/PlainLoginModule1Test.java
src/test/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServer1Test.java
src/test/resources/cadi.properties
version.properties

diff --git a/pom.xml b/pom.xml
index 324a075..62c0288 100644 (file)
--- a/pom.xml
+++ b/pom.xml
        OF ANY KIND, either express or implied. See the License for the specific 
        language governing permissions and limitations under the License. ============LICENSE_END========================================================= 
        ECOMP is a trademark and service mark of AT&T Intellectual Property. -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-       xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-       <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.onap.oparent</groupId>
+    <artifactId>oparent</artifactId>
+    <version>3.2.0</version>
+  </parent>
 
-       <parent>
-               <groupId>org.onap.oparent</groupId>
-               <artifactId>oparent</artifactId>
-               <version>2.1.0</version>
-       </parent>
+  <groupId>org.onap.dmaap.kafka</groupId>
+  <artifactId>kafka11aaf</artifactId>
+  <version>1.1.0-SNAPSHOT</version>
+  <name>dmaap-kafka</name>
 
-       <groupId>org.onap.dmaap.kafka</groupId>
-       <artifactId>kafka11aaf</artifactId>
-       <version>1.0.5-SNAPSHOT</version>
-       <name>dmaap-kafka</name>
-       <licenses>
-               <license>
-                       <name>Apache License Version 2.0</name>
-               </license>
-       </licenses>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <timestamp>${maven.build.timestamp}</timestamp>
+    <maven.build.timestamp.format>yyyyMMdd'T'HHmmss'Z'</maven.build.timestamp.format>
+    <sitePath>
+      /content/sites/site/org/onap/dmaap/kafka0111/${project.artifactId}/${project.version}
+    </sitePath>
+    <skip.docker.build>true</skip.docker.build>
+    <skip.docker.push>true</skip.docker.push>
+    <nexusproxy>https://nexus.onap.org</nexusproxy>
+    <docker.push.registry>nexus3.onap.org:10003</docker.push.registry>
+    <onap.nexus.url>https://nexus.onap.org</onap.nexus.url>
+    <sonar.language>java</sonar.language>
+    <sonar.skip>false</sonar.skip>
+    <sonar.surefire.reportsPath>${project.build.directory}/surefire-reports
+    </sonar.surefire.reportsPath>
+    <sonar.coverage.jacoco.xmlReportPaths>
+      ${project.reporting.outputDirectory}/jacoco-ut/jacoco.xml
+    </sonar.coverage.jacoco.xmlReportPaths>
+    <sonar.projectVersion>${project.version}</sonar.projectVersion>
+    <sonar.exclusions>**/gen/**,**/generated-sources/**,**/yang-gen**,**/pax/**
+    </sonar.exclusions>
+    <powermock.version>1.6.4</powermock.version>
+  </properties>
 
-       <developers>
-               <developer>
-                       <name>Sunil Unnava</name>
-                       <email></email>
-                       <organization>ATT</organization>
-                       <organizationUrl>www.att.com</organizationUrl>
-               </developer>
-       </developers>
+  <distributionManagement>
+    <site>
+      <id>ecomp-site</id>
+      <url>dav:${nexusproxy}${sitePath}</url>
+    </site>
+  </distributionManagement>
 
-       <build>
-               <!-- Copy files to docker-stage to be included in image -->
-        <resources>
-                       <resource>
-                               <targetPath>${basedir}/target/docker-stage</targetPath>
-                               <directory>${basedir}/src/main/docker</directory>
-                                       </resource>
-                        <resource>
-                               <directory>${basedir}/src/main/resources</directory>
-                       </resource>
-                
+  <build>
+    <!-- Copy files to docker-stage to be included in image -->
+    <resources>
+      <resource>
+        <targetPath>${basedir}/target/docker-stage</targetPath>
+        <directory>${basedir}/src/main/docker</directory>
+      </resource>
+      <resource>
+        <directory>${basedir}/src/main/resources</directory>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.jacoco</groupId>
+        <artifactId>jacoco-maven-plugin</artifactId>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <excludes>
+          </excludes>
+          <argLine>
+          </argLine>
+          <skipTests>false</skipTests>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-failsafe-plugin</artifactId>
+        <configuration>
+          <argLine>
+            --illegal-access=permit
+          </argLine>
+        </configuration>
+      </plugin>
+      <plugin>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target</outputDirectory>
+              <artifactItems>
+                <artifactItem>
+                  <groupId>org.onap.dmaap.messagerouter.mirroragent</groupId>
+                  <artifactId>dmaapMMAgent</artifactId>
+                  <version>1.1.2</version>
+                  <destFileName>dmaapMMAgent.jar</destFileName>
+                </artifactItem>
+              </artifactItems>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-resources-plugin</artifactId>
+        <version>2.7</version>
+        <executions>
+          <execution>
+            <id>copy-jar</id>
+            <phase>install</phase>
+            <goals>
+              <goal>copy-resources</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${basedir}/target/docker-stage</outputDirectory>
+              <resources>
+                <resource>
+                  <directory>${basedir}/target</directory>
+                  <includes>
+                    <include>dmaapMMAgent.jar</include>
+                    <include>kafka11aaf.jar</include>
+                  </includes>
+                </resource>
+              </resources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>3.2.4</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <finalName>${project.artifactId}</finalName>
+              <artifactSet>
+                <excludes>
+                </excludes>
+              </artifactSet>
+              <filters>
+                <filter>
+                  <artifact>*:*</artifact>
+                  <excludes>
+                    <exclude>META-INF/*.SF</exclude>
+                    <exclude>META-INF/*.DSA</exclude>
+                    <exclude>META-INF/*.RSA</exclude>
+                  </excludes>
+                </filter>
+              </filters>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>onap-java-style</id>
+            <configuration>
+              <consoleOutput>false</consoleOutput>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
 
-               </resources> 
-               <plugins>
-                       <plugin>
-                               <groupId>org.sonarsource.scanner.maven</groupId>
-                               <artifactId>sonar-maven-plugin</artifactId>
-                               <version>3.6.0.1398</version>
-                       </plugin>
-                   <plugin>
-                    <groupId>org.jacoco</groupId>
-                    <artifactId>jacoco-maven-plugin</artifactId>
-                    <executions>
-                        <execution>
-                            <id>prepare-agent</id>
-                            <goals>
-                                <goal>prepare-agent</goal>
-                            </goals>
-                        </execution>
-                        <execution>
-                            <id>report</id>
-                            <goals>
-                                <goal>report</goal>
-                            </goals>
-                            <configuration>
-                                <dataFile>${project.build.directory}/code-coverage/jacoco.exec</dataFile>
-                                <outputDirectory>${project.reporting.outputDirectory}/jacoco-ut</outputDirectory>
-                            </configuration>
-                        </execution>
-                    </executions>
-                </plugin>
-                       <plugin>
-                               <groupId>org.apache.maven.plugins</groupId>
-                               <artifactId>maven-surefire-plugin</artifactId>
-                               <version>2.12.4</version>
-                               <configuration>
-                                       <excludes>
-                                               <!-- exclude until junits updated <exclude>**/DME2*.java</exclude> -->
-                                       </excludes>
-                                       <!-- <skipTests>true</skipTests> -->
-                               </configuration>
-                       </plugin>
-                       <plugin>
-                               <groupId>org.apache.maven.plugins</groupId>
-                               <artifactId>maven-site-plugin</artifactId>
-                               <version>3.6</version>
-                               <dependencies>
-                                       <dependency>
-                                               <groupId>org.apache.maven.wagon</groupId>
-                                               <artifactId>wagon-webdav-jackrabbit</artifactId>
-                                               <version>2.10</version>
-                                       </dependency>
-                               </dependencies>
-                       </plugin>
-
-                       <!-- <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-jar-plugin</artifactId> 
-                               <version>3.0.2</version> </plugin> -->
-
-                       <plugin>
-                               <groupId>org.apache.maven.plugins</groupId>
-                               <artifactId>maven-source-plugin</artifactId>
-                               <version>3.0.0</version>
-                               <executions>
-                                       <execution>
-                                               <id>attach-sources</id>
-                                               <goals>
-                                                       <goal>jar-no-fork</goal>
-                                               </goals>
-                                       </execution>
-                               </executions>
-                       </plugin>
-                       <plugin>
-                               <groupId>org.apache.maven.plugins</groupId>
-                               <artifactId>maven-release-plugin</artifactId>
-                               <version>2.5.3</version>
-                               <configuration>
-                                       <autoVersionSubmodules>true</autoVersionSubmodules>
-                                       <checkModificationExcludes>
-                                       </checkModificationExcludes>
-                               </configuration>
-                               <dependencies>
-                                       <dependency>
-                                               <groupId>org.apache.maven.scm</groupId>
-                                               <artifactId>maven-scm-provider-gitexe</artifactId>
-                                               <version>1.9.4</version>
-                                       </dependency>
-                               </dependencies>
-                       </plugin>
-
-                       <plugin>
-                               <artifactId>maven-deploy-plugin</artifactId>
-                               <version>2.8</version>
-                               <executions>
-                                       <execution>
-                                               <id>default-deploy</id>
-                                               <phase>none</phase>
-                                               <configuration>
-                                                       <skip />
-                                               </configuration>
-                                       </execution>
-                               </executions>
-                               <configuration>
-                                       <skip />
-                               </configuration>
-                       </plugin>
-                       <plugin>
-                               <artifactId>maven-dependency-plugin</artifactId>
-                               <executions>
-                                       <execution>
-                                               <id>copy</id>
-                                               <phase>package</phase>
-                                               <goals>
-                                                       <goal>copy</goal>
-                                               </goals>
-                                               <configuration>
-                                                       <outputDirectory>target</outputDirectory>
-                                                       <encoding>UTF-8</encoding>
-                                                       <artifactItems>
-                                                               <artifactItem>
-                                                                       <groupId>org.onap.dmaap.messagerouter.mirroragent</groupId>
-                                                                       <artifactId>dmaapMMAgent</artifactId>
-                                                                       <version>1.1.2</version>
-                                                                       <destFileName>dmaapMMAgent.jar</destFileName>
-                                                               </artifactItem>
-                                                       </artifactItems>
-                                               </configuration>
-                                       </execution>
-                               </executions>
-                       </plugin>
-                       <plugin>
-                               <artifactId>maven-resources-plugin</artifactId>
-                                       <version>2.7</version>
-                                       <executions>
-                                               <execution>
-                                                       <id>copy-jar</id>
-                                                       <phase>install</phase>
-                                                       <goals>
-                                                               <goal>copy-resources</goal>
-                                                       </goals>
-                                                       <configuration>
-                                                               <outputDirectory>${basedir}/target/docker-stage</outputDirectory>
-                                                               <resources>
-                                                                       <resource>
-                                                                               <directory>${basedir}/target</directory>
-                                                                               <includes>
-                                                                                       <include>dmaapMMAgent.jar</include>
-                                                                                       <include>kafka11aaf-jar-with-dependencies.jar</include>
-                                                                               </includes>
-                                                                       </resource>
-                                                               </resources>
-                                                       </configuration>
-                                               </execution>
-                                 </executions>
-                       </plugin>
-                       <plugin>
-                               <groupId>org.apache.maven.plugins</groupId>
-                               <artifactId>maven-assembly-plugin</artifactId>
-                               <version>2.4.1</version>
-                               <configuration>
-                                       <!-- get all project dependencies -->
-                                       <descriptorRefs>
-                                               <descriptorRef>jar-with-dependencies</descriptorRef>
-                                       </descriptorRefs>
-                                       <!-- MainClass in mainfest make a executable jar -->
-                                       <finalName>kafka11aaf</finalName>
-                               </configuration>
-                               <executions>
-                                       <execution>
-                                               <id>make-assembly</id>
-                                               <!-- bind to the packaging phase -->
-                                               <phase>package</phase>
-                                               <goals>
-                                                       <goal>single</goal>
-                                               </goals>
-                                       </execution>
-                               </executions>
-                       </plugin>
-
-               </plugins>
-       </build>
-
-       <properties>
-               <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-               <timestamp>${maven.build.timestamp}</timestamp>
-               <maven.build.timestamp.format>yyyyMMdd'T'HHmmss'Z'</maven.build.timestamp.format>
-               <sitePath>/content/sites/site/org/onap/dmaap/kafka0111/${project.artifactId}/${project.version}</sitePath>
-               <skip.docker.build>true</skip.docker.build>
-               <skip.docker.push>true</skip.docker.push>
-               <nexusproxy>https://nexus.onap.org</nexusproxy>
-               <docker.push.registry>nexus3.onap.org:10003</docker.push.registry>
-               <onap.nexus.url>https://nexus.onap.org</onap.nexus.url>
-               <sonar.language>java</sonar.language>
-        <sonar.skip>false</sonar.skip>
-        <sonar.surefire.reportsPath>${project.build.directory}/surefire-reports</sonar.surefire.reportsPath>
-        <sonar.coverage.jacoco.xmlReportPaths>${project.reporting.outputDirectory}/jacoco-ut/jacoco.xml</sonar.coverage.jacoco.xmlReportPaths>
-        <sonar.projectVersion>${project.version}</sonar.projectVersion>
-       </properties>
-
-       <!-- Distribution management -->
-       <distributionManagement>
-               <site>
-                       <id>ecomp-site</id>
-                       <url>dav:${nexusproxy}${sitePath}</url>
-               </site>
-       </distributionManagement>
-
-       <dependencies>
-               <dependency>
-                       <groupId>org.onap.aaf.authz</groupId>
-                       <artifactId>aaf-cadi-aaf</artifactId>
-                       <version>2.1.2</version>
-               </dependency>
-               <dependency>
-                       <groupId>org.slf4j</groupId>
-                       <artifactId>slf4j-api</artifactId>
-                       <version>1.7.2</version>
-               </dependency>
-               <dependency>
-                       <groupId>org.slf4j</groupId>
-                       <artifactId>slf4j-simple</artifactId>
-                       <version>1.7.2</version>
-                       <scope>runtime</scope>
-               </dependency>
-               <dependency>
-                       <groupId>org.apache.kafka</groupId>
-                       <artifactId>kafka_2.11</artifactId>
-                       <version>2.3.0</version>
-                       <scope>provided</scope>
-               </dependency>
-
-               <dependency>
-                       <groupId>org.powermock</groupId>
-                       <artifactId>powermock-api-mockito</artifactId>
-                       <version>1.6.4</version>
-                       <scope>test</scope>
-               </dependency>
-
-               <dependency>
-                       <groupId>org.powermock</groupId>
-                       <artifactId>powermock-module-junit4</artifactId>
-                       <version>1.6.4</version>
-                       <scope>test</scope>
-               </dependency>
-
-               <dependency>
-                       <groupId>org.powermock</groupId>
-                       <artifactId>powermock-module-junit4-rule</artifactId>
-                       <version>1.6.4</version>
-                       <scope>test</scope>
-               </dependency>
-               <dependency>
-                       <groupId>org.mockito</groupId>
-                       <artifactId>mockito-core</artifactId>
-                       <version>1.10.19</version>
-                       <scope>test</scope>
-               </dependency>
-       </dependencies>
-
-       <profiles>
-               <profile>
-                       <id>docker</id>
-                       <properties>
-                               <skip.docker.build>false</skip.docker.build>
-                               <skip.docker.tag>false</skip.docker.tag>
-                               <skip.docker.push>false</skip.docker.push>
-                       </properties>
-                       <build>
-                               <plugins>
-                                       <plugin>
-                                               <groupId>org.codehaus.groovy.maven</groupId>
-                                               <artifactId>gmaven-plugin</artifactId>
-                                               <executions>
-                                                       <execution>
-                                                               <phase>validate</phase>
-                                                               <goals>
-                                                                       <goal>execute</goal>
-                                                               </goals>
-                                                               <configuration>
-                                                                       <properties>
-                                                                               <ver>${project.version}</ver>
-                                                                               <timestamp>${maven.build.timestamp}</timestamp>
-                                                                       </properties>
-                                                                       <source>
-                                                                               println project.properties['ver'];
-                                                                               if ( project.properties['ver'].endsWith("-SNAPSHOT") ) {
-                                                                               project.properties['dockertag1']=project.properties['ver'] +
-                                                                               "-latest";
-                                                                               project.properties['dockertag2']=project.properties['ver'] +
-                                                                               "-" + project.properties['timestamp'];
-                                                                               } else {
-                                                                               project.properties['dockertag1']=project.properties['ver'] +
-                                                                               "-STAGING-latest";
-                                                                               project.properties['dockertag2']=project.properties['ver'] +
-                                                                               "-STAGING-" + project.properties['timestamp'];
-                                                                               }
-                                                                               println 'docker tag 1: ' + project.properties['dockertag1'];
-                                                                               println 'docker tag 2: ' + project.properties['dockertag2'];
-                                                                       </source>
-                                                               </configuration>
-                                                       </execution>
-                                               </executions>
-                                       </plugin>
-                                       <!-- build docker image -->
-                                       <plugin>
-                                               <groupId>io.fabric8</groupId>
-                                               <artifactId>docker-maven-plugin</artifactId>
-                                               <version>0.28.0</version>
-                                               <configuration>
-                                                       <verbose>${docker.verbose}</verbose>
-                                                       <apiVersion>${docker.apiVersion}</apiVersion>
-                                                       <pullRegistry>${docker.pull.registry}</pullRegistry>
-                                                       <pushRegistry>${docker.push.registry}</pushRegistry>
-                                                       <images>
-                                                               <image>
-                                                                       <name>onap/dmaap/kafka111</name>
-                                                                       <build>
-                                                                               <cleanup>try</cleanup>
-                                                                               <dockerFileDir>${basedir}/target/docker-stage</dockerFileDir>
-                                                                               <dockerFile>Dockerfile</dockerFile>
-                                                                               <tags>
-                                                                                       <tag>${dockertag1}</tag>
-                                                                                       <tag>${dockertag2}</tag>
-                                                                               </tags>
-                                                                       </build>
-                                                               </image>
-                                                       </images>
-                                               </configuration>
-                                               <executions>
-                                                       <execution>
-                                                               <id>generate-images</id>
-                                                               <phase>install</phase>
-                                                               <goals>
-                                                                       <goal>build</goal>
-                                                               </goals>
-                                                       </execution>
-                                                       <execution>
-                                                               <id>push-images</id>
-                                                               <phase>deploy</phase>
-                                                               <goals>
-                                                                       <goal>push</goal>
-                                                               </goals>
-                                                       </execution>
-                                             </executions>
-                                       </plugin>
-                               </plugins>
-                       </build>
-               </profile>
-
-       </profiles>
+  <dependencies>
+    <dependency>
+      <groupId>org.onap.aaf.authz</groupId>
+      <artifactId>aaf-cadi-aaf</artifactId>
+      <version>2.7.4</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>kafka_2.13</artifactId>
+      <version>2.8.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-api-mockito</artifactId>
+      <version>${powermock.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-module-junit4</artifactId>
+      <version>${powermock.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-module-junit4-rule</artifactId>
+      <version>${powermock.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <version>1.10.19</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
 
+  <profiles>
+    <profile>
+      <id>docker</id>
+      <properties>
+        <skip.docker.build>false</skip.docker.build>
+        <skip.docker.tag>false</skip.docker.tag>
+        <skip.docker.push>false</skip.docker.push>
+      </properties>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.codehaus.groovy.maven</groupId>
+            <artifactId>gmaven-plugin</artifactId>
+            <version>1.0</version>
+            <executions>
+              <execution>
+                <phase>validate</phase>
+                <goals>
+                  <goal>execute</goal>
+                </goals>
+                <configuration>
+                  <properties>
+                    <ver>${project.version}</ver>
+                    <timestamp>${maven.build.timestamp}</timestamp>
+                  </properties>
+                  <source>
+                    println project.properties['ver'];
+                    if (project.properties['ver'].endsWith("-SNAPSHOT")) {
+                      project.properties['dockertag1'] = project.properties['ver'] +
+                        "-latest";
+                      project.properties['dockertag2'] = project.properties['ver'] +
+                        "-" + project.properties['timestamp'];
+                    } else {
+                      project.properties['dockertag1'] = project.properties['ver'] +
+                        "-STAGING-latest";
+                      project.properties['dockertag2'] = project.properties['ver'] +
+                        "-STAGING-" + project.properties['timestamp'];
+                    }
+                    println 'docker tag 1: ' + project.properties['dockertag1'];
+                    println 'docker tag 2: ' + project.properties['dockertag2'];
+                  </source>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>io.fabric8</groupId>
+            <artifactId>docker-maven-plugin</artifactId>
+            <version>0.28.0</version>
+            <configuration>
+              <pullRegistry>${docker.pull.registry}</pullRegistry>
+              <pushRegistry>${docker.push.registry}</pushRegistry>
+              <images>
+                <image>
+                  <name>onap/dmaap/kafka111</name>
+                  <build>
+                    <cleanup>try</cleanup>
+                    <dockerFileDir>${basedir}/target/docker-stage</dockerFileDir>
+                    <dockerFile>Dockerfile</dockerFile>
+                    <tags>
+                      <tag>${dockertag1}</tag>
+                      <tag>${dockertag2}</tag>
+                    </tags>
+                  </build>
+                </image>
+              </images>
+            </configuration>
+            <executions>
+              <execution>
+                <id>generate-images</id>
+                <phase>install</phase>
+                <goals>
+                  <goal>build</goal>
+                </goals>
+              </execution>
+              <execution>
+                <id>push-images</id>
+                <phase>deploy</phase>
+                <goals>
+                  <goal>push</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
 </project>
index e3becb8..930f5ba 100644 (file)
@@ -1,39 +1,26 @@
-FROM confluentinc/cp-base:5.3.1
-
-# allow arg override of required env params
-ARG KAFKA_ZOOKEEPER_CONNECT
-ENV KAFKA_ZOOKEEPER_CONNECT=${KAFKA_ZOOKEEPER_CONNECT}
-ARG KAFKA_ADVERTISED_LISTENERS
-ENV KAFKA_ADVERTISED_LISTENERS=${KAFKA_ADVERTISED_LISTENERS}
+FROM confluentinc/cp-kafka:6.2.0
 
 ENV COMPONENT=kafka \
-    KAFKA_USER=mrkafka
-
-RUN echo "===> installing ${COMPONENT}..." \
-    && wget -qO - http://packages.confluent.io/deb/3.0/archive.key | apt-key add - \
-    && echo "deb [arch=amd64] http://packages.confluent.io/deb/3.0 stable main" | tee -a /etc/apt/sources.list \
-    && apt-key update && apt-get update && apt-get install -y confluent-kafka-2.11 --force-yes \
-    \
-    && echo "===> clean up ..."  \
-    && apt-get autoremove -y && apt-get clean && rm -rf /tmp/* /var/lib/apt/lists/* \
-    \
-    && echo "===> Setting up ${COMPONENT} dirs..." \
-    && mkdir -p /var/lib/${COMPONENT}/data /etc/${COMPONENT}/secrets/cert /etc/${COMPONENT}/secrets/jaas /etc/${COMPONENT}/data  /var/log/kafka /var/log/confluent \
-    && chmod -R ag+w /etc/${COMPONENT} /var/lib/${COMPONENT}/data   /etc/${COMPONENT}/secrets /etc/${COMPONENT}/data  /var/log/kafka /var/log/confluent \
-    && chown -R root:root /var/log/kafka /var/log/confluent /var/lib/kafka /var/lib/zookeeper 
-
-COPY include/etc/confluent/docker /etc/confluent/docker
-RUN chmod -R +x /etc/confluent/docker
+    KAFKA_USER=mrkafka \
+    KAFKA_GROUP=onap
 
 COPY org.onap.dmaap.mr.trust.jks \
      org.onap.dmaap.mr.p12 \
      org.onap.dmaap.mr.keyfile \
      /etc/${COMPONENT}/secrets/cert/
 
-COPY  kafka11aaf-jar-with-dependencies.jar /usr/share/java/${COMPONENT}/
+USER root
+
+RUN userdel -r appuser && groupadd $KAFKA_GROUP && useradd $KAFKA_USER -u 1000 -G 1000,$KAFKA_GROUP
+
+WORKDIR /home/$KAFKA_USER
 
+COPY include/etc/confluent/docker/* /etc/confluent/docker/
+RUN chmod -R +x /etc/confluent/docker \
+&& mkdir -p /etc/${COMPONENT}/data /etc/${COMPONENT}/secrets \
+&& chown -R $KAFKA_USER:$KAFKA_GROUP /var/lib/${COMPONENT} /etc/${COMPONENT} /etc/confluent/docker /var/log/${COMPONENT} /var/lib/${COMPONENT} /var/log/confluent
 
-RUN useradd  -u 1000  -g 0 $KAFKA_USER
+COPY kafka11aaf.jar /usr/share/java/${COMPONENT}/
 
 USER $KAFKA_USER
 
diff --git a/src/main/docker/broker-list.sh b/src/main/docker/broker-list.sh
deleted file mode 100644 (file)
index 7f04639..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-CONTAINERS=$(docker ps | grep 9092 | awk '{print $1}')
-BROKERS=$(for CONTAINER in $CONTAINERS; do docker port $CONTAINER 9092 | sed -e "s/0.0.0.0:/$HOST_IP:/g"; done)
-echo $BROKERS | sed -e 's/ /,/g'
diff --git a/src/main/docker/cadi.properties b/src/main/docker/cadi.properties
deleted file mode 100644 (file)
index 15dcb4d..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
-#aaf_locate_url=https://aaf-onap-test.osaaf.org:8095
-aaf_url=https://AAF_LOCATE_URL/onap.org.osaaf.aaf.service:2.1
-aaf_env=DEV
-aaf_lur=org.onap.aaf.cadi.aaf.v2_0.AAFLurPerm
-
-cadi_truststore=/etc/kafka/secrets/cert/org.onap.dmaap.mr.trust.jks
-cadi_truststore_password=enc:7U4uOSdXQblnjiDsrqyjXugG4nChBXBBjqZ5amRaCq5yeYzbC9hQpH7BwUzYTa59
-
-cadi_keyfile=/etc/kafka/secrets/cert/org.onap.dmaap.mr.keyfile
-
-cadi_alias=dmaapmr@mr.dmaap.onap.org
-cadi_keystore=/etc/kafka/secrets/cert/org.onap.dmaap.mr.p12
-cadi_keystore_password=enc:NHmvDrri9DSkZJ_-GLuOM0e-UGi_RpVgj9xYdpAamEILHm7I2E6rjbOif2G94UYW
-cadi_x509_issuers=CN=intermediateCA_1, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_7, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_9, OU=OSAAF, O=ONAP, C=US
-
-
-cadi_loglevel=INFO
-cadi_protocols=TLSv1.1,TLSv1.2
-cadi_latitude=37.78187
-cadi_longitude=-122.26147
diff --git a/src/main/docker/consumer.properties b/src/main/docker/consumer.properties
deleted file mode 100644 (file)
index 5ec6df2..0000000
+++ /dev/null
@@ -1,41 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# see kafka.consumer.ConsumerConfig for more details
-
-# Zookeeper connection string
-# comma separated host:port pairs, each corresponding to a zk
-# server. e.g. "127.0.0.1:3000,127.0.0.1:3001,127.0.0.1:3002"
-#zookeeper.connect=127.0.0.1:2181
-
-# timeout in ms for connecting to zookeeper
-#zookeeper.connection.timeout.ms=6000
-
-#consumer group id
-group.id=test-consumer-group
-
-#New MirrorMaker properties for Kafka 0.11 version
-#Kafka 0.11 uses Kafka to manage consumers instead of ZK.
-bootstrap.servers=127.0.0.1:9092
-client.id=mirror_maker_consumer
-
-#Following properties are required as MR 1.2 will use Kafka 0.11 with AAF Auth wrapper.
-security.protocol=SASL_PLAINTEXT
-sasl.mechanism=PLAIN
-#java.security.auth.login.config=/opt/app/dmaap/mmagent/etc/kafka_client_jaas.conf
-sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin_secret";
-
-
-#consumer timeout:
-#consumer.timeout.ms=5000
diff --git a/src/main/docker/create-topics.sh b/src/main/docker/create-topics.sh
deleted file mode 100644 (file)
index 34945b3..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-
-
-if [[ -z "$START_TIMEOUT" ]]; then
-    START_TIMEOUT=600
-fi
-
-start_timeout_exceeded=false
-count=0
-step=10
-while netstat -lnt | awk '$4 ~ /:'$KAFKA_PORT'$/ {exit 1}'; do
-    echo "waiting for kafka to be ready"
-    sleep $step;
-    count=$(expr $count + $step)
-    if [ $count -gt $START_TIMEOUT ]; then
-        start_timeout_exceeded=true
-        break
-    fi
-done
-
-if $start_timeout_exceeded; then
-    echo "Not able to auto-create topic (waited for $START_TIMEOUT sec)"
-    exit 1
-fi
-
-if [[ -n $KAFKA_CREATE_TOPICS ]]; then
-    IFS=','; for topicToCreate in $KAFKA_CREATE_TOPICS; do
-        echo "creating topics: $topicToCreate"
-        IFS=':' read -a topicConfig <<< "$topicToCreate"
-        if [ ${topicConfig[3]} ]; then
-          JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partitions ${topicConfig[1]} --topic "${topicConfig[0]}" --config cleanup.policy="${topicConfig[3]}" --if-not-exists
-        else
-          JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partitions ${topicConfig[1]} --topic "${topicConfig[0]}" --if-not-exists
-        fi
-    done
-fi
diff --git a/src/main/docker/download-kafka.sh b/src/main/docker/download-kafka.sh
deleted file mode 100644 (file)
index fcc3be1..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-wget https://archive.apache.org/dist/kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -O "/tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
\ No newline at end of file
index 4bc99f3..09160f0 100644 (file)
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 #
-# Copyright 2016 Confluent Inc.
+# Copyright 2020 Confluent Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -20,5 +20,10 @@ export KAFKA_DATA_DIRS=${KAFKA_DATA_DIRS:-"/var/lib/kafka/data"}
 echo "===> Check if $KAFKA_DATA_DIRS is writable ..."
 dub path "$KAFKA_DATA_DIRS" writable
 
-echo "===> Check if Zookeeper is healthy ..."
-cub zk-ready "$KAFKA_ZOOKEEPER_CONNECT" "${KAFKA_CUB_ZK_TIMEOUT:-40}"
+if [[ -n "${KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE-}" ]] && [[ $KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE == "true" ]]
+then
+    echo "===> Skipping Zookeeper health check for SSL connections..."
+else
+    echo "===> Check if Zookeeper is healthy ..."
+    cub zk-ready "$KAFKA_ZOOKEEPER_CONNECT" "${KAFKA_CUB_ZK_TIMEOUT:-40}"
+fi
\ No newline at end of file
index 242e393..5eeaea3 100644 (file)
@@ -7,14 +7,27 @@
                          'KAFKA_GC_LOG_OPTS',
                          'KAFKA_LOG4J_ROOT_LOGLEVEL',
                          'KAFKA_LOG4J_LOGGERS',
-                         'KAFKA_TOOLS_LOG4J_LOGLEVEL']
+                         'KAFKA_TOOLS_LOG4J_LOGLEVEL',
+                         'KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET']
 -%}
+
+{# properties that don't fit the standard format #}
+{% set other_props = {
+  'KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET' : 'zookeeper.clientCnxnSocket'
+ } -%}
+
 {% set kafka_props = env_to_props('KAFKA_', '', exclude=excluded_props) -%}
-{% for name, value in kafka_props.iteritems() -%}
+{% for name, value in kafka_props.items() -%}
 {{name}}={{value}}
 {% endfor -%}
 
+{% for k, property in other_props.items() -%}
+{% if env.get(k) != None -%}
+{{property}}={{env[k]}}
+{% endif -%}
+{% endfor -%}
+
 {% set confluent_support_props = env_to_props('CONFLUENT_SUPPORT_', 'confluent.support.') -%}
-{% for name, value in confluent_support_props.iteritems() -%}
+{% for name, value in confluent_support_props.items() -%}
 {{name}}={{value}}
 {% endfor -%}
index bdd6e5b..445a05c 100644 (file)
@@ -1,4 +1,4 @@
-:x
+
 log4j.rootLogger={{ env["KAFKA_LOG4J_ROOT_LOGLEVEL"] | default('INFO') }}, stdout
 
 log4j.appender.stdout=org.apache.log4j.ConsoleAppender
@@ -13,8 +13,7 @@ log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
   'kafka.controller': 'TRACE',
   'kafka.log.LogCleaner': 'INFO',
   'state.change.logger': 'TRACE',
-  'kafka.authorizer.logger': 'WARN',
-  'org.onap': 'INFO'
+  'kafka.authorizer.logger': 'WARN'
   } -%}
 
 
@@ -22,6 +21,6 @@ log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
 {% set loggers = parse_log4j_loggers(env['KAFKA_LOG4J_LOGGERS'], loggers) %}
 {% endif %}
 
-{% for logger,loglevel in loggers.iteritems() %}
+{% for logger,loglevel in loggers.items() %}
 log4j.logger.{{logger}}={{loglevel}}
 {% endfor %}
index 4501e22..91ac16b 100644 (file)
@@ -26,7 +26,7 @@ if [ $# -ne 0 ]; then
 fi
 
 echo "===> ENV Variables ..."
-show_env
+env
 
 echo "===> User"
 id
diff --git a/src/main/docker/kafka-run-class.sh b/src/main/docker/kafka-run-class.sh
deleted file mode 100644 (file)
index 481ebe1..0000000
+++ /dev/null
@@ -1,245 +0,0 @@
-#!/bin/bash
-
-if [ $# -lt 1 ];
-then
-  echo "USAGE: $0 [-daemon] [-name servicename] [-loggc] classname [opts]"
-  exit 1
-fi
-
-# CYGINW == 1 if Cygwin is detected, else 0.
-if [[ $(uname -a) =~ "CYGWIN" ]]; then
-  CYGWIN=1
-else
-  CYGWIN=0
-fi
-
-if [ -z "$INCLUDE_TEST_JARS" ]; then
-  INCLUDE_TEST_JARS=false
-fi
-
-# Exclude jars not necessary for running commands.
-regex="(-(test|src|scaladoc|javadoc)\.jar|jar.asc)$"
-should_include_file() {
-  if [ "$INCLUDE_TEST_JARS" = true ]; then
-    return 0
-  fi
-  file=$1
-  if [ -z "$(echo "$file" | egrep "$regex")" ] ; then
-    return 0
-  else
-    return 1
-  fi
-}
-
-base_dir=$(dirname $0)/..
-
-if [ -z "$SCALA_VERSION" ]; then
-  SCALA_VERSION=2.11.11
-fi
-
-if [ -z "$SCALA_BINARY_VERSION" ]; then
-  SCALA_BINARY_VERSION=$(echo $SCALA_VERSION | cut -f 1-2 -d '.')
-fi
-
-# run ./gradlew copyDependantLibs to get all dependant jars in a local dir
-shopt -s nullglob
-for dir in "$base_dir"/core/build/dependant-libs-${SCALA_VERSION}*;
-do
-  if [ -z "$CLASSPATH" ] ; then
-    CLASSPATH="$dir/*"
-  else
-    CLASSPATH="$CLASSPATH:$dir/*"
-  fi
-done
-
-for file in "$base_dir"/examples/build/libs/kafka-examples*.jar;
-do
-  if should_include_file "$file"; then
-    CLASSPATH="$CLASSPATH":"$file"
-  fi
-done
-
-for file in "$base_dir"/clients/build/libs/kafka-clients*.jar;
-do
-  if should_include_file "$file"; then
-    CLASSPATH="$CLASSPATH":"$file"
-  fi
-done
-
-for file in "$base_dir"/streams/build/libs/kafka-streams*.jar;
-do
-  if should_include_file "$file"; then
-    CLASSPATH="$CLASSPATH":"$file"
-  fi
-done
-
-for file in "$base_dir"/streams/examples/build/libs/kafka-streams-examples*.jar;
-do
-  if should_include_file "$file"; then
-    CLASSPATH="$CLASSPATH":"$file"
-  fi
-done
-
-for file in "$base_dir"/streams/build/dependant-libs-${SCALA_VERSION}/rocksdb*.jar;
-do
-  CLASSPATH="$CLASSPATH":"$file"
-done
-
-for file in "$base_dir"/tools/build/libs/kafka-tools*.jar;
-do
-  if should_include_file "$file"; then
-    CLASSPATH="$CLASSPATH":"$file"
-  fi
-done
-
-for dir in "$base_dir"/tools/build/dependant-libs-${SCALA_VERSION}*;
-do
-  CLASSPATH="$CLASSPATH:$dir/*"
-done
-
-for cc_pkg in "api" "transforms" "runtime" "file" "json" "tools"
-do
-  for file in "$base_dir"/connect/${cc_pkg}/build/libs/connect-${cc_pkg}*.jar;
-  do
-    if should_include_file "$file"; then
-      CLASSPATH="$CLASSPATH":"$file"
-    fi
-  done
-  if [ -d "$base_dir/connect/${cc_pkg}/build/dependant-libs" ] ; then
-    CLASSPATH="$CLASSPATH:$base_dir/connect/${cc_pkg}/build/dependant-libs/*"
-  fi
-done
-
-# classpath addition for release
-for file in "$base_dir"/libs/*;
-do
-  if should_include_file "$file"; then
-    CLASSPATH="$CLASSPATH":"$file"
-  fi
-done
-
-for file in "$base_dir"/core/build/libs/kafka_${SCALA_BINARY_VERSION}*.jar;
-do
-  if should_include_file "$file"; then
-    CLASSPATH="$CLASSPATH":"$file"
-  fi
-done
-shopt -u nullglob
-
-# JMX settings
-if [ -z "$KAFKA_JMX_OPTS" ]; then
-  KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false  -Dcom.sun.management.jmxremote.ssl=false "
-fi
-
-# JMX port to use
-if [  $JMX_PORT ]; then
-  KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT "
-fi
-
-# Log directory to use
-if [ "x$LOG_DIR" = "x" ]; then
-  LOG_DIR="$base_dir/logs"
-fi
-
-# Log4j settings
-if [ -z "$KAFKA_LOG4J_OPTS" ]; then
-  # Log to console. This is a tool.
-  LOG4J_DIR="$base_dir/config/tools-log4j.properties"
-  # If Cygwin is detected, LOG4J_DIR is converted to Windows format.
-  (( CYGWIN )) && LOG4J_DIR=$(cygpath --path --mixed "${LOG4J_DIR}")
-  KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:${LOG4J_DIR}"
-else
-  # create logs directory
-  if [ ! -d "$LOG_DIR" ]; then
-    mkdir -p "$LOG_DIR"
-  fi
-fi
-
-# If Cygwin is detected, LOG_DIR is converted to Windows format.
-(( CYGWIN )) && LOG_DIR=$(cygpath --path --mixed "${LOG_DIR}")
-KAFKA_LOG4J_OPTS="-Dkafka.logs.dir=$LOG_DIR $KAFKA_LOG4J_OPTS"
-
-# Generic jvm settings you want to add
-if [ -z "$KAFKA_OPTS" ]; then
-  KAFKA_OPTS=""
-fi
-
-# Set Debug options if enabled
-if [ "x$KAFKA_DEBUG" != "x" ]; then
-
-    # Use default ports
-    DEFAULT_JAVA_DEBUG_PORT="5005"
-
-    if [ -z "$JAVA_DEBUG_PORT" ]; then
-        JAVA_DEBUG_PORT="$DEFAULT_JAVA_DEBUG_PORT"
-    fi
-
-    # Use the defaults if JAVA_DEBUG_OPTS was not set
-    DEFAULT_JAVA_DEBUG_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=${DEBUG_SUSPEND_FLAG:-n},address=$JAVA_DEBUG_PORT"
-    if [ -z "$JAVA_DEBUG_OPTS" ]; then
-        JAVA_DEBUG_OPTS="$DEFAULT_JAVA_DEBUG_OPTS"
-    fi
-
-    echo "Enabling Java debug options: $JAVA_DEBUG_OPTS"
-    KAFKA_OPTS="$JAVA_DEBUG_OPTS $KAFKA_OPTS"
-fi
-
-# Which java to use
-if [ -z "$JAVA_HOME" ]; then
-  JAVA="java"
-else
-  JAVA="$JAVA_HOME/bin/java"
-fi
-
-# Memory options
-if [ -z "$KAFKA_HEAP_OPTS" ]; then
-  KAFKA_HEAP_OPTS="-Xmx256M"
-fi
-
-# JVM performance options
-if [ -z "$KAFKA_JVM_PERFORMANCE_OPTS" ]; then
-  KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+DisableExplicitGC -Djava.awt.headless=true"
-fi
-
-
-while [ $# -gt 0 ]; do
-  COMMAND=$1
-  case $COMMAND in
-    -name)
-      DAEMON_NAME=$2
-      CONSOLE_OUTPUT_FILE=$LOG_DIR/$DAEMON_NAME.out
-      shift 2
-      ;;
-    -loggc)
-      if [ -z "$KAFKA_GC_LOG_OPTS" ]; then
-        GC_LOG_ENABLED="true"
-      fi
-      shift
-      ;;
-    -daemon)
-      DAEMON_MODE="true"
-      shift
-      ;;
-    *)
-      break
-      ;;
-  esac
-done
-
-# GC options
-GC_FILE_SUFFIX='-gc.log'
-GC_LOG_FILE_NAME=''
-if [ "x$GC_LOG_ENABLED" = "xtrue" ]; then
-  GC_LOG_FILE_NAME=$DAEMON_NAME$GC_FILE_SUFFIX
-  KAFKA_GC_LOG_OPTS="-Xloggc:$LOG_DIR/$GC_LOG_FILE_NAME -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=100M"
-fi
-
-# If Cygwin is detected, classpath is converted to Windows format.
-(( CYGWIN )) && CLASSPATH=$(cygpath --path --mixed "${CLASSPATH}")
-
-# Launch mode
-if [ "x$DAEMON_MODE" = "xtrue" ]; then
-  nohup $JAVA $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp $CLASSPATH $KAFKA_OPTS "$@" > "$CONSOLE_OUTPUT_FILE" 2>&1 < /dev/null &
-else
-  exec $JAVA $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS $1 -cp $CLASSPATH $KAFKA_OPTS "$@"
-fi
diff --git a/src/main/docker/kafka_server_jaas.conf b/src/main/docker/kafka_server_jaas.conf
deleted file mode 100644 (file)
index 3e69fc6..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-KafkaServer {
-  org.onap.dmaap.kafkaAuthorize.PlainLoginModule1 required
-  username="admin"
-  password="admin_secret"
-  user_admin="admin_secret";
-};
-Client {
-   org.apache.zookeeper.server.auth.DigestLoginModule required
-   username="kafka"
-   password="kafka_secret";
- };
-
diff --git a/src/main/docker/mmagent.config b/src/main/docker/mmagent.config
deleted file mode 100644 (file)
index 66984ca..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-kafkahome=/opt/kafka
-topicURL=http://message-router:3904
-topicname=org.onap.dmaap.mr.mirrormakeragent
-mechid=demo@people.osaaf.org
-password=YKCAVhSQ+nedsh1Nry57l19jJQSnk8gs
\ No newline at end of file
diff --git a/src/main/docker/producer.properties b/src/main/docker/producer.properties
deleted file mode 100644 (file)
index 78ff7c7..0000000
+++ /dev/null
@@ -1,70 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# see kafka.producer.ProducerConfig for more details
-
-############################# Producer Basics #############################
-
-# list of brokers used for bootstrapping knowledge about the rest of the cluster
-# format: host1:port1,host2:port2 ...
-#metadata.broker.list=172.16.96.14:9092
-
-# name of the partitioner class for partitioning events; default partition spreads data randomly
-#partitioner.class=
-
-# specifies whether the messages are sent asynchronously (async) or synchronously (sync)
-producer.type=sync
-
-# specify the compression codec for all data generated: none, gzip, snappy, lz4.
-# the old config values work as well: 0, 1, 2, 3 for none, gzip, snappy, lz4, respectively
-#compression.codec=none
-
-# message encoder
-#serializer.class=kafka.serializer.DefaultEncoder
-
-# allow topic level compression
-#compressed.topics=
-
-#New MirrorMaker properties for Kafka 0.11 version
-#list of brokers used for bootstrapping knowledge about the rest of the cluster
-# format: host1:port1,host2:port2 ...
-bootstrap.servers=172.16.96.14:9092
-
-#Following properties are required as MR 1.2 will use Kafka 0.11 with AAF Auth wrapper.
-security.protocol=SASL_PLAINTEXT
-sasl.mechanism=PLAIN
-#java.security.auth.login.config=/opt/app/dmaap/mmagent/etc/kafka_client_jaas.conf
-sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin_secret";
-
-#Producer
-compression.type=none
-#serializer.class=kafka.serializer.DefaultEncoder
-batch.size=100
-client.id=mirror_maker_producer
-
-############################# Async Producer #############################
-# maximum time, in milliseconds, for buffering data on the producer queue
-#queue.buffering.max.ms=
-
-# the maximum size of the blocking queue for buffering on the producer
-#queue.buffering.max.messages=
-
-# Timeout for event enqueue:
-# 0: events will be enqueued immediately or dropped if the queue is full
-# -ve: enqueue will block indefinitely if the queue is full
-# +ve: enqueue will block up to this many milliseconds if the queue is full
-#queue.enqueue.timeout.ms=
-
-# the number of messages batched at the producer
-#batch.num.messages=
diff --git a/src/main/docker/start-kafka.sh b/src/main/docker/start-kafka.sh
deleted file mode 100644 (file)
index 6c58b74..0000000
+++ /dev/null
@@ -1,149 +0,0 @@
-#!/bin/bash
-
-if [[ -z "$KAFKA_PORT" ]]; then
-    export KAFKA_PORT=9092
-fi
-
-create-topics.sh &
-
-if [[ -z "$KAFKA_ADVERTISED_PORT" && \
-  -z "$KAFKA_LISTENERS" && \
-  -z "$KAFKA_ADVERTISED_LISTENERS" && \
-  -S /var/run/docker.sock ]]; then
-    export KAFKA_ADVERTISED_PORT=$(docker port `hostname` $KAFKA_PORT | sed -r "s/.*:(.*)/\1/g")
-fi
-if [[ -z "$KAFKA_BROKER_ID" ]]; then
-    if [[ -n "$BROKER_ID_COMMAND" ]]; then
-        export KAFKA_BROKER_ID=$(eval $BROKER_ID_COMMAND)
-    else
-        # By default auto allocate broker ID
-        export KAFKA_BROKER_ID=-1
-    fi
-fi
-if [[ -z "$KAFKA_LOG_DIRS" ]]; then
-    export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME"
-fi
-if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then
-    export KAFKA_ZOOKEEPER_CONNECT=$(env | grep ZK.*PORT_2181_TCP= | sed -e 's|.*tcp://||' | paste -sd ,)
-fi
-
-if [[ -n "$KAFKA_HEAP_OPTS" ]]; then
-    sed -r -i "s/(export KAFKA_HEAP_OPTS)=\"(.*)\"/\1=\"$KAFKA_HEAP_OPTS\"/g" $KAFKA_HOME/bin/kafka-server-start.sh
-    unset KAFKA_HEAP_OPTS
-fi
-
-if [[ -z "$KAFKA_ADVERTISED_HOST_NAME" && -n "$HOSTNAME_COMMAND" ]]; then
-    export KAFKA_ADVERTISED_HOST_NAME=$(eval $HOSTNAME_COMMAND)
-fi
-
-#if [[ -n "$KAFKA_LISTENER_SECURITY_PROTOCOL_MAP" ]]; then
-#  if [[ -n "$KAFKA_ADVERTISED_PORT" && -n "$KAFKA_ADVERTISED_PROTOCOL_NAME" ]]; then
-#    export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_PROTOCOL_NAME}://${KAFKA_ADVERTISED_HOST_NAME-}:${KAFKA_ADVERTISED_PORT}"
-#    export KAFKA_LISTENERS="$KAFKA_ADVERTISED_PROTOCOL_NAME://:$KAFKA_ADVERTISED_PORT"
-#  fi
-
-  if [[ -z "$KAFKA_PROTOCOL_NAME" ]]; then
-    export KAFKA_PROTOCOL_NAME="${KAFKA_ADVERTISED_PROTOCOL_NAME}"
-  fi
-
-  if [[ -n "$KAFKA_PORT" && -n "$KAFKA_PROTOCOL_NAME" ]]; then
-    export ADD_LISTENER="${KAFKA_PROTOCOL_NAME}://${KAFKA_HOST_NAME-}:${KAFKA_PORT}"
-  fi
-
-  if [[ -z "$KAFKA_INTER_BROKER_LISTENER_NAME" ]]; then
-    export KAFKA_INTER_BROKER_LISTENER_NAME=$KAFKA_PROTOCOL_NAME
-  fi
-#else
-   #DEFAULT LISTENERS 
-#   export KAFKA_ADVERTISED_LISTENERS="PLAINTEXT://${KAFKA_ADVERTISED_HOST_NAME-}:${KAFKA_ADVERTISED_PORT-$KAFKA_PORT}"
-#   export KAFKA_LISTENERS="PLAINTEXT://${KAFKA_HOST_NAME-}:${KAFKA_PORT-9092}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -n "$KAFKA_LISTENERS" ]]; then
-#  export KAFKA_LISTENERS="${KAFKA_LISTENERS},${ADD_LISTENER}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -z "$KAFKA_LISTENERS" ]]; then
-#  export KAFKA_LISTENERS="${ADD_LISTENER}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then
-#  export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_LISTENERS},${ADD_LISTENER}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -z "$KAFKA_ADVERTISED_LISTENERS" ]]; then
-# export KAFKA_ADVERTISED_LISTENERS="${ADD_LISTENER}"
-#fi
-
-if [[ -n "$KAFKA_INTER_BROKER_LISTENER_NAME" && ! "$KAFKA_INTER_BROKER_LISTENER_NAME"X = "$KAFKA_PROTOCOL_NAME"X ]]; then
-   if [[ -n "$KAFKA_INTER_BROKER_PORT" ]]; then
-      export KAFKA_INTER_BROKER_PORT=$(( $KAFKA_PORT + 1 ))
-   fi
- #export INTER_BROKER_LISTENER="${KAFKA_INTER_BROKER_LISTENER_NAME}://:${KAFKA_INTER_BROKER_PORT}"
- #export KAFKA_LISTENERS="${KAFKA_LISTENERS},${INTER_BROKER_LISTENER}"
- #export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_LISTENERS},${INTER_BROKER_LISTENER}"
-   unset KAFKA_INTER_BROKER_PORT
-   unset KAFKA_SECURITY_INTER_BROKER_PROTOCOL
-   unset INTER_BROKER_LISTENER
-fi
-
-if [[ -n "$RACK_COMMAND" && -z "$KAFKA_BROKER_RACK" ]]; then
-    export KAFKA_BROKER_RACK=$(eval $RACK_COMMAND)
-fi
-
-#Issue newline to config file in case there is not one already
-echo -e "\n" >> $KAFKA_HOME/config/server.properties
-
-unset KAFKA_CREATE_TOPICS
-unset KAFKA_ADVERTISED_PROTOCOL_NAME
-unset KAFKA_PROTOCOL_NAME
-
-if [[ -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then
-  unset KAFKA_ADVERTISED_PORT
-  unset KAFKA_ADVERTISED_HOST_NAME
-fi
-
-if [[ -n "$KAFKA_LISTENERS" ]]; then
-  unset KAFKA_PORT
-  unset KAFKA_HOST_NAME
-fi
-
-for VAR in `env`
-do
-  if [[ $VAR =~ ^KAFKA_ && ! $VAR =~ ^KAFKA_HOME ]]; then
-    kafka_name=`echo "$VAR" | sed -r "s/KAFKA_(.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
-    env_var=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
-    if egrep -q "(^|^#)$kafka_name=" $KAFKA_HOME/config/server.properties; then
-        sed -r -i "s@(^|^#)($kafka_name)=(.*)@\2=${!env_var}@g" $KAFKA_HOME/config/server.properties #note that no config values may contain an '@' char
-    else
-        echo "$kafka_name=${!env_var}" >> $KAFKA_HOME/config/server.properties
-    fi
-  fi
-
-  if [[ $VAR =~ ^LOG4J_ ]]; then
-    log4j_name=`echo "$VAR" | sed -r "s/(LOG4J_.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
-    log4j_env=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
-    if egrep -q "(^|^#)$log4j_name=" $KAFKA_HOME/config/log4j.properties; then
-        sed -r -i "s@(^|^#)($log4j_name)=(.*)@\2=${!log4j_env}@g" $KAFKA_HOME/config/log4j.properties #note that no config values may contain an '@' char
-    else
-        echo "$log4j_name=${!log4j_env}" >> $KAFKA_HOME/config/log4j.properties
-    fi
-  fi
-done
-
-if [[ -n "$CUSTOM_INIT_SCRIPT" ]] ; then
-  eval $CUSTOM_INIT_SCRIPT
-fi
-cp /tmp/kafka11aaf-jar-with-dependencies.jar $KAFKA_HOME/libs
-cp /tmp/org.onap.dmaap.mr.keyfile  $KAFKA_HOME/config
-cp /tmp/org.onap.dmaap.mr.trust.jks $KAFKA_HOME/config
-cp /tmp/org.onap.dmaap.mr.p12 $KAFKA_HOME/config
-cp /tmp/kafka_server_jaas.conf $KAFKA_HOME/config
-cp /tmp/cadi.properties $KAFKA_HOME/config
-export KAFKA_OPTS="-Djava.security.auth.login.config=$KAFKA_HOME/config/kafka_server_jaas.conf"
-
-
-exec $KAFKA_HOME/bin/kafka-server-start.sh $KAFKA_HOME/config/server.properties
-
-
-
diff --git a/src/main/docker/start-kafkaOrMirrorMaker.sh b/src/main/docker/start-kafkaOrMirrorMaker.sh
deleted file mode 100644 (file)
index 9bb2b8a..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-if [[ -n "$START_MIRROR_MAKER" && "$START_MIRROR_MAKER" = "YES" ]]; then
-        exec start-mirrormaker.sh
-    else
-        exec start-kafka.sh
- fi
\ No newline at end of file
diff --git a/src/main/docker/start-mirrormaker.sh b/src/main/docker/start-mirrormaker.sh
deleted file mode 100644 (file)
index 355bac0..0000000
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/bin/bash
-
-if [[ -z "$KAFKA_PORT" ]]; then
-    export KAFKA_PORT=9092
-fi
-
-
-if [[ -z "$KAFKA_ADVERTISED_PORT" && \
-  -z "$KAFKA_LISTENERS" && \
-  -z "$KAFKA_ADVERTISED_LISTENERS" && \
-  -S /var/run/docker.sock ]]; then
-    export KAFKA_ADVERTISED_PORT=$(docker port `hostname` $KAFKA_PORT | sed -r "s/.*:(.*)/\1/g")
-fi
-if [[ -z "$KAFKA_BROKER_ID" ]]; then
-    if [[ -n "$BROKER_ID_COMMAND" ]]; then
-        export KAFKA_BROKER_ID=$(eval $BROKER_ID_COMMAND)
-    else
-        # By default auto allocate broker ID
-        export KAFKA_BROKER_ID=-1
-    fi
-fi
-if [[ -z "$KAFKA_LOG_DIRS" ]]; then
-    export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME"
-fi
-if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then
-    export KAFKA_ZOOKEEPER_CONNECT=$(env | grep ZK.*PORT_2181_TCP= | sed -e 's|.*tcp://||' | paste -sd ,)
-fi
-
-if [[ -n "$KAFKA_HEAP_OPTS" ]]; then
-    sed -r -i "s/(export KAFKA_HEAP_OPTS)=\"(.*)\"/\1=\"$KAFKA_HEAP_OPTS\"/g" $KAFKA_HOME/bin/kafka-server-start.sh
-    unset KAFKA_HEAP_OPTS
-fi
-
-if [[ -z "$KAFKA_ADVERTISED_HOST_NAME" && -n "$HOSTNAME_COMMAND" ]]; then
-    export KAFKA_ADVERTISED_HOST_NAME=$(eval $HOSTNAME_COMMAND)
-fi
-
-#if [[ -n "$KAFKA_LISTENER_SECURITY_PROTOCOL_MAP" ]]; then
-#  if [[ -n "$KAFKA_ADVERTISED_PORT" && -n "$KAFKA_ADVERTISED_PROTOCOL_NAME" ]]; then
-#    export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_PROTOCOL_NAME}://${KAFKA_ADVERTISED_HOST_NAME-}:${KAFKA_ADVERTISED_PORT}"
-#    export KAFKA_LISTENERS="$KAFKA_ADVERTISED_PROTOCOL_NAME://:$KAFKA_ADVERTISED_PORT"
-#  fi
-
-  if [[ -z "$KAFKA_PROTOCOL_NAME" ]]; then
-    export KAFKA_PROTOCOL_NAME="${KAFKA_ADVERTISED_PROTOCOL_NAME}"
-  fi
-
-  if [[ -n "$KAFKA_PORT" && -n "$KAFKA_PROTOCOL_NAME" ]]; then
-    export ADD_LISTENER="${KAFKA_PROTOCOL_NAME}://${KAFKA_HOST_NAME-}:${KAFKA_PORT}"
-  fi
-
-  if [[ -z "$KAFKA_INTER_BROKER_LISTENER_NAME" ]]; then
-    export KAFKA_INTER_BROKER_LISTENER_NAME=$KAFKA_PROTOCOL_NAME
-  fi
-#else
-   #DEFAULT LISTENERS 
-#   export KAFKA_ADVERTISED_LISTENERS="PLAINTEXT://${KAFKA_ADVERTISED_HOST_NAME-}:${KAFKA_ADVERTISED_PORT-$KAFKA_PORT}"
-#   export KAFKA_LISTENERS="PLAINTEXT://${KAFKA_HOST_NAME-}:${KAFKA_PORT-9092}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -n "$KAFKA_LISTENERS" ]]; then
-#  export KAFKA_LISTENERS="${KAFKA_LISTENERS},${ADD_LISTENER}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -z "$KAFKA_LISTENERS" ]]; then
-#  export KAFKA_LISTENERS="${ADD_LISTENER}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then
-#  export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_LISTENERS},${ADD_LISTENER}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -z "$KAFKA_ADVERTISED_LISTENERS" ]]; then
-# export KAFKA_ADVERTISED_LISTENERS="${ADD_LISTENER}"
-#fi
-
-if [[ -n "$KAFKA_INTER_BROKER_LISTENER_NAME" && ! "$KAFKA_INTER_BROKER_LISTENER_NAME"X = "$KAFKA_PROTOCOL_NAME"X ]]; then
-   if [[ -n "$KAFKA_INTER_BROKER_PORT" ]]; then
-      export KAFKA_INTER_BROKER_PORT=$(( $KAFKA_PORT + 1 ))
-   fi
- #export INTER_BROKER_LISTENER="${KAFKA_INTER_BROKER_LISTENER_NAME}://:${KAFKA_INTER_BROKER_PORT}"
- #export KAFKA_LISTENERS="${KAFKA_LISTENERS},${INTER_BROKER_LISTENER}"
- #export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_LISTENERS},${INTER_BROKER_LISTENER}"
-   unset KAFKA_INTER_BROKER_PORT
-   unset KAFKA_SECURITY_INTER_BROKER_PROTOCOL
-   unset INTER_BROKER_LISTENER
-fi
-
-if [[ -n "$RACK_COMMAND" && -z "$KAFKA_BROKER_RACK" ]]; then
-    export KAFKA_BROKER_RACK=$(eval $RACK_COMMAND)
-fi
-
-#Issue newline to config file in case there is not one already
-echo -e "\n" >> $KAFKA_HOME/config/server.properties
-
-unset KAFKA_CREATE_TOPICS
-unset KAFKA_ADVERTISED_PROTOCOL_NAME
-unset KAFKA_PROTOCOL_NAME
-
-if [[ -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then
-  unset KAFKA_ADVERTISED_PORT
-  unset KAFKA_ADVERTISED_HOST_NAME
-fi
-
-if [[ -n "$KAFKA_LISTENERS" ]]; then
-  unset KAFKA_PORT
-  unset KAFKA_HOST_NAME
-fi
-
-for VAR in `env`
-do
-  if [[ $VAR =~ ^KAFKA_ && ! $VAR =~ ^KAFKA_HOME ]]; then
-    kafka_name=`echo "$VAR" | sed -r "s/KAFKA_(.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
-    env_var=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
-    if egrep -q "(^|^#)$kafka_name=" $KAFKA_HOME/config/server.properties; then
-        sed -r -i "s@(^|^#)($kafka_name)=(.*)@\2=${!env_var}@g" $KAFKA_HOME/config/server.properties #note that no config values may contain an '@' char
-    else
-        echo "$kafka_name=${!env_var}" >> $KAFKA_HOME/config/server.properties
-    fi
-  fi
-
-  if [[ $VAR =~ ^LOG4J_ ]]; then
-    log4j_name=`echo "$VAR" | sed -r "s/(LOG4J_.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
-    log4j_env=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
-    if egrep -q "(^|^#)$log4j_name=" $KAFKA_HOME/config/log4j.properties; then
-        sed -r -i "s@(^|^#)($log4j_name)=(.*)@\2=${!log4j_env}@g" $KAFKA_HOME/config/log4j.properties #note that no config values may contain an '@' char
-    else
-        echo "$log4j_name=${!log4j_env}" >> $KAFKA_HOME/config/log4j.properties
-    fi
-  fi
-done
-
-if [[ -n "$CUSTOM_INIT_SCRIPT" ]] ; then
-  eval $CUSTOM_INIT_SCRIPT
-fi
-cp /tmp/kafka11aaf-jar-with-dependencies.jar $KAFKA_HOME/libs
-cp /tmp/org.onap.dmaap.mr.keyfile  $KAFKA_HOME/config
-cp /tmp/org.onap.dmaap.mr.trust.jks $KAFKA_HOME/config
-cp /tmp/org.onap.dmaap.mr.p12 $KAFKA_HOME/config
-cp /tmp/kafka_server_jaas.conf $KAFKA_HOME/config
-cp /tmp/cadi.properties $KAFKA_HOME/config
-export KAFKA_OPTS="-Djava.security.auth.login.config=$KAFKA_HOME/config/kafka_server_jaas.conf"
-
-
-
-cp /tmp/kafka-run-class.sh /opt/kafka/bin
-java -jar /tmp/dmaapMMAgent.jar
-
-
-
index 747e34e..7f441f0 100644 (file)
@@ -26,7 +26,7 @@ import org.powermock.core.classloader.annotations.PowerMockIgnore;
 import org.powermock.modules.junit4.PowerMockRunner;
 
 @RunWith(PowerMockRunner.class)
-@PowerMockIgnore({"javax.net.ssl.*", "javax.security.auth.*"})
+@PowerMockIgnore({"javax.net.ssl.*", "javax.security.auth.*", "jdk.internal.reflect.*", "javax.crypto.*"})
 public class AuthorizationProviderFactoryTest {
 
        @Test
index 8bd8486..1a2bd95 100644 (file)
@@ -41,7 +41,7 @@ import org.powermock.modules.junit4.PowerMockRunner;
 
 
 @RunWith(PowerMockRunner.class)
-@PowerMockIgnore({"javax.net.ssl.*", "javax.security.auth.*"})
+@PowerMockIgnore({"javax.net.ssl.*", "javax.security.auth.*", "jdk.internal.reflect.*"})
 public class Cadi3AAFProviderTest {
 
        public Cadi3AAFProvider cadi3AAFProvider;
index ae76534..7f01be4 100644 (file)
@@ -43,7 +43,7 @@ import kafka.security.auth.Resource;
 import kafka.security.auth.ResourceType;
 
 @RunWith(PowerMockRunner.class)
-@PowerMockIgnore({"javax.net.ssl.*", "javax.security.auth.*"})
+@PowerMockIgnore({"javax.net.ssl.*", "javax.security.auth.*", "jdk.internal.reflect.*", "javax.crypto.*"})
 @PrepareForTest({ AuthorizationProviderFactory.class })
 public class KafkaCustomAuthorizerTest {
        @Mock
index fcb8b2e..3fd9f0e 100644 (file)
@@ -26,10 +26,12 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.MockitoAnnotations;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 
 @RunWith(PowerMockRunner.class)
+@PowerMockIgnore({"jdk.internal.reflect.*"})
 @PrepareForTest({ PlainLoginModule1.class })
 public class PlainLoginModule1Test {
 
index 4f26527..c354378 100644 (file)
@@ -40,7 +40,7 @@ import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 
 @RunWith(PowerMockRunner.class)
-@PowerMockIgnore("javax.security.auth.*")
+@PowerMockIgnore({"javax.security.auth.*", "jdk.internal.reflect.*", "javax.crypto.*"})
 @PrepareForTest({ AuthorizationProviderFactory.class })
 public class PlainSaslServer1Test {
 
index 0e8523a..acbb0de 100644 (file)
@@ -15,4 +15,4 @@ cadi_truststore_password=enc:7U4uOSdXQblnjiDsrqyjXugG4nChBXBBjqZ5amRaCq5yeYzbC9h
 cadi_x509_issuers=CN=intermediateCA_1, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_7, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_9, OU=OSAAF, O=ONAP, C=US
 cadi_protocols=TLSv1.1,TLSv1.2
 cadi_latitude=37.78187
-cadi_longitude=-122.26147
+cadi_longitude=-122.26147
\ No newline at end of file
index eccc573..44511d5 100644 (file)
@@ -25,8 +25,8 @@
 # because they are used in Jenkins, whose plug-in doesn't support
 
 major=1
-minor=0
-patch=5
+minor=1
+patch=0
 
 base_version=${major}.${minor}.${patch}