First working draft of the Kafka consumer for the inference app
diff --git a/vnfs/DAaaS/microservices/PythonApps/python-kafkaConsumer-inference-app/kubernetes-manifests/kafka.inference.deploy.yaml b/vnfs/DAaaS/microservices/PythonApps/python-kafkaConsumer-inference-app/kubernetes-manifests/kafka.inference.deploy.yaml
new file mode 100644
index 0000000..bbd3d55
--- /dev/null
@@ -0,0 +1,31 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: python-kafkaconsumer-inference-app
+  labels:
+    app: python-kafkaconsumer-inference-app
+    tier: app
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: python-kafkaconsumer-inference-app
+      tier: app
+  template:
+    metadata:
+      labels:
+        app: python-kafkaconsumer-inference-app
+        tier: app
+    spec:
+      containers:
+      - name: python-kafkaconsumer-inference-app
+        image: python-kafkaconsumer-inference-app
+        ports:
+        - containerPort: 8080
+        resources:
+          requests:
+            memory: "640Mi"
+            cpu: "2500m"
+          limits:
+            memory: "1280Mi"
+            cpu: "5000m"
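The Deployment exposes containerPort 8080, but this commit adds only the Deployment itself. If the consumer app needs to be reachable inside the cluster on that port, a companion Service would typically accompany it. The sketch below is illustrative and not part of this commit; the Service name and ClusterIP type are assumptions, while the selector labels and port are taken from the manifest above.

apiVersion: v1
kind: Service
metadata:
  # Hypothetical name, chosen to match the Deployment; not part of this commit.
  name: python-kafkaconsumer-inference-app
  labels:
    app: python-kafkaconsumer-inference-app
    tier: app
spec:
  type: ClusterIP
  selector:
    # Must match the pod template labels in the Deployment above.
    app: python-kafkaconsumer-inference-app
    tier: app
  ports:
  - port: 8080
    targetPort: 8080

Assuming a working kubeconfig, the manifest in this commit can be applied with kubectl apply -f kafka.inference.deploy.yaml from the kubernetes-manifests directory.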