First working draft of the Kafka consumer inference app deployment
[demo.git] vnfs/DAaaS/microservices/PythonApps/python-kafkaConsumer-inference-app/kubernetes-manifests/kafka.inference.deploy.yaml
apiVersion: apps/v1  # extensions/v1beta1 Deployments are deprecated and removed in Kubernetes 1.16+
kind: Deployment
metadata:
  name: python-kafkaconsumer-inference-app
  labels:
    app: python-kafkaconsumer-inference-app
    tier: app
spec:
  replicas: 1
  selector:
    matchLabels:
      app: python-kafkaconsumer-inference-app
      tier: app
  template:
    metadata:
      labels:
        app: python-kafkaconsumer-inference-app
        tier: app
    spec:
      containers:
      - name: python-kafkaconsumer-inference-app
        # Image reference carries no registry or tag, so it resolves to :latest.
        image: python-kafkaconsumer-inference-app
        ports:
        - containerPort: 8080
        resources:
          requests:
            memory: "640Mi"
            cpu: "2500m"
          limits:
            memory: "1280Mi"
            cpu: "5000m"
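
The Deployment declares containerPort 8080 but defines no Service, so nothing in this file routes traffic to the pods. Below is a minimal ClusterIP Service sketch that would match the labels above; it is illustrative only and not part of the original manifest, and the Service name, port name, and port mapping are assumptions.

---
# Illustrative companion Service (not in the original file):
# forwards in-cluster traffic to the consumer pods on the declared containerPort.
apiVersion: v1
kind: Service
metadata:
  name: python-kafkaconsumer-inference-app
  labels:
    app: python-kafkaconsumer-inference-app
    tier: app
spec:
  type: ClusterIP
  selector:
    app: python-kafkaconsumer-inference-app
    tier: app
  ports:
  - name: http
    port: 8080
    targetPort: 8080

A ClusterIP type keeps the consumer reachable only inside the cluster; if external access were needed, the type could instead be NodePort or sit behind an Ingress.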