Follow this hands-on exercise to deploy a Twitter streaming and sentiment analysis app running on OpenShift, consuming persistent storage from OpenShift Container Storage (OCS)
- Register for a Twitter developer account and grab your API keys; they are required for this demo
- Register for an Aylien developer account and grab your API keys; they are also required for this demo
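- Optionally, export the keys into your shell now so you can paste them into the oc new-app and docker run commands later (the variable names simply mirror the --env names used further down; the values shown are placeholders)
export TWTR_CONSUMER_KEY='YOUR_KEY_HERE'
export TWTR_CONSUMER_SECRET='YOUR_KEY_HERE'
export TWTR_ACCESS_TOKEN='YOUR_KEY_HERE'
export TWTR_ACCESS_TOKEN_SECRET='YOUR_KEY_HERE'
export AYLIEN_APP_ID='YOUR_KEY_HERE'
export AYLIEN_APP_KEY='YOUR_KEY_HERE'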
- Verify OCS cluster status from the dashboard
- Verify OCS cluster status from the CLI
oc project openshift-storage
TOOLS_POD=$(oc get pods -n openshift-storage -l app=rook-ceph-tools -o name)
oc rsh -n openshift-storage $TOOLS_POD
ceph -s
ceph osd tree
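- Optionally, run a couple of extra read-only checks from the same toolbox session (standard Ceph CLI commands, nothing specific to this demo)
ceph df
ceph osd pool ls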
- Change the default storage class
oc get sc
oc patch storageclass gp2 -p '{"metadata":{"annotations":{"storageclass.kubernetes.io/is-default-class":"false"}}}'
oc patch storageclass ocs-storagecluster-ceph-rbd -p '{"metadata":{"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}'
oc get sc
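- Optionally, confirm the default-class annotation landed on the OCS storage class (a plain oc jsonpath query)
oc get sc ocs-storagecluster-ceph-rbd -o jsonpath='{.metadata.annotations.storageclass\.kubernetes\.io/is-default-class}{"\n"}'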
- Create a new project
oc new-project amq-streams
- Install AMQ Streams Operator from console
watch oc get all,csv
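- If you prefer installing the operator from the CLI instead of the console, an OperatorGroup plus Subscription along these lines should work (a sketch only; the channel, catalog source, and package name can vary by OpenShift/AMQ Streams version, so verify them in OperatorHub)
apiVersion: operators.coreos.com/v1
kind: OperatorGroup
metadata:
  name: amq-streams-og
  namespace: amq-streams
spec:
  targetNamespaces:
  - amq-streams
---
apiVersion: operators.coreos.com/v1alpha1
kind: Subscription
metadata:
  name: amq-streams
  namespace: amq-streams
spec:
  channel: stable               # assumption: channel name differs between releases
  name: amq-streams             # assumption: package name in the catalog
  source: redhat-operators
  sourceNamespace: openshift-marketplace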
- Git clone application repository
git clone https://github.com/ksingh7/twitter_streaming_app_on_openshift_OCS.git
cd twitter_streaming_app_on_openshift_OCS
- Verify that no PVs or PVCs have been provisioned from OCS yet
oc get pv,pvc
- Create the Kafka cluster (before applying, verify the domain name in 01-kafka-cluster.yaml; a sketch of the CR is shown below)
oc apply -f 01-kafka-cluster.yaml
watch oc get all
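- For orientation, a minimal Kafka custom resource with OCS-backed persistent storage and an external route listener looks roughly like this; 01-kafka-cluster.yaml in the repo is the authoritative version (the apiVersion, cluster name, sizes, and listener layout here are assumptions)
apiVersion: kafka.strimzi.io/v1beta1
kind: Kafka
metadata:
  name: cluster                        # assumed from the cluster-kafka-bootstrap service used later
spec:
  kafka:
    replicas: 3
    listeners:
      plain: {}
      external:
        type: route                    # this is where the cluster domain name comes into play
    storage:
      type: persistent-claim
      class: ocs-storagecluster-ceph-rbd   # can be omitted now that this is the default class
      size: 10Gi
      deleteClaim: false
  zookeeper:
    replicas: 3
    storage:
      type: persistent-claim
      class: ocs-storagecluster-ceph-rbd
      size: 10Gi
      deleteClaim: false
  entityOperator:
    topicOperator: {}
    userOperator: {}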
- Deploy Prometheus and Grafana
oc apply -f 02-prometheus.yaml
oc apply -f 03-grafana.yaml
watch oc get all
- Verify the PVs and PVCs provisioned by OCS
oc get pvc -n amq-streams
oc get pv -o json | jq -r '.items | sort_by(.spec.capacity.storage)[]| select(.spec.claimRef.namespace=="amq-streams") | [.spec.claimRef.name,.spec.capacity.storage] | @tsv'
- Add Prometheus as Grafana's data source and import the Kafka/ZooKeeper dashboards (a sketch of what the script does follows after the next step)
sh 04-grafana-datasource.sh
- Grab Grafana URL
oc get route grafana-route --no-headers | awk '{print $2}'
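- For reference, registering Prometheus as a Grafana data source by hand boils down to a single call against the Grafana HTTP API, roughly as below; 04-grafana-datasource.sh in the repo is authoritative, and the admin credentials and in-cluster Prometheus service URL here are assumptions
GRAFANA_HOST=$(oc get route grafana-route --no-headers | awk '{print $2}')
curl -X POST -H "Content-Type: application/json" \
  -u admin:admin \
  http://$GRAFANA_HOST/api/datasources \
  -d '{"name":"Prometheus","type":"prometheus","url":"http://prometheus:9090","access":"proxy","isDefault":true}'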
- Create MongoDB template
oc create -f 05-ocs-mongodb-persistent-template.yaml -n openshift
oc -n openshift get template mongodb-persistent-ocs
- Create MongoDB app
oc new-app -n amq-streams --name=mongodb --template=mongodb-persistent-ocs \
-e MONGODB_USER=demo \
-e MONGODB_PASSWORD=demo \
-e MONGODB_DATABASE=twitter_stream \
-e MONGODB_ADMIN_PASSWORD=admin
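- Optionally, confirm the template claimed OCS storage and the database rolled out (the DeploymentConfig name mongodb is what the standard persistent template creates; adjust if your template differs)
oc get pvc mongodb -n amq-streams
oc rollout status dc/mongodb -n amq-streams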
- Exec into the MongoDB pod
oc rsh $(oc get po --selector app=mongodb --no-headers | awk '{print $1}')
- Connect to MongoDB and add some records
mongo -u demo -p demo twitter_stream
db.redhat.insert({name:'Red Hat Enterprise Linux',product_name:'RHEL',type:'linux-x86_64',release_date:'05/08/2019',version:8})
db.redhat.find().pretty()
- Allow the container to run as root (grant the anyuid SCC to the default service account)
oc adm policy add-scc-to-user anyuid -z default
- Deploy the backend API app
oc new-app --name=backend --docker-image=karansingh/kafka-demo-backend-service --env IS_KAFKA_SSL='False' --env MONGODB_ENDPOINT='mongodb:27017' --env KAFKA_BOOTSTRAP_ENDPOINT='cluster-kafka-bootstrap:9092' --env 'KAFKA_TOPIC=topic1' --env AYLIEN_APP_ID='YOUR_KEY_HERE' --env AYLIEN_APP_KEY='YOUR_KEY_HERE' --env TWTR_CONSUMER_KEY='YOUR_KEY_HERE' --env TWTR_CONSUMER_SECRET='YOUR_KEY_HERE' --env TWTR_ACCESS_TOKEN='YOUR_KEY_HERE' --env TWTR_ACCESS_TOKEN_SECRET='YOUR_KEY_HERE' --env MONGODB_HOST='mongodb' --env MONGODB_PORT=27017 --env MONGODB_USER='demo' --env MONGODB_PASSWORD='demo' --env MONGODB_DB_NAME='twitter_stream' -o yaml > 06-backend.yaml
oc apply -f 06-backend.yaml ; oc expose svc/backend
- In a new shell, tail the backend logs
oc logs -f $(oc get po --selector app=backend --no-headers | awk '{print $1}')
- Grab the backend route
oc get route backend --no-headers | awk '{print $2}'
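- Optionally, hit the backend route once to confirm it resolves (the exact API paths depend on the backend image, so a plain GET against the root is just a reachability check)
curl -s -o /dev/null -w "%{http_code}\n" http://$(oc get route backend --no-headers | awk '{print $2}')/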
- Edit results.html, Line-62 (var url) and update it with the backend route grabbed above
- Build the frontend Docker image
cd frontend
docker build -t kafka-demo-frontend-service:latest .
docker tag kafka-demo-frontend-service:latest karansingh/kafka-demo-frontend-service:latest
docker push karansingh/kafka-demo-frontend-service
oc new-app --name=frontend --docker-image=karansingh/kafka-demo-frontend-service:latest ; oc expose svc/frontend ; oc get route frontend
- Delete the app
oc delete all --selector app=backend
oc delete all --selector app=frontend
oc delete all --selector app=mongodb ; oc delete svc/mongodb ; oc delete secret/mongodb ; oc delete pvc/mongodb
oc adm policy remove-scc-from-user anyuid system:serviceaccount:amq-streams:default
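- To tear down the rest as well, delete the Kafka custom resource and the project (the CR name cluster is inferred from the cluster-kafka-bootstrap service; use whatever name 01-kafka-cluster.yaml defines)
oc delete kafka cluster -n amq-streams
oc delete project amq-streams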
- Local Docker setup
docker run -p 27017:27017 -d --mount type=bind,source=$PWD/data/bin,destination=/data/bin --name mongo mongo
docker exec -it mongo /bin/bash
mongo
- Create a user with password (demo:demo)
- Create a database (sampledb) and add a document to a collection
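- A minimal sketch of those two steps in the mongo shell, reusing the demo credentials and the redhat collection from the OpenShift walkthrough
use sampledb
db.createUser({user: "demo", pwd: "demo", roles: [{role: "readWrite", db: "sampledb"}]})
db.redhat.insert({name:'Red Hat Enterprise Linux',product_name:'RHEL',version:8})
db.redhat.find().pretty()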
docker run -d --rm --name backend --link mongo -p 8080:8080 --env IS_KAFKA_SSL='True' --env MONGODB_ENDPOINT='mongo:27017' --env KAFKA_BOOTSTRAP_ENDPOINT='cluster-kafka-bootstrap-twitter-demo.apps.ocp42.ceph-s3.com:443' --env 'KAFKA_TOPIC=topic1' --env AYLIEN_APP_ID='SECRET' --env AYLIEN_APP_KEY='SECRET' --env TWTR_CONSUMER_KEY='SECRET' --env TWTR_CONSUMER_SECRET='SECRETa' --env TWTR_ACCESS_TOKEN='SECRET' --env TWTR_ACCESS_TOKEN_SECRET='SECRET' --env MONGODB_HOST='mongo' --env MONGODB_PORT=27017 --env MONGODB_USER='demo' --env MONGODB_PASSWORD='demo' --env MONGODB_DB_NAME='sampledb' karansingh/kafka-demo-backend-service
docker run -p 80:80 -d --rm --name frontend --link backend karansingh/kafka-demo-frontend-service