I. Overview
Livy is a service that provides a REST interface for interacting with a Spark cluster. It can submit Spark jobs or snippets of Spark code and return the results synchronously or asynchronously, and it manages SparkContexts through a RESTful interface or an RPC client library. Livy also simplifies the interaction between Spark and application servers, which makes it possible to use Spark from web and mobile applications.
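To make this concrete, below is a minimal sketch of the interactive-session workflow against Livy's REST API. The host and port refer to the NodePort service deployed later in this article, and the session/statement ids (0) are illustrative.

# Create an interactive PySpark session (POST /sessions)
curl -s -X POST -H "Content-Type: application/json" \
  -d '{"kind": "pyspark"}' \
  http://local-168-182-110:31998/sessions

# Submit a code snippet asynchronously (POST /sessions/{id}/statements)
curl -s -X POST -H "Content-Type: application/json" \
  -d '{"code": "spark.range(100).count()"}' \
  http://local-168-182-110:31998/sessions/0/statements

# Poll for the result (GET /sessions/{id}/statements/{id})
curl -s http://local-168-182-110:31998/sessions/0/statements/0

# Delete the session when done (DELETE /sessions/{id})
curl -s -X DELETE http://local-168-182-110:31998/sessions/0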
II. Orchestration and Deployment
1. Prepare the deployment packages
A pre-built Livy deployment package is also provided here for anyone who needs it:
Link: https://pan.baidu.com/s/1pPCbe0lUJ6ji8rvQYsVw9A?pwd=qn7i (extraction code: qn7i)
1) Build the image
Dockerfile
FROM myharbor.com/bigdata/centos:7.9.2009

RUN rm -f /etc/localtime && ln -sv /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && echo "Asia/Shanghai" > /etc/timezone
ENV LANG=zh_CN.UTF-8

### install tools
RUN yum install -y vim tar wget curl less telnet net-tools lsof unzip

RUN groupadd --system --gid=9999 admin && useradd --system -m -d /home/admin --uid=9999 --gid=admin admin

RUN mkdir -p /opt/apache

# ADD does not auto-extract zip archives, so copy and unzip explicitly
COPY apache-livy-0.8.0-incubating-SNAPSHOT-bin.zip /opt/apache/
RUN unzip -q /opt/apache/apache-livy-0.8.0-incubating-SNAPSHOT-bin.zip -d /opt/apache/ \
    && rm -f /opt/apache/apache-livy-0.8.0-incubating-SNAPSHOT-bin.zip
# Keep LIVY_HOME consistent with the /opt/apache/livy paths used by the chart mounts below
ENV LIVY_HOME=/opt/apache/livy
RUN ln -s /opt/apache/apache-livy-0.8.0-incubating-SNAPSHOT-bin $LIVY_HOME

ADD hadoop-3.3.2.tar.gz /opt/apache/
ENV HADOOP_HOME=/opt/apache/hadoop
RUN ln -s /opt/apache/hadoop-3.3.2 $HADOOP_HOME
ENV HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop

ADD spark-3.3.0-bin-hadoop3.tar.gz /opt/apache/
ENV SPARK_HOME=/opt/apache/spark
RUN ln -s /opt/apache/spark-3.3.0-bin-hadoop3 $SPARK_HOME

ENV PATH=${LIVY_HOME}/bin:${HADOOP_HOME}/bin:${SPARK_HOME}/bin:$PATH

RUN chown -R admin:admin /opt/apache

WORKDIR $LIVY_HOME

ENTRYPOINT ${LIVY_HOME}/bin/livy-server start; tail -f ${LIVY_HOME}/logs/livy-root-server.out
[Note] The core-site.xml, hdfs-site.xml and yarn-site.xml inside the hadoop package must be your cluster's client configurations; Livy submits jobs to HDFS/YARN through them, so place them under etc/hadoop before building the image.
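Before building, those client configs can be packed into the hadoop-3.3.2.tar.gz referenced by the Dockerfile. A minimal sketch, assuming the configs live under /etc/hadoop/conf on the build machine (adjust the source paths to your environment):

# Unpack the vanilla Hadoop distribution next to the Dockerfile
tar -zxf hadoop-3.3.2.tar.gz

# Copy the cluster's client configs into etc/hadoop (source paths are assumed)
cp /etc/hadoop/conf/core-site.xml hadoop-3.3.2/etc/hadoop/
cp /etc/hadoop/conf/hdfs-site.xml hadoop-3.3.2/etc/hadoop/
cp /etc/hadoop/conf/yarn-site.xml hadoop-3.3.2/etc/hadoop/

# Repackage so the ADD in the Dockerfile extracts a config-complete Hadoop
tar -zcf hadoop-3.3.2.tar.gz hadoop-3.3.2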
Build the image:
docker build -t myharbor.com/bigdata/livy:0.8.0 . --no-cache

### Option reference
# -t: name (and tag) of the resulting image
# . : build context; the Dockerfile is looked up in the current directory
# -f: path to an alternative Dockerfile (not needed here)
# --no-cache: do not use the build cache

# Push to Harbor
docker push myharbor.com/bigdata/livy:0.8.0
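Optionally, the image can be smoke-tested locally before pushing. Without the cluster configs real jobs will not run, but the REST port should respond. A sketch:

# Run the image locally and expose the Livy REST port
docker run -d --name livy-test -p 8998:8998 myharbor.com/bigdata/livy:0.8.0

# The sessions endpoint should return an empty list once the server is up
curl -s http://localhost:8998/sessions

# Clean up
docker rm -f livy-test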
2) Create the livy chart scaffold
helm create livy
3) Edit the YAML manifests
livy/values.yaml
replicaCount: 1

image:
  repository: myharbor.com/bigdata/livy
  pullPolicy: IfNotPresent
  # Overrides the image tag whose default is the chart appVersion.
  tag: "0.8.0"

securityContext:
  runAsUser: 9999
  runAsGroup: 9999
  privileged: true

service:
  type: NodePort
  port: 8998
  nodePort: 31998
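Note that values.yaml sets service.nodePort, while the service.yaml scaffolded by helm create only consumes service.type and service.port, so the fixed port 31998 used later needs the template to be extended. A minimal sketch of that edit (written as a shell heredoc):

cat > livy/templates/service.yaml <<'EOF'
apiVersion: v1
kind: Service
metadata:
  name: {{ include "livy.fullname" . }}
  labels:
    {{- include "livy.labels" . | nindent 4 }}
spec:
  type: {{ .Values.service.type }}
  ports:
    - port: {{ .Values.service.port }}
      targetPort: http
      protocol: TCP
      name: http
      {{- if and (eq .Values.service.type "NodePort") .Values.service.nodePort }}
      nodePort: {{ .Values.service.nodePort }}
      {{- end }}
  selector:
    {{- include "livy.selectorLabels" . | nindent 4 }}
EOF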
livy/templates/configmap.yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: {{ include "livy.fullname" . }}
  labels:
    {{- include "livy.labels" . | nindent 4 }}
data:
  livy.conf: |-
    livy.spark.master = yarn
    livy.spark.deploy-mode = client
    livy.environment = production
    livy.impersonation.enabled = true
    livy.server.csrf_protection.enabled = false
    livy.server.port = {{ .Values.service.port }}
    livy.server.session.timeout = 3600000
    livy.server.recovery.mode = recovery
    livy.server.recovery.state-store = filesystem
    livy.server.recovery.state-store.url = /tmp/livy
    livy.repl.enable-hive-context = true
  livy-env.sh: |-
    export JAVA_HOME=/opt/apache/jdk1.8.0_212
    export HADOOP_HOME=/opt/apache/hadoop
    export HADOOP_CONF_DIR=/opt/apache/hadoop/etc/hadoop
    export SPARK_HOME=/opt/apache/spark
    export SPARK_CONF_DIR=/opt/apache/spark/conf
    export LIVY_LOG_DIR=/opt/apache/livy/logs
    export LIVY_PID_DIR=/opt/apache/livy/pid-dir
    export LIVY_SERVER_JAVA_OPTS="-Xmx512m"
  spark-blacklist.conf: |-
    spark.master
    spark.submit.deployMode

    # Disallow overriding the location of Spark cached jars.
    spark.yarn.jar
    spark.yarn.jars
    spark.yarn.archive

    # Don't allow users to override the RSC timeout.
    livy.rsc.server.idle-timeout
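The rendered ConfigMap can be sanity-checked before installing anything; a quick sketch using helm template (the release name livy matches the install command used below):

# Render only the ConfigMap and inspect the generated livy.conf / livy-env.sh
helm template livy ./livy -s templates/configmap.yaml

# Optional: validate it against the API server without creating it
helm template livy ./livy -s templates/configmap.yaml | kubectl apply --dry-run=client -f -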
livy/templates/deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "livy.fullname" . }}
  labels:
    {{- include "livy.labels" . | nindent 4 }}
spec:
  {{- if not .Values.autoscaling.enabled }}
  replicas: {{ .Values.replicaCount }}
  {{- end }}
  selector:
    matchLabels:
      {{- include "livy.selectorLabels" . | nindent 6 }}
  template:
    metadata:
      {{- with .Values.podAnnotations }}
      annotations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      labels:
        {{- include "livy.selectorLabels" . | nindent 8 }}
    spec:
      {{- with .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      serviceAccountName: {{ include "livy.serviceAccountName" . }}
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      containers:
        - name: {{ .Chart.Name }}
          securityContext:
            {{- toYaml .Values.securityContext | nindent 12 }}
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          ports:
            - name: http
              containerPort: 8998
              protocol: TCP
          livenessProbe:
            httpGet:
              path: /
              port: http
          readinessProbe:
            httpGet:
              path: /
              port: http
          resources:
            {{- toYaml .Values.resources | nindent 12 }}
          volumeMounts:
            - name: {{ .Release.Name }}-livy-conf
              mountPath: /opt/apache/livy/conf/livy.conf
              subPath: livy.conf
            - name: {{ .Release.Name }}-livy-env
              mountPath: /opt/apache/livy/conf/livy-env.sh
              subPath: livy-env.sh
            - name: {{ .Release.Name }}-spark-blacklist-conf
              mountPath: /opt/apache/livy/conf/spark-blacklist.conf
              subPath: spark-blacklist.conf
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      volumes:
        - name: {{ .Release.Name }}-livy-conf
          configMap:
            name: {{ include "livy.fullname" . }}
        - name: {{ .Release.Name }}-livy-env
          configMap:
            name: {{ include "livy.fullname" . }}
        - name: {{ .Release.Name }}-spark-blacklist-conf
          configMap:
            name: {{ include "livy.fullname" . }}
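Since the three volumes above all point at the same ConfigMap and are mounted key by key via subPath, it is worth rendering and dry-running the chart before installing. A sketch:

# Render the Deployment with the values above
helm template livy ./livy -s templates/deployment.yaml

# Lint the whole chart and dry-run the rendered manifests
helm lint ./livy
helm template livy ./livy | kubectl apply --dry-run=client -f -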
4) Deploy
helm install livy ./livy -n livy --create-namespace
NOTES
NOTES:
1. Get the application URL by running these commands:
  export NODE_PORT=$(kubectl get --namespace livy -o jsonpath="{.spec.ports[0].nodePort}" services livy)
  export NODE_IP=$(kubectl get nodes --namespace livy -o jsonpath="{.items[0].status.addresses[0].address}")
  echo http://$NODE_IP:$NODE_PORT
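To wait for the rollout to finish, a sketch (the Deployment name livy assumes the default livy.fullname, which resolves to the release name used above):

# Wait until the Deployment's pods are ready
kubectl -n livy rollout status deployment/livy

# Re-running the deployment idempotently can be done with upgrade --install
helm upgrade --install livy ./livy -n livy --create-namespace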
Check the pods and service:
kubectl get pods,svc -n livy -owide
Web UI: http://192.168.182.110:31998/ui
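Besides the web UI, the REST endpoint can be probed directly through the NodePort; a quick sketch:

# List sessions and batches through the NodePort service
curl -s http://192.168.182.110:31998/sessions | python -m json.tool
curl -s http://192.168.182.110:31998/batches | python -m json.tool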
5) Test and verify
curl -s -XPOST -d '{"file":"hdfs://myhdfs/tmp/spark-examples_2.12-3.3.0.jar","className":"org.apache.spark.examples.SparkPi","name":"SparkPi-test"}' -H "Content-Type: application/json" http://local-168-182-110:31998/batches | python -m json.tool
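The POST above returns a batch id; its state and the driver log collected by Livy can then be polled (the id 0 below is illustrative, take it from the response):

# Check the state of the submitted batch (GET /batches/{id})
curl -s http://local-168-182-110:31998/batches/0 | python -m json.tool

# Fetch the driver log collected by Livy (GET /batches/{id}/log)
curl -s "http://local-168-182-110:31998/batches/0/log?from=0&size=100" | python -m json.tool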
6) Uninstall
helm uninstall livy -n livy
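helm uninstall only removes the release's Kubernetes objects; the optional cleanup below is a sketch:

# Remove the namespace created by --create-namespace (optional)
kubectl delete namespace livy

# Remove the local image if it is no longer needed (optional)
docker rmi myharbor.com/bigdata/livy:0.8.0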
Source: 今日头条 (Toutiao)