forked from isxcode/spark-yun
-
Notifications
You must be signed in to change notification settings - Fork 0
/
install.sh
199 lines (181 loc) · 8.08 KB
/
install.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
#!/bin/bash
echo "开始安装"

# Verify that a required command-line tool is installed; abort otherwise.
# $1 - command to probe with `command -v`
# $2 - install hint appended to the failure message
require_cmd() {
  if ! command -v "$1" >/dev/null 2>&1; then
    echo "【安装结果】:未检测到$1命令,请安装$2"
    exit 1
  fi
}

# 检查tar/java/node/pnpm命令
require_cmd tar tar
require_cmd java java
require_cmd node nodejs
require_cmd pnpm "pnpm,参考命令:npm install pnpm@9.0.6 -g"
# 进入项目目录 (the directory containing this script becomes the project root)
BASE_PATH=$(cd "$(dirname "$0")" || exit; pwd)
cd "${BASE_PATH}" || exit

# Download source and target locations used by every step below.
TMP_DIR="${BASE_PATH}/resources/tmp"
SPARK_MIN_FILE="spark-3.4.1-bin-hadoop3.tgz"
OSS_DOWNLOAD_URL="https://isxcode.oss-cn-shanghai.aliyuncs.com/zhiqingyun/install"
SPARK_MIN_DOWNLOAD_URL="${OSS_DOWNLOAD_URL}/${SPARK_MIN_FILE}"
SPARK_MIN_DIR="${BASE_PATH}/spark-yun-dist/spark-min"

# 创建tmp目录 (`mkdir -p` is a no-op when the directory already exists)
mkdir -p "${TMP_DIR}"
# 下载spark二进制文件
# NOTE: the original used `curl -ssL` (lowercase -s twice), which silences all
# error output, and without `-f` curl exits 0 on an HTTP 404 — so an error page
# would be saved as the tarball and reported as a successful download.
# `-fsSL` = fail on HTTP errors, silent progress, but still print errors,
# follow redirects.
if [ ! -f "${TMP_DIR}/${SPARK_MIN_FILE}" ]; then
  echo "spark-3.4.1开始下载,请耐心等待"
  if curl -fsSL "${SPARK_MIN_DOWNLOAD_URL}" -o "${TMP_DIR}/${SPARK_MIN_FILE}"; then
    echo "spark-3.4.1下载成功"
  else
    # Remove any partial file so a rerun retries the download.
    rm -f "${TMP_DIR}/${SPARK_MIN_FILE}"
    echo "【安装结果】:spark-3.4.1下载失败"
    exit 1
  fi
fi
# 创建spark-min目录 (idempotent)
mkdir -p "${SPARK_MIN_DIR}"
# 解压spark程序,并删除不需要的文件
# README.md survives the pruning below, so its presence marks a finished unpack.
if [ ! -f "${SPARK_MIN_DIR}/README.md" ]; then
  if ! tar vzxf "${TMP_DIR}/${SPARK_MIN_FILE}" --strip-components=1 -C "${SPARK_MIN_DIR}"; then
    echo "【安装结果】:spark-3.4.1解压失败"
    exit 1
  fi
  # Trim pieces the platform never uses to keep the distribution small.
  rm -rf "${SPARK_MIN_DIR}/data" \
         "${SPARK_MIN_DIR}/examples" \
         "${SPARK_MIN_DIR}/licenses" \
         "${SPARK_MIN_DIR}/python" \
         "${SPARK_MIN_DIR}/R"
  rm -f "${SPARK_MIN_DIR}/LICENSE" "${SPARK_MIN_DIR}/NOTICE" "${SPARK_MIN_DIR}/RELEASE"
fi
# 下载spark的jars依赖
SPARK_JAR_DIR="${SPARK_MIN_DIR}/jars"

# Download one jar from the OSS mirror into SPARK_JAR_DIR unless cached.
# $1 - jar file name under ${OSS_DOWNLOAD_URL}
# Exits 1 on download failure (the original `curl -ssL` ignored errors and
# printed the success message even when the transfer failed).
download_spark_jar() {
  local jar=$1
  if [ ! -f "${SPARK_JAR_DIR}/${jar}" ]; then
    echo "${jar}开始下载"
    if curl -fsSL "${OSS_DOWNLOAD_URL}/${jar}" -o "${SPARK_JAR_DIR}/${jar}"; then
      echo "${jar}下载成功"
    else
      # Drop any partial file so a rerun retries the download.
      rm -f "${SPARK_JAR_DIR}/${jar}"
      echo "【安装结果】:${jar}下载失败"
      exit 1
    fi
  fi
}

download_spark_jar spark-sql-kafka-0-10_2.12-3.4.1.jar
download_spark_jar spark-streaming-kafka-0-10_2.12-3.4.1.jar
download_spark_jar spark-token-provider-kafka-0-10_2.12-3.4.1.jar
download_spark_jar commons-pool2-2.11.1.jar
download_spark_jar kafka-clients-3.1.2.jar
# 创建系统驱动目录
JDBC_DIR="${BASE_PATH}/resources/jdbc/system"
mkdir -p "${JDBC_DIR}"

# Download one database driver jar into JDBC_DIR unless cached.
# $1 - driver jar file name under ${OSS_DOWNLOAD_URL}
# Fixes from the original copy-pasted stanzas: `-o ${JDBC_DIR}/...` was
# unquoted (breaks on paths with spaces); `curl -ssL` hid errors and printed
# the success message even on failure; the hive-jdbc-uber stanza logged the
# wrong jar name ("hive-jdbc-2.1.1-standalone.jar").
download_jdbc_driver() {
  local jar=$1
  if [ ! -f "${JDBC_DIR}/${jar}" ]; then
    echo "${jar}驱动开始下载"
    if curl -fsSL "${OSS_DOWNLOAD_URL}/${jar}" -o "${JDBC_DIR}/${jar}"; then
      echo "${jar}驱动下载成功"
    else
      # Drop any partial file so a rerun retries the download.
      rm -f "${JDBC_DIR}/${jar}"
      echo "【安装结果】:${jar}驱动下载失败"
      exit 1
    fi
  fi
}

# 下载数据库驱动文件
download_jdbc_driver mysql-connector-j-8.1.0.jar
download_jdbc_driver postgresql-42.6.0.jar
download_jdbc_driver Dm8JdbcDriver18-8.1.1.49.jar
download_jdbc_driver clickhouse-jdbc-0.5.0.jar
download_jdbc_driver ngdbc-2.18.13.jar
download_jdbc_driver mysql-connector-java-5.1.49.jar
download_jdbc_driver mssql-jdbc-12.4.2.jre8.jar
download_jdbc_driver hive-jdbc-3.1.3-standalone.jar
download_jdbc_driver hive-jdbc-uber-2.6.3.0-235.jar
download_jdbc_driver ojdbc8-19.23.0.0.jar
download_jdbc_driver oceanbase-client-2.4.6.jar
download_jdbc_driver jcc-11.5.8.0.jar
# 创建项目依赖文件夹
LIBS_DIR="${BASE_PATH}/resources/libs"
mkdir -p "${LIBS_DIR}"
# 下载项目第三方依赖
# `-fsSL` replaces the original `-ssL`: fail on HTTP errors instead of saving
# the error page, and report transfer errors instead of silencing them.
if [ ! -f "${LIBS_DIR}/prql-java-0.5.2.jar" ]; then
  echo "prql-java-0.5.2.jar开始下载"
  if curl -fsSL "${OSS_DOWNLOAD_URL}/prql-java-0.5.2.jar" -o "${LIBS_DIR}/prql-java-0.5.2.jar"; then
    echo "prql-java-0.5.2.jar下载成功"
  else
    # Drop any partial file so a rerun retries the download.
    rm -f "${LIBS_DIR}/prql-java-0.5.2.jar"
    echo "【安装结果】:prql-java-0.5.2.jar下载失败"
    exit 1
  fi
fi
# 下载prql文件
# Native PRQL libraries are placed directly into the backend's resources dir.
PRQL_RES_DIR="${BASE_PATH}/spark-yun-backend/spark-yun-main/src/main/resources"

# Download one native prql library into PRQL_RES_DIR unless cached.
# $1 - library file name under ${OSS_DOWNLOAD_URL}
# Quotes every path (the originals were unquoted) and uses `-fsSL` so HTTP
# errors fail loudly instead of saving the error page as the binary.
download_prql_lib() {
  local lib=$1
  if [ ! -f "${PRQL_RES_DIR}/${lib}" ]; then
    echo "${lib#lib}开始下载"
    if curl -fsSL "${OSS_DOWNLOAD_URL}/${lib}" -o "${PRQL_RES_DIR}/${lib}"; then
      echo "${lib#lib}下载成功"
    else
      # Drop any partial file so a rerun retries the download.
      rm -f "${PRQL_RES_DIR}/${lib}"
      echo "【安装结果】:${lib#lib}下载失败"
      exit 1
    fi
  fi
}

# prql 二进制文件(mac arm64)
download_prql_lib libprql_java-osx-arm64.dylib
# prql 二进制文件(linux amd64)
download_prql_lib libprql_java-linux64.so

# 返回状态
echo "【安装结果】:安装成功"