#!/bin/bash

#########################################################
# Created by Dillon Torgersen                           #
# Tool designed primarily for support.                  #
# Anyone is welcome to use, change, and redistribute.   #
# Last Update 12/01/16                                  #
#########################################################

# RUN WITH ELEVATED PRIVILEGES

#### Tunable settings ####

# Size in bytes of each blob written by the BSB benchmark.
blobsize=262144
# Maximum runtime in seconds for each BSB benchmark pass.
time=60
# Number of probes sent by each mtr trace between the machines.
pings=4
# Sampling window in seconds for the sar packet-transfer capture.
seconds=10
# Lines of Raptor metrics retained; raise to 1700 for the full dump.
raptor=33
23
# Required to view API parameters.
# NOTE(review): the password is later passed on curl command lines, where
# it is visible to other local users via ps; acceptable for an
# administrator-run support tool, but worth knowing.
read -s -p "Provide Admin Password: " AdminPass
host=$(hostname -f)
echo
echo
# Map the operator's Cloud Store choice onto the blobstore-cli provider name.
echo "Select your Cloud Provider:"
PS3='Choose provider, then select option 10 to Continue: '
options=("AWS" "AT&T" "Atmos" "Azure" "ECS" "Virtustream" "Google" "ViPR" "Swift" "Continue" "Quit")
select opt in "${options[@]}"
do
    case $opt in
        "AWS")
            provider=aws-s3
            ;;
        "AT&T"|"Atmos")
            provider=atmos
            ;;
        "Azure")
            provider=azure
            ;;
        "ECS"|"Virtustream"|"Google"|"ViPR")
            provider=s3
            ;;
        "Swift")
            provider=swift-keystone
            ;;
        "Continue")
            # Guard: without a provider the blobstore-cli calls further
            # down would run with an empty --provider argument.
            if [[ -z "$provider" ]]
                then
                    echo "Please select a provider before continuing."
                else
                    break
            fi
            ;;
        "Quit")
            exit 1
            ;;
        *) echo "invalid option";;
    esac
done
# Gathering Cloud Store information.
# FIX: curl long options are case-sensitive, so it must be --header, not
# --Header (the latter makes curl abort with "unknown option"). The cloud
# profile is also fetched once and reused; the original fetched it three
# times over the network.
api_curl() {
    # $1: API resource name (e.g. cloud_profiles). Prints pretty-printed JSON.
    curl -s --header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" \
        -H "Content-Type:application/json" -H "Accept: application/json" \
        --insecure "https://$host:4444/api/v1/$1.json" | python -m json.tool
}
profile=$(api_curl cloud_profiles)
endpoint=$(echo "$profile" | grep endpoint | awk '{ print $2 }')
accesskey=$(echo "$profile" | grep access_key | awk '{ print $2 }')
secretkey=$(echo "$profile" | grep credential | awk '{ print $2 }')
# Strip the surrounding JSON quotes (and the trailing comma on key fields).
endpoint=${endpoint:1:${#endpoint}-2}
accesskey=${accesskey:1:${#accesskey}-3}
secretkey=${secretkey:1:${#secretkey}-3}
epoch=$(date +%s)
# Pull the metrics/management passwords out of raptor.conf.
metrics=$(grep -C 15 share /opt/maginatics/raptor/raptor.conf | grep password | awk '{ print $2 }')
metrics=${metrics:1:${#metrics}-3}
manage=$(grep -C 15 server /opt/maginatics/raptor/raptor.conf | grep password | awk '{ print $2 }')
manage=${manage:1:${#manage}-2}
# Results land under /var/log, which is why elevated privileges are required.
log_file="/var/log/diagResults_$(date +%s).txt"
# Creates log and redirect requests start.
echo "Gathering system information"
touch "$log_file"    # FIX: was 'touch log_file' (missing $), which littered the cwd
echo "Maginatics Diagnostics Tool" > "$log_file"
date >> "$log_file"
echo "Hostname: $host" >> "$log_file"
echo >> "$log_file"
echo "Version/History: " >> "$log_file"
api_curl system_info >> "$log_file"
echo >> "$log_file"
96
# Gathers CPU stats.
echo "Checking system resources"
echo -e "\033[32m"
echo "------------------- CPU -----------------" >> "$log_file"
echo -e "\033[0m"
# Group each section's output so the log file is opened once per section.
{
    mpstat
    echo
    sar -u 1 5
    echo
    echo
} >> "$log_file"
# FIX: the original emitted the green escape twice here (once before and
# once after the blank lines); a single toggle is sufficient.
echo -e "\033[32m"

# Determining RAM utilization.
echo "------------------- RAM -----------------" >> "$log_file"
echo -e "\033[0m"
{
    free -h
    echo
    echo "Current swap usage"
    sar -W 1 5
    echo
    echo
} >> "$log_file"
echo -e "\033[32m"

# Gathering information on disk stats.
echo "------------------- DISK -----------------" >> "$log_file"
echo -e "\033[0m"
{
    df -h
    echo
    echo "Current Disk stats (SIZE)"
    iostat -m
    echo
    echo "Average Disk stats (TIME) over 5 seconds"
    sar -d 1 5 | grep Average
    echo
    echo
} >> "$log_file"
133
# Performing BSV and BSB actions against the cloud store specified.
echo "Gathering Cloud Store information (may take longer)"
echo -e "\033[32m"
echo "------------------- CLOUD STORE -----------------" >> "$log_file"
echo -e "\033[0m"
echo "BSV Report: " >> "$log_file"
# Drop the trailing character left over from the JSON parse.
endpoint=${endpoint:0:${#endpoint}-1}
# Build the common blobstore-cli invocation once: AWS infers its own
# endpoint, every other provider needs --endpoint passed explicitly.
# (Replaces two nearly identical 30-line branches.)
cli=(java -jar /opt/maginatics/blobstore-cli/blobstore-cli.jar --provider "$provider")
if [[ "$provider" != "aws-s3" ]]
    then
        cli+=(--endpoint "$endpoint")
fi
cli+=(--identity "$accesskey" --credential "$secretkey")
echo "${cli[*]} validate" >> "$log_file"
"${cli[@]}" validate >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
echo "BSB Report: " >> "$log_file"
# Benchmark WRITEs at increasing parallelism; each run gets its own
# uniquely named container so reruns never collide.
# FIX: the logged command now matches the container actually used
# (the original logged cbbenchmark-$epoch but ran cbbenchmark16-$epoch).
for parallel in 16 64 128
do
    echo "${cli[*]} benchmark --num-parallel-requests $parallel --blob-size $blobsize --max-runtime $time --container cbbenchmark$parallel-$epoch WRITE" >> "$log_file"
    echo >> "$log_file"
    echo " -------- $parallel Parallel requests -------- " >> "$log_file"
    echo "Performing $parallel Parallel requests"
    "${cli[@]}" benchmark --num-parallel-requests "$parallel" --blob-size "$blobsize" --max-runtime "$time" --container "cbbenchmark$parallel-$epoch" WRITE >> "$log_file"
    echo >> "$log_file"
done
echo >> "$log_file"
echo >> "$log_file"
185
# Performing network analysis: routing, DNS, and MTR to the CPS and Cloud Store.
echo "Checking network connections"
echo -e "\033[32m"
echo "------------------- NETWORK -----------------" >> "$log_file"
echo -e "\033[0m"
echo "Route Configuration" >> "$log_file"
echo "NOTE: VLAN will show under Iface column with INTERFACE.VLAN" >> "$log_file"
route >> "$log_file"
echo >> "$log_file"
routel | grep 'target\|eth' >> "$log_file"
echo "DNS" >> "$log_file"
dig "$host" >> "$log_file"
echo >> "$log_file"
echo "My Trace Route" >> "$log_file"
echo >> "$log_file"
echo "EMC Cloud Portal: " >> "$log_file"
echo >> "$log_file"
# FIX: the logged command now matches the one actually executed (the
# original logged api.dpccloud.com but traced console.dpccloud.com).
echo "mtr -br -c $pings console.dpccloud.com -P 443 -T" >> "$log_file"
mtr -br -c "$pings" console.dpccloud.com -P 443 -T >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
echo "Cloud Store: " >> "$log_file"
echo >> "$log_file"
# Strip the leading "https://" scheme so mtr gets a bare hostname.
endpoint=${endpoint:8:${#endpoint}-1}
echo "mtr -br -c $pings $endpoint -P 443 -T" >> "$log_file"
mtr -br -c "$pings" "$endpoint" -P 443 -T >> "$log_file"
echo >> "$log_file"
echo "Average Packet Transfers by Interface" >> "$log_file"
echo >> "$log_file"
sar -n DEV "$seconds" 1 | grep Average >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
219
# Verifying share availability.
echo "Checking share availability"
echo -e "\033[32m"
echo "------------------- SHARE -----------------" >> "$log_file"
echo -e "\033[0m"
# Record active mount points and the MagFS snapshot roots in one pass.
{
    echo "Mount points:"
    mount
    echo
    echo
    echo "magfsadmin:"
    echo "magfsadmin --snapshotRoots"
    magfsadmin --snapshotRoots
    echo
    echo
} >> "$log_file"
echo -e "\033[32m"
235
# Getting the appliance health and state.
echo "------------------- SERVICES HEALTH -----------------" >> "$log_file"
echo -e "\033[0m"
# FIX: curl long options are case-sensitive — --header, not --Header.
curl -s --header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure "https://$host:4444/api/v1/virtual_machines.json" | python -m json.tool >> "$log_file"
echo >> "$log_file"
curl -s --header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure "https://$host:4444/api/v1/site_caches.json" | python -m json.tool >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
244
# Gathers MagFS client stats.
echo "Getting MagFS client stats"
echo -e "\033[32m"
echo "------------------- MAGFS -----------------" >> "$log_file"
echo -e "\033[0m"
# Query the statistics once and filter the snapshot four ways; the
# original invoked magfsadmin --getStatistics once per section, which is
# slower and can report inconsistent counters between sections.
stats=$(magfsadmin --getStatistics)
for section in transport magfs raptor glue
do
    case $section in
        transport) label="Transports" ;;
        magfs)     label="MagFS" ;;
        raptor)    label="Raptor" ;;
        glue)      label="Glue" ;;
    esac
    echo " -------- $label -------- " >> "$log_file"
    echo "$stats" | grep "Operation\|$section" >> "$log_file"
    echo >> "$log_file"
done
echo >> "$log_file"
263
# Gathers details from the site-cache, keeping only the top of the report.
echo "Reviewing cache results"
echo -e "\033[32m"
echo "------------------- CACHE -----------------" >> "$log_file"
echo -e "\033[0m"
echo "Checking if Site-cache is present."
if [ -f /var/log/hippocampus/hippo.log ]
    then
        echo "Gathering site-cache details."
        # FIX: quote the credential — an unquoted $metrics would
        # word-split or glob if the password contained special characters.
        curl -s -k -u "admin:$metrics" "https://$host:8443/metrics" | python -m json.tool | head -n 98 >> "$log_file"
    else
        echo "Site-cache is not enabled or initialized."
fi
echo >> "$log_file"
echo >> "$log_file"
echo -e "\033[32m"
280
# Gathers details from the Raptor stats.
# FIX: progress message corrected — this section reviews Raptor, not the
# cache (the original text was copy-pasted from the CACHE section).
echo "Reviewing Raptor results"
echo -e "\033[32m"
echo "------------------- RAPTOR -----------------" >> "$log_file"
echo -e "\033[0m"
echo "Gathering management server details."
echo "Raptor Metrics" >> "$log_file"
# FIX: use -s (silent) instead of -v so TLS handshake chatter does not
# spray onto the operator's terminal; every other curl here runs silent.
curl -s -k "https://admin:$manage@127.0.0.1:9000/raptor/metrics" | python -m json.tool | head -n "$raptor" >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
echo "Raptor stats" >> "$log_file"
curl -s -k "https://admin:$manage@127.0.0.1:9000/raptor/stats" >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
echo -e "\033[32m"
296
# Database queries to gather details on the appliance DB: status,
# deduplication, and file-size distribution.
echo "------------------- DATABASE -----------------" >> "$log_file"
echo -e "\033[0m"
# Database queries created by Thomas Sandholm.
# Use in association with .xls sheet to assist in determining metadata
# disk space requirements.
echo "Querying database details."
echo "[$(date)] Start of DB Analysis..." >> "$log_file"
echo >> "$log_file"
echo "[$(date)] Checking chunk-level deduplication" >> "$log_file"
mysql -e "select sum(count) as total, count(*) uniq, (sum(count)/count(*)) as deduprate from raptor.chunk_meta where state != 'phantom'" >> "$log_file" 2>&1
echo >> "$log_file"
echo >> "$log_file"
echo "[$(date)] Checking inode file distribution" >> "$log_file"
# mktemp avoids clobbering a fixed-name file in the current directory; the
# quoted heredoc delimiter keeps the SQL free of shell expansion.
# FIX: the 75th percentile previously reused the 50/100 offset, so the
# "75th" column silently duplicated the median.
dist_sql=$(mktemp)
cat > "$dist_sql" <<'ANY'
SET group_concat_max_len = 10485760; #10MB max length
SELECT
    CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
        GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
        ',', 5/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 5th,
    CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
        GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
        ',', 25/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 25th,
    CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
        GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
        ',', 50/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 50th,
    CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
        GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
        ',', 75/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 75th,
    CAST(SUBSTRING_INDEX(SUBSTRING_INDEX(
        GROUP_CONCAT(file_size ORDER BY file_size SEPARATOR ','),
        ',', 95/100 * COUNT(*) + 1), ',', -1) AS DECIMAL) AS 95th,
    COUNT(*) AS Total
    FROM inodes where not(attributes & 16)
ANY
mysql < "$dist_sql" raptor >> "$log_file" 2>&1
rm -f "$dist_sql"
echo >> "$log_file"
echo >> "$log_file"
echo "[$(date)] Checking chunk states" >> "$log_file"
mysql -e "select state,count(*) from raptor.chunk_meta group by state" >> "$log_file" 2>&1
echo "[$(date)] Number of small files with potential chunk alignment loss" >> "$log_file"
mysql -e "select count(*) from raptor.inodes where not(attributes & 16) and file_size < 262144" >> "$log_file" 2>&1
echo >> "$log_file"
echo >> "$log_file"
echo "[$(date)] Checking Chunk Map Rows" >> "$log_file"
mysql -e "select count(*) from raptor.chunk_map" >> "$log_file" 2>&1
echo >> "$log_file"
echo >> "$log_file"
echo "[$(date)] Checking Table status" >> "$log_file"
mysql -e "show table status from raptor" >> "$log_file" 2>&1
echo >> "$log_file"
echo >> "$log_file"
echo -e "\033[32m"
350
# Gathers the latest events and presents them as a list.
echo "------------------- LATEST EVENTS -----------------" >> "$log_file"
echo -e "\033[0m"
# FIX: curl long options are case-sensitive — --header, not --Header.
curl -s --header "MAGFS_USERNAME: local/admin" --header "MAGFS_PASSWORD: $AdminPass" -H "Content-Type:application/json" -H "Accept: application/json" --insecure "https://$host:4444/api/v1/events.json" | python -m json.tool | head -n 80 >> "$log_file"
echo >> "$log_file"
echo >> "$log_file"
echo -e "\033[32m"
echo "Your file is located in $log_file"
echo -e "\033[0m"