#!/usr/bin/python3

import argparse
import datetime
import glob
import itertools
import json
import os
import re
import socket
import subprocess
import sys

try:
    import psutil
except ImportError:
    psutil = None
    print('No psutil, process information will not be included',
          file=sys.stderr)

try:
    import pymysql
except ImportError:
    pymysql = None
    print('No pymysql, database information will not be included',
          file=sys.stderr)

# Stats are reported as a flat set of documents to avoid an elasticsearch
# "mapping explosion":
# https://www.elastic.co/blog/found-crash-elasticsearch#mapping-explosion


def tryint(value):
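    """Return value coerced to int if possible, otherwise unchanged."""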
    try:
        return int(value)
    except (ValueError, TypeError):
        return value


def get_service_stats(service):
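    """Query systemd for the properties named in stats for one service."""
    # The keys of the stats dict double as the list of properties passed
    # to 'systemctl show' via -p; the values are then overwritten with
    # whatever systemd reports.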
    stats = {'MemoryCurrent': 0}
    output = subprocess.check_output(['/usr/bin/systemctl', 'show', service] +
                                     ['-p%s' % stat for stat in stats])
    for line in output.decode().split('\n'):
        if not line:
            continue
        stat, val = line.split('=')
        stats[stat] = tryint(val)

    return stats


def get_services_stats():
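    """Collect stats for every devstack@*.service systemd unit."""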
    services = [os.path.basename(s) for s in
                glob.glob('/etc/systemd/system/devstack@*.service')]
    return [dict(service=service, **get_service_stats(service))
            for service in services]


def get_process_stats(proc):
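    """Return command name, arguments, pid and RSS for one process."""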
    cmdline = proc.cmdline()
    # For interpreted services, report the script being run rather than
    # the python interpreter itself.
    if 'python' in cmdline[0]:
        cmdline = cmdline[1:]
    return {'cmd': cmdline[0],
            'pid': proc.pid,
            'args': ' '.join(cmdline[1:]),
            'rss': proc.memory_info().rss}


def get_processes_stats(matches):
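    """Collect stats for processes whose command line matches one of the
    given regexes, excluding this script itself.
    """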
    me = os.getpid()
    procs = psutil.process_iter()

    def proc_matches(proc):
        return me != proc.pid and any(
            re.search(match, ' '.join(proc.cmdline()))
            for match in matches)

    return [
        get_process_stats(proc)
        for proc in procs
        if proc_matches(proc)]


def get_db_stats(host, user, passwd):
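    """Fetch db/op/count rows from the queries table in the stats DB."""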
    dbs = []
    db = pymysql.connect(host=host, user=user, password=passwd,
                         database='stats',
                         cursorclass=pymysql.cursors.DictCursor)
    with db:
        with db.cursor() as cur:
            cur.execute('SELECT db,op,count FROM queries')
            for row in cur:
                dbs.append({k: tryint(v) for k, v in row.items()})
    return dbs


def get_http_stats_for_log(logfile):
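    """Summarize API calls from a single apache log file.

    Requests are counted per service and HTTP method, and the largest
    response size seen for each service is recorded.
    """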
    stats = {}
    for line in open(logfile).readlines():
        m = re.search('"([A-Z]+) /([^" ]+)( HTTP/1.1)?" ([0-9]{3}) ([0-9]+)',
                      line)
        if m:
            method = m.group(1)
            path = m.group(2)
            status = m.group(4)
            size = int(m.group(5))

            try:
                service, rest = path.split('/', 1)
            except ValueError:
                # Root calls like "GET /identity"
                service = path
                rest = ''

            stats.setdefault(service, {'largest': 0})
            stats[service].setdefault(method, 0)
            stats[service][method] += 1
            stats[service]['largest'] = max(stats[service]['largest'], size)

    # Flatten this for ES
    return [{'service': service, 'log': os.path.basename(logfile),
             **vals}
            for service, vals in stats.items()]


def get_http_stats(logfiles):
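    """Combine API call stats from all of the provided apache logs."""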
    return list(itertools.chain.from_iterable(get_http_stats_for_log(log)
                                              for log in logfiles))


def get_report_info():
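    """Return metadata (timestamp and hostname) identifying this report."""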
    return {
        'timestamp': datetime.datetime.now().isoformat(),
        'hostname': socket.gethostname(),
    }


if __name__ == '__main__':
    process_defaults = ['privsep', 'mysqld', 'erlang', 'etcd']
    parser = argparse.ArgumentParser()
    parser.add_argument('--db-user', default='root',
                        help=('MySQL user for collecting stats '
                              '(default: "root")'))
    parser.add_argument('--db-pass', default=None,
                        help='MySQL password for db-user')
    parser.add_argument('--db-host', default='localhost',
                        help='MySQL hostname')
    parser.add_argument('--apache-log', action='append', default=[],
                        help='Collect API call stats from this apache log')
    parser.add_argument('--process', action='append',
                        default=process_defaults,
                        help=('Include process stats for this cmdline regex '
                              '(default is %s)' % ','.join(process_defaults)))
    args = parser.parse_args()

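    # Database and process stats are only collected when the optional
    # dependencies (pymysql, psutil) imported successfully and, for the
    # database, a password was provided.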
    data = {
        'services': get_services_stats(),
        'db': pymysql and args.db_pass and get_db_stats(args.db_host,
                                                        args.db_user,
                                                        args.db_pass) or [],
        'processes': psutil and get_processes_stats(args.process) or [],
        'api': get_http_stats(args.apache_log),
        'report': get_report_info(),
    }

    print(json.dumps(data, indent=2))