#!/usr/bin/python3
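"""Collect performance statistics from a devstack host.

Gathers memory usage for devstack systemd services, RSS for selected
processes, query counts from the MySQL 'stats' database, and API call
summaries parsed from Apache access logs, then prints everything as a
single JSON document on stdout (flattened for indexing, e.g. into
Elasticsearch).
"""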

import argparse
import csv
import datetime
import glob
import itertools
import json
import logging
import os
import re
import socket
import subprocess
import sys

try:
    import psutil
except ImportError:
    psutil = None
    print('No psutil, process information will not be included',
          file=sys.stderr)

try:
    import pymysql
except ImportError:
    pymysql = None
    print('No pymysql, database information will not be included',
          file=sys.stderr)

LOG = logging.getLogger('perf')

# https://www.elastic.co/blog/found-crash-elasticsearch#mapping-explosion


def tryint(value):
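    """Return value as an int if possible, otherwise unchanged."""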
    try:
        return int(value)
    except (ValueError, TypeError):
        return value


def get_service_stats(service):
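    """Return systemd properties for one service via 'systemctl show'.

    Values are converted to int where possible.
    """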
    stats = {'MemoryCurrent': 0}
    output = subprocess.check_output(['/usr/bin/systemctl', 'show', service] +
                                     ['-p%s' % stat for stat in stats])
    for line in output.decode().split('\n'):
        if not line:
            continue
        stat, val = line.split('=')
        stats[stat] = tryint(val)

    return stats


def get_services_stats():
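    """Collect stats for every devstack@*.service unit on this host."""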
    services = [os.path.basename(s) for s in
                glob.glob('/etc/systemd/system/devstack@*.service')]
    return [dict(service=service, **get_service_stats(service))
            for service in services]


def get_process_stats(proc):
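    """Return command, pid, args and RSS for a single psutil Process."""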
    cmdline = proc.cmdline()
    if 'python' in cmdline[0]:
        cmdline = cmdline[1:]
    return {'cmd': cmdline[0],
            'pid': proc.pid,
            'args': ' '.join(cmdline[1:]),
            'rss': proc.memory_info().rss}


def get_processes_stats(matches):
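    """Return stats for processes whose cmdline matches any given regex.

    The current process is always excluded.
    """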
    me = os.getpid()
    procs = psutil.process_iter()

    def proc_matches(proc):
        return me != proc.pid and any(
            re.search(match, ' '.join(proc.cmdline()))
            for match in matches)

    return [
        get_process_stats(proc)
        for proc in procs
        if proc_matches(proc)]


def get_db_stats(host, user, passwd):
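    """Read per-database query counts from the 'stats' database.

    Returns an empty list if the database does not exist (e.g. because
    devstack failed before creating it).
    """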
    dbs = []
    try:
        db = pymysql.connect(host=host, user=user, password=passwd,
                             database='stats',
                             cursorclass=pymysql.cursors.DictCursor)
    except pymysql.err.OperationalError as e:
        if 'Unknown database' in str(e):
            print('No stats database; assuming devstack failed',
                  file=sys.stderr)
            return []
        raise

    with db:
        with db.cursor() as cur:
            cur.execute('SELECT db,op,count FROM queries')
            for row in cur:
                dbs.append({k: tryint(v) for k, v in row.items()})
    return dbs


def get_http_stats_for_log(logfile):
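    """Summarize API calls from one Apache combined access log.

    Counts requests per service and per (agent, method) pair, tracks the
    largest response length seen, and returns a flat list with one dict
    per service.
    """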
    stats = {}
    apache_fields = ('host', 'a', 'b', 'date', 'tz', 'request', 'status',
                     'length', 'c', 'agent')
    ignore_agents = ('curl', 'uwsgi', 'nova-status')
    for line in csv.reader(open(logfile), delimiter=' '):
        fields = dict(zip(apache_fields, line))
        if len(fields) != len(apache_fields):
            # Not a combined access log, so we can bail completely
            return []
        try:
            method, url, http = fields['request'].split(' ')
        except ValueError:
            method = url = http = ''
        if 'HTTP' not in http:
            # Not a combined access log, so we can bail completely
            return []

        # Tempest leaves the default python-urllib User-Agent unchanged,
        # while client libraries and inter-service API calls set proper
        # agent strings. So treat 'python-urllib' as tempest to tell
        # them apart.
        if 'python-urllib' in fields['agent'].lower():
            agent = 'tempest'
        else:
            agent = fields['agent'].split(' ')[0]
            if agent.startswith('python-'):
                agent = agent.replace('python-', '')
            if '/' in agent:
                agent = agent.split('/')[0]

        if agent in ignore_agents:
            continue

        try:
            service, rest = url.strip('/').split('/', 1)
        except ValueError:
            # Root calls like "GET /identity"
            service = url.strip('/')
            rest = ''

        method_key = '%s-%s' % (agent, method)
        try:
            length = int(fields['length'])
        except ValueError:
            LOG.warning('[%s] Failed to parse length %r from line %r' % (
                logfile, fields['length'], line))
            length = 0
        stats.setdefault(service, {'largest': 0})
        stats[service].setdefault(method_key, 0)
        stats[service][method_key] += 1
        stats[service]['largest'] = max(stats[service]['largest'],
                                        length)

    # Flatten this for ES
    return [{'service': service, 'log': os.path.basename(logfile),
             **vals}
            for service, vals in stats.items()]


def get_http_stats(logfiles):
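    """Combine per-service HTTP stats from all of the given log files."""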
    return list(itertools.chain.from_iterable(get_http_stats_for_log(log)
                                              for log in logfiles))


def get_report_info():
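    """Return report metadata: timestamp, hostname and format version."""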
    return {
        'timestamp': datetime.datetime.now().isoformat(),
        'hostname': socket.gethostname(),
        'version': 2,
    }


if __name__ == '__main__':
    process_defaults = ['privsep', 'mysqld', 'erlang', 'etcd']
    parser = argparse.ArgumentParser()
    parser.add_argument('--db-user', default='root',
                        help=('MySQL user for collecting stats '
                              '(default: "root")'))
    parser.add_argument('--db-pass', default=None,
                        help='MySQL password for db-user')
    parser.add_argument('--db-host', default='localhost',
                        help='MySQL hostname')
    parser.add_argument('--apache-log', action='append', default=[],
                        help='Collect API call stats from this apache log')
    parser.add_argument('--process', action='append',
                        default=process_defaults,
                        help=('Include process stats for this cmdline regex '
                              '(default is %s)' % ','.join(process_defaults)))
    args = parser.parse_args()

    logging.basicConfig(level=logging.WARNING)

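    # DB and process stats are only collected when the optional pymysql
    # and psutil dependencies (and, for the DB, a password) are
    # available; otherwise emit empty lists so the report keeps the
    # same shape.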
    data = {
        'services': get_services_stats(),
        'db': pymysql and args.db_pass and get_db_stats(args.db_host,
                                                        args.db_user,
                                                        args.db_pass) or [],
        'processes': psutil and get_processes_stats(args.process) or [],
        'api': get_http_stats(args.apache_log),
        'report': get_report_info(),
    }

    print(json.dumps(data, indent=2))