Merge tag '0.4.0' into debian/mitaka
OSprofiler 0.4.0
- Introduce new profile.TracedMeta meta class which can be used
to trace all methods of all children class of this class
- HTML reports performance was improved
- osprofiler trace show <filepath> --html
will generate HTML report from json formated results
Thomas Goirand
8 years ago
134 | 134 | all trace points related to one trace from collector |
135 | 135 | * parent_id - <uuid> of parent trace point |
136 | 136 | * trace_id - <uuid> of current trace point |
137 | * info - it's dictionary that contains user information passed via calls of | |
137 | * info - the dictionary that contains user information passed when calling | |
138 | 138 | profiler **start()** & **stop()** methods. |
139 | 139 | |
140 | 140 |
23 | 23 | |
24 | 24 | _conf = configparser.ConfigParser() |
25 | 25 | _conf.read(os.path.join( |
26 | os.path.dirname(os.path.dirname(__file__)), 'setup.cfg')) | |
26 | os.path.dirname(os.path.dirname(__file__)), "setup.cfg")) | |
27 | 27 | try: |
28 | __version__ = _conf.get('metadata', 'version') | |
28 | __version__ = _conf.get("metadata", "version") | |
29 | 29 | except (configparser.NoOptionError, configparser.NoSectionError): |
30 | 30 | __version__ = None |
28 | 28 | # Taken/slightly modified from: |
29 | 29 | # https://mail.python.org/pipermail/python-checkins/2012-June/114532.html |
30 | 30 | def compare_digest(a, b): |
31 | """Returns the equivalent of 'a == b', but avoids content based short | |
32 | circuiting to reduce the vulnerability to timing attacks. | |
31 | """Returns the equivalent of 'a == b'. | |
32 | ||
33 | This method avoids content based short circuiting to reduce the | |
34 | vulnerability to timing attacks. | |
33 | 35 | """ |
34 | 36 | # We assume the length of the expected digest is public knowledge, |
35 | 37 | # thus this early return isn't leaking anything an attacker wouldn't |
24 | 24 | value = os.environ.get(arg) |
25 | 25 | if value: |
26 | 26 | return value |
27 | return kwargs.get('default', '') | |
27 | return kwargs.get("default", "") | |
28 | 28 | |
29 | 29 | |
30 | 30 | def arg(*args, **kwargs): |
45 | 45 | def add_arg(func, *args, **kwargs): |
46 | 46 | """Bind CLI arguments to a shell.py `do_foo` function.""" |
47 | 47 | |
48 | if not hasattr(func, 'arguments'): | |
48 | if not hasattr(func, "arguments"): | |
49 | 49 | func.arguments = [] |
50 | 50 | |
51 | 51 | # NOTE(sirp): avoid dups that can occur when the module is shared across |
27 | 27 | class TraceCommands(BaseCommand): |
28 | 28 | group_name = "trace" |
29 | 29 | |
30 | @cliutils.arg('trace_id', help='trace id') | |
31 | @cliutils.arg('--json', dest='use_json', action='store_true', | |
32 | help='show trace in JSON') | |
33 | @cliutils.arg('--html', dest='use_html', action='store_true', | |
34 | help='show trace in HTML') | |
35 | @cliutils.arg('--out', dest='file_name', help='save output in file') | |
30 | @cliutils.arg("trace", help="File with trace or trace id") | |
31 | @cliutils.arg("--json", dest="use_json", action="store_true", | |
32 | help="show trace in JSON") | |
33 | @cliutils.arg("--html", dest="use_html", action="store_true", | |
34 | help="show trace in HTML") | |
35 | @cliutils.arg("--out", dest="file_name", help="save output in file") | |
36 | 36 | def show(self, args): |
37 | 37 | """Displays trace-results by given trace id in HTML or JSON format.""" |
38 | try: | |
39 | import ceilometerclient.client | |
40 | import ceilometerclient.exc | |
41 | import ceilometerclient.shell | |
42 | except ImportError: | |
43 | raise ImportError( | |
44 | "To use this command, you should install 'ceilometerclient' " | |
45 | "manually. Use command:\n 'pip install ceilometerclient'.") | |
46 | try: | |
47 | client = ceilometerclient.client.get_client( | |
48 | args.ceilometer_api_version, **args.__dict__) | |
49 | notifications = ceiloparser.get_notifications( | |
50 | client, args.trace_id) | |
51 | except Exception as e: | |
52 | if hasattr(e, 'http_status') and e.http_status == 401: | |
53 | msg = "Invalid OpenStack Identity credentials." | |
54 | else: | |
55 | msg = "Something has gone wrong. See logs for more details." | |
56 | 38 | |
57 | raise exc.CommandError(msg) | |
39 | trace = None | |
58 | 40 | |
59 | if not notifications: | |
41 | if os.path.exists(args.trace): | |
42 | trace = json.load(open(args.trace)) | |
43 | else: | |
44 | try: | |
45 | import ceilometerclient.client | |
46 | import ceilometerclient.exc | |
47 | import ceilometerclient.shell | |
48 | except ImportError: | |
49 | raise ImportError( | |
50 | "To use this command, you should install " | |
51 | "'ceilometerclient' manually. Use command:\n " | |
52 | "'pip install ceilometerclient'.") | |
53 | try: | |
54 | client = ceilometerclient.client.get_client( | |
55 | args.ceilometer_api_version, **args.__dict__) | |
56 | notifications = ceiloparser.get_notifications( | |
57 | client, args.trace) | |
58 | except Exception as e: | |
59 | if hasattr(e, "http_status") and e.http_status == 401: | |
60 | msg = "Invalid OpenStack Identity credentials." | |
61 | else: | |
62 | msg = "Something has gone wrong. See logs for more details" | |
63 | raise exc.CommandError(msg) | |
64 | ||
65 | if notifications: | |
66 | trace = ceiloparser.parse_notifications(notifications) | |
67 | ||
68 | if not trace: | |
60 | 69 | msg = ("Trace with UUID %s not found. " |
61 | 70 | "There are 3 possible reasons: \n" |
62 | 71 | " 1) You are using not admin credentials\n" |
63 | 72 | " 2) You specified wrong trace id\n" |
64 | 73 | " 3) You specified wrong HMAC Key in original calling" |
65 | % args.trace_id) | |
74 | % args.trace) | |
66 | 75 | raise exc.CommandError(msg) |
67 | 76 | |
68 | parsed_notifications = ceiloparser.parse_notifications(notifications) | |
69 | ||
70 | 77 | if args.use_json: |
71 | output = json.dumps(parsed_notifications) | |
78 | output = json.dumps(trace) | |
72 | 79 | elif args.use_html: |
73 | 80 | with open(os.path.join(os.path.dirname(__file__), |
74 | 81 | "template.html")) as html_template: |
75 | 82 | output = html_template.read().replace( |
76 | "$DATA", json.dumps(parsed_notifications, indent=2)) | |
83 | "$DATA", json.dumps(trace, indent=2)) | |
77 | 84 | else: |
78 | 85 | raise exc.CommandError("You should choose one of the following " |
79 | 86 | "output-formats: --json or --html.") |
80 | 87 | |
81 | 88 | if args.file_name: |
82 | with open(args.file_name, 'w+') as output_file: | |
89 | with open(args.file_name, "w+") as output_file: | |
83 | 90 | output_file.write(output) |
84 | 91 | else: |
85 | 92 | print (output) |
67 | 67 | add_help=True |
68 | 68 | ) |
69 | 69 | |
70 | parser.add_argument('-v', '--version', | |
71 | action='version', | |
70 | parser.add_argument("-v", "--version", | |
71 | action="version", | |
72 | 72 | version=osprofiler.__version__) |
73 | 73 | |
74 | 74 | self._append_ceilometer_args(parser) |
78 | 78 | return parser |
79 | 79 | |
80 | 80 | def _append_ceilometer_args(self, parent_parser): |
81 | parser = parent_parser.add_argument_group('ceilometer') | |
81 | parser = parent_parser.add_argument_group("ceilometer") | |
82 | 82 | parser.add_argument( |
83 | '--ceilometer-url', default=cliutils.env('CEILOMETER_URL'), | |
84 | help='Defaults to env[CEILOMETER_URL].') | |
83 | "--ceilometer-url", default=cliutils.env("CEILOMETER_URL"), | |
84 | help="Defaults to env[CEILOMETER_URL].") | |
85 | 85 | parser.add_argument( |
86 | '--ceilometer-api-version', | |
87 | default=cliutils.env('CEILOMETER_API_VERSION', default='2'), | |
88 | help='Defaults to env[CEILOMETER_API_VERSION] or 2.') | |
86 | "--ceilometer-api-version", | |
87 | default=cliutils.env("CEILOMETER_API_VERSION", default="2"), | |
88 | help="Defaults to env[CEILOMETER_API_VERSION] or 2.") | |
89 | 89 | |
90 | 90 | def _append_identity_args(self, parent_parser): |
91 | 91 | # FIXME(fabgia): identity related parameters should be passed by the |
92 | 92 | # Keystone client itself to avoid constant update in all the services |
93 | 93 | # clients. When this fix is merged this method can be made obsolete. |
94 | 94 | # Bug: https://bugs.launchpad.net/python-keystoneclient/+bug/1332337 |
95 | parser = parent_parser.add_argument_group('identity') | |
96 | parser.add_argument('-k', '--insecure', | |
95 | parser = parent_parser.add_argument_group("identity") | |
96 | parser.add_argument("-k", "--insecure", | |
97 | 97 | default=False, |
98 | action='store_true', | |
98 | action="store_true", | |
99 | 99 | help="Explicitly allow osprofiler to " |
100 | 100 | "perform \"insecure\" SSL (https) requests. " |
101 | 101 | "The server's certificate will " |
104 | 104 | "caution.") |
105 | 105 | |
106 | 106 | # User related options |
107 | parser.add_argument('--os-username', | |
108 | default=cliutils.env('OS_USERNAME'), | |
109 | help='Defaults to env[OS_USERNAME].') | |
110 | ||
111 | parser.add_argument('--os-user-id', | |
112 | default=cliutils.env('OS_USER_ID'), | |
113 | help='Defaults to env[OS_USER_ID].') | |
114 | ||
115 | parser.add_argument('--os-password', | |
116 | default=cliutils.env('OS_PASSWORD'), | |
117 | help='Defaults to env[OS_PASSWORD].') | |
107 | parser.add_argument("--os-username", | |
108 | default=cliutils.env("OS_USERNAME"), | |
109 | help="Defaults to env[OS_USERNAME].") | |
110 | ||
111 | parser.add_argument("--os-user-id", | |
112 | default=cliutils.env("OS_USER_ID"), | |
113 | help="Defaults to env[OS_USER_ID].") | |
114 | ||
115 | parser.add_argument("--os-password", | |
116 | default=cliutils.env("OS_PASSWORD"), | |
117 | help="Defaults to env[OS_PASSWORD].") | |
118 | 118 | |
119 | 119 | # Domain related options |
120 | parser.add_argument('--os-user-domain-id', | |
121 | default=cliutils.env('OS_USER_DOMAIN_ID'), | |
122 | help='Defaults to env[OS_USER_DOMAIN_ID].') | |
123 | ||
124 | parser.add_argument('--os-user-domain-name', | |
125 | default=cliutils.env('OS_USER_DOMAIN_NAME'), | |
126 | help='Defaults to env[OS_USER_DOMAIN_NAME].') | |
127 | ||
128 | parser.add_argument('--os-project-domain-id', | |
129 | default=cliutils.env('OS_PROJECT_DOMAIN_ID'), | |
130 | help='Defaults to env[OS_PROJECT_DOMAIN_ID].') | |
131 | ||
132 | parser.add_argument('--os-project-domain-name', | |
133 | default=cliutils.env('OS_PROJECT_DOMAIN_NAME'), | |
134 | help='Defaults to env[OS_PROJECT_DOMAIN_NAME].') | |
120 | parser.add_argument("--os-user-domain-id", | |
121 | default=cliutils.env("OS_USER_DOMAIN_ID"), | |
122 | help="Defaults to env[OS_USER_DOMAIN_ID].") | |
123 | ||
124 | parser.add_argument("--os-user-domain-name", | |
125 | default=cliutils.env("OS_USER_DOMAIN_NAME"), | |
126 | help="Defaults to env[OS_USER_DOMAIN_NAME].") | |
127 | ||
128 | parser.add_argument("--os-project-domain-id", | |
129 | default=cliutils.env("OS_PROJECT_DOMAIN_ID"), | |
130 | help="Defaults to env[OS_PROJECT_DOMAIN_ID].") | |
131 | ||
132 | parser.add_argument("--os-project-domain-name", | |
133 | default=cliutils.env("OS_PROJECT_DOMAIN_NAME"), | |
134 | help="Defaults to env[OS_PROJECT_DOMAIN_NAME].") | |
135 | 135 | |
136 | 136 | # Project V3 or Tenant V2 related options |
137 | parser.add_argument('--os-project-id', | |
138 | default=cliutils.env('OS_PROJECT_ID'), | |
139 | help='Another way to specify tenant ID. ' | |
140 | 'This option is mutually exclusive with ' | |
141 | ' --os-tenant-id. ' | |
142 | 'Defaults to env[OS_PROJECT_ID].') | |
143 | ||
144 | parser.add_argument('--os-project-name', | |
145 | default=cliutils.env('OS_PROJECT_NAME'), | |
146 | help='Another way to specify tenant name. ' | |
147 | 'This option is mutually exclusive with ' | |
148 | ' --os-tenant-name. ' | |
149 | 'Defaults to env[OS_PROJECT_NAME].') | |
150 | ||
151 | parser.add_argument('--os-tenant-id', | |
152 | default=cliutils.env('OS_TENANT_ID'), | |
153 | help='This option is mutually exclusive with ' | |
154 | ' --os-project-id. ' | |
155 | 'Defaults to env[OS_PROJECT_ID].') | |
156 | ||
157 | parser.add_argument('--os-tenant-name', | |
158 | default=cliutils.env('OS_TENANT_NAME'), | |
159 | help='Defaults to env[OS_TENANT_NAME].') | |
137 | parser.add_argument("--os-project-id", | |
138 | default=cliutils.env("OS_PROJECT_ID"), | |
139 | help="Another way to specify tenant ID. " | |
140 | "This option is mutually exclusive with " | |
141 | " --os-tenant-id. " | |
142 | "Defaults to env[OS_PROJECT_ID].") | |
143 | ||
144 | parser.add_argument("--os-project-name", | |
145 | default=cliutils.env("OS_PROJECT_NAME"), | |
146 | help="Another way to specify tenant name. " | |
147 | "This option is mutually exclusive with " | |
148 | " --os-tenant-name. " | |
149 | "Defaults to env[OS_PROJECT_NAME].") | |
150 | ||
151 | parser.add_argument("--os-tenant-id", | |
152 | default=cliutils.env("OS_TENANT_ID"), | |
153 | help="This option is mutually exclusive with " | |
154 | " --os-project-id. " | |
155 | "Defaults to env[OS_PROJECT_ID].") | |
156 | ||
157 | parser.add_argument("--os-tenant-name", | |
158 | default=cliutils.env("OS_TENANT_NAME"), | |
159 | help="Defaults to env[OS_TENANT_NAME].") | |
160 | 160 | |
161 | 161 | # Auth related options |
162 | parser.add_argument('--os-auth-url', | |
163 | default=cliutils.env('OS_AUTH_URL'), | |
164 | help='Defaults to env[OS_AUTH_URL].') | |
165 | ||
166 | parser.add_argument('--os-auth-token', | |
167 | default=cliutils.env('OS_AUTH_TOKEN'), | |
168 | help='Defaults to env[OS_AUTH_TOKEN].') | |
169 | ||
170 | parser.add_argument('--os-cacert', | |
171 | metavar='<ca-certificate-file>', | |
172 | dest='os_cacert', | |
173 | default=cliutils.env('OS_CACERT'), | |
174 | help='Path of CA TLS certificate(s) used to verify' | |
175 | ' the remote server\'s certificate. Without this ' | |
176 | 'option ceilometer looks for the default system CA' | |
177 | ' certificates.') | |
178 | ||
179 | parser.add_argument('--os-cert', | |
180 | help='Path of certificate file to use in SSL ' | |
181 | 'connection. This file can optionally be ' | |
182 | 'prepended with the private key.') | |
183 | ||
184 | parser.add_argument('--os-key', | |
185 | help='Path of client key to use in SSL ' | |
186 | 'connection. This option is not necessary ' | |
187 | 'if your key is prepended to your cert file.') | |
162 | parser.add_argument("--os-auth-url", | |
163 | default=cliutils.env("OS_AUTH_URL"), | |
164 | help="Defaults to env[OS_AUTH_URL].") | |
165 | ||
166 | parser.add_argument("--os-auth-token", | |
167 | default=cliutils.env("OS_AUTH_TOKEN"), | |
168 | help="Defaults to env[OS_AUTH_TOKEN].") | |
169 | ||
170 | parser.add_argument("--os-cacert", | |
171 | metavar="<ca-certificate-file>", | |
172 | dest="os_cacert", | |
173 | default=cliutils.env("OS_CACERT"), | |
174 | help="Path of CA TLS certificate(s) used to verify" | |
175 | " the remote server\"s certificate. Without this " | |
176 | "option ceilometer looks for the default system CA" | |
177 | " certificates.") | |
178 | ||
179 | parser.add_argument("--os-cert", | |
180 | help="Path of certificate file to use in SSL " | |
181 | "connection. This file can optionally be " | |
182 | "prepended with the private key.") | |
183 | ||
184 | parser.add_argument("--os-key", | |
185 | help="Path of client key to use in SSL " | |
186 | "connection. This option is not necessary " | |
187 | "if your key is prepended to your cert file.") | |
188 | 188 | |
189 | 189 | # Service Catalog related options |
190 | parser.add_argument('--os-service-type', | |
191 | default=cliutils.env('OS_SERVICE_TYPE'), | |
192 | help='Defaults to env[OS_SERVICE_TYPE].') | |
193 | ||
194 | parser.add_argument('--os-endpoint-type', | |
195 | default=cliutils.env('OS_ENDPOINT_TYPE'), | |
196 | help='Defaults to env[OS_ENDPOINT_TYPE].') | |
197 | ||
198 | parser.add_argument('--os-region-name', | |
199 | default=cliutils.env('OS_REGION_NAME'), | |
200 | help='Defaults to env[OS_REGION_NAME].') | |
190 | parser.add_argument("--os-service-type", | |
191 | default=cliutils.env("OS_SERVICE_TYPE"), | |
192 | help="Defaults to env[OS_SERVICE_TYPE].") | |
193 | ||
194 | parser.add_argument("--os-endpoint-type", | |
195 | default=cliutils.env("OS_ENDPOINT_TYPE"), | |
196 | help="Defaults to env[OS_ENDPOINT_TYPE].") | |
197 | ||
198 | parser.add_argument("--os-region-name", | |
199 | default=cliutils.env("OS_REGION_NAME"), | |
200 | help="Defaults to env[OS_REGION_NAME].") | |
201 | 201 | |
202 | 202 | def _append_subcommands(self, parent_parser): |
203 | subcommands = parent_parser.add_subparsers(help='<subcommands>') | |
203 | subcommands = parent_parser.add_subparsers(help="<subcommands>") | |
204 | 204 | for group_cls in commands.BaseCommand.__subclasses__(): |
205 | 205 | group_parser = subcommands.add_parser(group_cls.group_name) |
206 | 206 | subcommand_parser = group_parser.add_subparsers() |
207 | 207 | |
208 | 208 | for name, callback in inspect.getmembers( |
209 | 209 | group_cls(), predicate=inspect.ismethod): |
210 | command = name.replace('_', '-') | |
211 | desc = callback.__doc__ or '' | |
212 | help_message = desc.strip().split('\n')[0] | |
213 | arguments = getattr(callback, 'arguments', []) | |
210 | command = name.replace("_", "-") | |
211 | desc = callback.__doc__ or "" | |
212 | help_message = desc.strip().split("\n")[0] | |
213 | arguments = getattr(callback, "arguments", []) | |
214 | 214 | |
215 | 215 | command_parser = subcommand_parser.add_parser( |
216 | 216 | command, help=help_message, description=desc) |
16 | 16 | min-width: 900px; |
17 | 17 | width: 100%; |
18 | 18 | } |
19 | .trace tr.active-true { | |
19 | .trace tr:hover { | |
20 | 20 | background-color: #D9EDF7!important; |
21 | 21 | } |
22 | 22 | .trace tr td { |
58 | 58 | |
59 | 59 | <div ng-init="hide_children=false"> |
60 | 60 | <table class="trace cursor_pointer_on_hover"> |
61 | <tr class="active-{{hover}}" ng-init="hover=false" ng-mouseenter="hover=true" ng-mouseleave="hover=false"> | |
61 | <tr> | |
62 | 62 | <td class="level" style="padding-left:{{data.level * 5}}px;"> |
63 | 63 | <button type="button" class="btn btn-default btn-xs" ng-disabled="data.is_leaf" ng-click="hide_children=!hide_children"> |
64 | 64 | <span class="glyphicon glyphicon-{{ (data.is_leaf) ? 'cloud' : ((hide_children) ? 'plus': 'minus')}}"></span> |
124 | 124 | var metadata = {}; |
125 | 125 | angular.forEach(info, function(value, key) { |
126 | 126 | var parts = key.split("."); |
127 | if (parts[0] == "info"){ | |
127 | if (parts[0] == "meta"){ | |
128 | 128 | |
129 | if (parts.length != 2){ | |
130 | this[key] = value; | |
129 | if (parts.length == 2){ | |
130 | this[parts[1]] = value; | |
131 | 131 | } |
132 | 132 | else{ |
133 | var group_name = parts[1].split(":"); | |
134 | if (group_name.length == 2){ | |
135 | if (!(group_name[0] in this)) | |
136 | this[group_name[0]] = {}; | |
133 | var group_name = parts[1]; | |
134 | if (!(group_name in this)) | |
135 | this[group_name] = {}; | |
137 | 136 | |
138 | this[group_name[0]][group_name[1]] = value; | |
139 | } | |
137 | this[group_name][parts[2]] = value; | |
140 | 138 | } |
141 | 139 | }; |
142 | 140 | }, metadata); |
189 | 187 | |
190 | 188 | </body> |
191 | 189 | |
192 | </html>⏎ | |
190 | </html> |
54 | 54 | :param *args: args that will be passed to plugin init method |
55 | 55 | :param **kwargs: kwargs that will be passed to plugin init method |
56 | 56 | :returns: Callable notifier method |
57 | :raise TypeError: In case of invalid name of plugin raises TypeError | |
57 | :raises TypeError: In case of invalid name of plugin raises TypeError | |
58 | 58 | """ |
59 | 59 | return base.Notifier.factory(plugin_name, *args, **kwargs) |
11 | 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the |
12 | 12 | # License for the specific language governing permissions and limitations |
13 | 13 | # under the License. |
14 | ||
15 | 14 | |
16 | 15 | import datetime |
17 | 16 | |
55 | 54 | finished_at = 0 |
56 | 55 | |
57 | 56 | for n in notifications: |
58 | meta = n["metadata"] | |
59 | key = meta["trace_id"] | |
57 | traits = n["traits"] | |
60 | 58 | |
61 | if key not in result: | |
62 | result[key] = { | |
59 | def find_field(f_name): | |
60 | return [t["value"] for t in traits if t["name"] == f_name][0] | |
61 | ||
62 | trace_id = find_field("trace_id") | |
63 | parent_id = find_field("parent_id") | |
64 | name = find_field("name") | |
65 | project = find_field("project") | |
66 | service = find_field("service") | |
67 | host = find_field("host") | |
68 | timestamp = find_field("timestamp") | |
69 | ||
70 | try: | |
71 | timestamp = datetime.datetime.strptime(timestamp, | |
72 | "%Y-%m-%dT%H:%M:%S.%f") | |
73 | except ValueError: | |
74 | timestamp = datetime.datetime.strptime(timestamp, | |
75 | "%Y-%m-%dT%H:%M:%S") | |
76 | ||
77 | if trace_id not in result: | |
78 | result[trace_id] = { | |
63 | 79 | "info": { |
64 | "name": meta["name"].split("-")[0] | |
80 | "name": name.split("-")[0], | |
81 | "project": project, | |
82 | "service": service, | |
83 | "meta.host": host, | |
84 | "host": host, | |
65 | 85 | }, |
66 | "parent_id": meta["parent_id"], | |
67 | "trace_id": meta["trace_id"] | |
86 | "trace_id": trace_id, | |
87 | "parent_id": parent_id, | |
68 | 88 | } |
69 | 89 | |
70 | skip_keys = ["base_id", "trace_id", "parent_id", "name", "event_type"] | |
90 | skip_keys = ["base_id", "trace_id", "parent_id", | |
91 | "name", "project", "service", "host", "timestamp"] | |
71 | 92 | |
72 | for k in meta: | |
73 | if k not in skip_keys: | |
74 | result[key]["info"][k] = meta[k] | |
93 | for k in traits: | |
94 | if k["name"] not in skip_keys: | |
95 | result[trace_id]["info"]["meta.%s" % k["name"]] = k["value"] | |
75 | 96 | |
76 | try: | |
77 | timestamp = datetime.datetime.strptime(n["timestamp"], | |
78 | "%Y-%m-%dT%H:%M:%S.%f") | |
79 | except ValueError: | |
80 | timestamp = datetime.datetime.strptime(n["timestamp"], | |
81 | "%Y-%m-%dT%H:%M:%S") | |
82 | ||
83 | if meta["name"].endswith("stop"): | |
84 | result[key]["info"]["finished"] = timestamp | |
97 | if name.endswith("stop"): | |
98 | result[trace_id]["info"]["finished"] = timestamp | |
85 | 99 | else: |
86 | result[key]["info"]["started"] = timestamp | |
100 | result[trace_id]["info"]["started"] = timestamp | |
87 | 101 | |
88 | 102 | if not started_at or started_at > timestamp: |
89 | 103 | started_at = timestamp |
95 | 109 | # NOTE(boris-42): Unfortunately this is the simplest way that works in |
96 | 110 | # py26 and py27 |
97 | 111 | microsec = (dt.microseconds + (dt.seconds + dt.days * 24 * 3600) * 1e6) |
98 | return (int)(microsec / 1000.0) | |
112 | return int(microsec / 1000.0) | |
99 | 113 | |
100 | 114 | for r in result.values(): |
101 | 115 | # NOTE(boris-42): We are not able to guarantee that ceilometer consumed |
125 | 139 | :param base_id: Base id of trace elements. |
126 | 140 | """ |
127 | 141 | |
128 | _filter = '{"=": {"resource_id": "profiler-%s"}}' % base_id | |
142 | _filter = [{"field": "base_id", "op": "eq", "value": base_id}] | |
143 | # limit is hardcoded in this code state. Later that will be changed via | |
144 | # connection string usage | |
129 | 145 | return [n.to_dict() |
130 | for n in ceilometer.query_samples.query(_filter, None, None)] | |
146 | for n in ceilometer.events.list(_filter, limit=100000)] |
13 | 13 | # under the License. |
14 | 14 | |
15 | 15 | import collections |
16 | import datetime | |
16 | 17 | import functools |
17 | 18 | import inspect |
19 | import socket | |
18 | 20 | import threading |
19 | 21 | import uuid |
22 | ||
23 | import six | |
20 | 24 | |
21 | 25 | from osprofiler import notifier |
22 | 26 | |
150 | 154 | return decorator |
151 | 155 | |
152 | 156 | |
157 | class TracedMeta(type): | |
158 | """Metaclass to comfortably trace all children of a specific class. | |
159 | ||
160 | Possible usage: | |
161 | ||
162 | >>> @six.add_metaclass(profiler.TracedMeta) | |
163 | >>> class RpcManagerClass(object): | |
164 | >>> __trace_args__ = {'name': 'rpc', | |
165 | >>> 'info': None, | |
166 | >>> 'hide_args': False, | |
167 | >>> 'trace_private': False} | |
168 | >>> | |
169 | >>> def my_method(self, some_args): | |
170 | >>> pass | |
171 | >>> | |
172 | >>> def my_method2(self, some_arg1, some_arg2, kw=None, kw2=None) | |
173 | >>> pass | |
174 | ||
175 | Adding of this metaclass requires to set __trace_args__ attribute to the | |
176 | class we want to modify. __trace_args__ is the dictionary with one | |
177 | mandatory key included - "name", that will define name of action to be | |
178 | traced - E.g. wsgi, rpc, db, etc... | |
179 | """ | |
180 | def __init__(cls, cls_name, bases, attrs): | |
181 | super(TracedMeta, cls).__init__(cls_name, bases, attrs) | |
182 | ||
183 | trace_args = dict(getattr(cls, "__trace_args__", {})) | |
184 | trace_private = trace_args.pop("trace_private", False) | |
185 | if "name" not in trace_args: | |
186 | raise TypeError("Please specify __trace_args__ class level " | |
187 | "dictionary attribute with mandatory 'name' key - " | |
188 | "e.g. __trace_args__ = {'name': 'rpc'}") | |
189 | ||
190 | for attr_name, attr_value in six.iteritems(attrs): | |
191 | if not (inspect.ismethod(attr_value) or | |
192 | inspect.isfunction(attr_value)): | |
193 | continue | |
194 | if attr_name.startswith("__"): | |
195 | continue | |
196 | if not trace_private and attr_name.startswith("_"): | |
197 | continue | |
198 | ||
199 | setattr(cls, attr_name, trace(**trace_args)(getattr(cls, | |
200 | attr_name))) | |
201 | ||
202 | ||
153 | 203 | class Trace(object): |
154 | 204 | |
155 | 205 | def __init__(self, name, info=None): |
196 | 246 | base_id = str(uuid.uuid4()) |
197 | 247 | self._trace_stack = collections.deque([base_id, parent_id or base_id]) |
198 | 248 | self._name = collections.deque() |
249 | self._host = socket.gethostname() | |
199 | 250 | |
200 | 251 | def get_base_id(self): |
201 | """Return base if of trace. | |
252 | """Return base id of a trace. | |
202 | 253 | |
203 | 254 | Base id is the same for all elements in one trace. It's main goal is |
204 | 255 | to be able to retrieve by one request all trace elements from storage. |
223 | 274 | trace_id - current event id. |
224 | 275 | |
225 | 276 | As we are writing this code special for OpenStack, and there will be |
226 | only one implementation of notifier based on ceilometer notifer api. | |
277 | only one implementation of notifier based on ceilometer notifier api. | |
227 | 278 | That already contains timestamps, so we don't measure time by hand. |
228 | 279 | |
229 | 280 | :param name: name of trace element (db, wsgi, rpc, etc..) |
231 | 282 | trace element. (sql request, rpc message or url...) |
232 | 283 | """ |
233 | 284 | |
285 | info = info or {} | |
286 | info["host"] = self._host | |
234 | 287 | self._name.append(name) |
235 | 288 | self._trace_stack.append(str(uuid.uuid4())) |
236 | 289 | self._notify("%s-start" % name, info) |
242 | 295 | |
243 | 296 | :param info: Dict with useful info. It will be send in notification. |
244 | 297 | """ |
245 | self._notify('%s-stop' % self._name.pop(), info) | |
298 | info = info or {} | |
299 | info["host"] = self._host | |
300 | self._notify("%s-stop" % self._name.pop(), info) | |
246 | 301 | self._trace_stack.pop() |
247 | 302 | |
248 | 303 | def _notify(self, name, info): |
250 | 305 | "name": name, |
251 | 306 | "base_id": self.get_base_id(), |
252 | 307 | "trace_id": self.get_id(), |
253 | "parent_id": self.get_parent_id() | |
308 | "parent_id": self.get_parent_id(), | |
309 | "timestamp": datetime.datetime.utcnow(), | |
254 | 310 | } |
255 | 311 | if info: |
256 | 312 | payload["info"] = info |
35 | 35 | """Add tracing to all sqlalchemy calls.""" |
36 | 36 | |
37 | 37 | if not _DISABLED: |
38 | sqlalchemy.event.listen(engine, 'before_cursor_execute', | |
38 | sqlalchemy.event.listen(engine, "before_cursor_execute", | |
39 | 39 | _before_cursor_execute(name)) |
40 | sqlalchemy.event.listen(engine, 'after_cursor_execute', | |
40 | sqlalchemy.event.listen(engine, "after_cursor_execute", | |
41 | 41 | _after_cursor_execute()) |
42 | 42 | |
43 | 43 | |
45 | 45 | """Add listener that will send trace info before query is executed.""" |
46 | 46 | |
47 | 47 | def handler(conn, cursor, statement, params, context, executemany): |
48 | info = {"db.statement": statement, "db.params": params} | |
48 | info = {"db": { | |
49 | "statement": statement, | |
50 | "params": params} | |
51 | } | |
49 | 52 | profiler.start(name, info=info) |
50 | 53 | |
51 | 54 | return handler |
38 | 38 | return {} |
39 | 39 | |
40 | 40 | |
41 | _DISABLED = False | |
41 | _ENABLED = None | |
42 | 42 | _HMAC_KEYS = None |
43 | 43 | |
44 | 44 | |
48 | 48 | This is the alternative way to disable middleware. It will be used to be |
49 | 49 | able to disable middleware via oslo.config. |
50 | 50 | """ |
51 | global _DISABLED | |
52 | _DISABLED = True | |
51 | global _ENABLED | |
52 | _ENABLED = False | |
53 | 53 | |
54 | 54 | |
55 | 55 | def enable(hmac_keys=None): |
56 | 56 | """Enable middleware.""" |
57 | global _DISABLED, _HMAC_KEYS | |
58 | _DISABLED = False | |
57 | global _ENABLED, _HMAC_KEYS | |
58 | _ENABLED = True | |
59 | 59 | _HMAC_KEYS = utils.split(hmac_keys or "") |
60 | 60 | |
61 | 61 | |
96 | 96 | |
97 | 97 | @webob.dec.wsgify |
98 | 98 | def __call__(self, request): |
99 | if _DISABLED or not self.enabled: | |
99 | if (_ENABLED is not None and not _ENABLED or | |
100 | _ENABLED is None and not self.enabled): | |
100 | 101 | return request.get_response(self.application) |
101 | 102 | |
102 | 103 | trace_info = utils.signed_unpack(request.headers.get("X-Trace-Info"), |
109 | 110 | profiler.init(**trace_info) |
110 | 111 | info = { |
111 | 112 | "request": { |
112 | "host_url": request.host_url, | |
113 | 113 | "path": request.path, |
114 | 114 | "query": request.query_string, |
115 | 115 | "method": request.method, |
0 | hacking>=0.8.0,<0.9 | |
0 | hacking>=0.10.2,<0.11 | |
1 | 1 | |
2 | 2 | coverage>=3.6 |
3 | 3 | discover |
4 | mock>=1.0 | |
4 | mock>=1.2 | |
5 | 5 | python-subunit>=0.0.18 |
6 | 6 | testrepository>=0.0.18 |
7 | testtools>=0.9.34 | |
7 | testtools>=1.4.0 | |
8 | 8 | |
9 | oslosphinx | |
10 | sphinx>=1.1.2,!=1.2.0,<1.3 | |
9 | oslosphinx>=2.5.0,!=3.4.0 # Apache-2.0 | |
10 | sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3 |
138 | 138 | pass |
139 | 139 | |
140 | 140 | self.ceiloclient.client.get_client.side_effect = FakeException |
141 | msg = "Something has gone wrong. See logs for more details." | |
141 | msg = "Something has gone wrong. See logs for more details" | |
142 | 142 | self._test_with_command_error("trace show fake_id", msg) |
143 | 143 | |
144 | 144 | @mock.patch("osprofiler.parsers.ceilometer.get_notifications") |
187 | 187 | "started": 0, "finished": 0, "name": "total"}, "children": []} |
188 | 188 | mock_notifications.return_value = notifications |
189 | 189 | |
190 | #NOTE(akurilin): to simplify assert statement, html-template should be | |
190 | # NOTE(akurilin): to simplify assert statement, html-template should be | |
191 | 191 | # replaced. |
192 | 192 | html_template = ( |
193 | 193 | "A long time ago in a galaxy far, far away..." |
0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may | |
1 | # not use this file except in compliance with the License. You may obtain | |
2 | # a copy of the License at | |
3 | # | |
4 | # http://www.apache.org/licenses/LICENSE-2.0 | |
5 | # | |
6 | # Unless required by applicable law or agreed to in writing, software | |
7 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT | |
8 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the | |
9 | # License for the specific language governing permissions and limitations | |
10 | # under the License. | |
11 | ||
12 | """ | |
13 | Guidelines for writing new hacking checks | |
14 | ||
- Use only for OSProfiler-specific tests. OpenStack general tests
  should be submitted to the common 'hacking' module.
- Pick numbers in the range N3xx. Find the current test with
  the highest allocated number and then pick the next value.
- Keep the test method code in the source file ordered based
  on the N3xx value.
- List the new rule in the top-level HACKING.rst file.
- Add test cases for each new rule to tests/unit/test_hacking.py.
23 | ||
24 | """ | |
25 | ||
26 | import functools | |
27 | import re | |
28 | import tokenize | |
29 | ||
# Pre-compiled patterns used by the check functions below.  Each one
# matches a construct that the corresponding N3xx rule forbids.

# assertTrue(isinstance(a, b)) -- should be assertIsInstance(a, b) (N320)
re_assert_true_instance = re.compile(
    r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, "
    r"(\w|\.|\'|\"|\[|\])+\)\)")
# assertEqual(type(A), B) -- should be assertIsInstance(A, B) (N321)
re_assert_equal_type = re.compile(
    r"(.)*assertEqual\(type\((\w|\.|\'|\"|\[|\])+\), "
    r"(\w|\.|\'|\"|\[|\])+\)")
# assertEqual(..., None) / assertEqual(None, ...) -- use assertIsNone (N322)
re_assert_equal_end_with_none = re.compile(r"assertEqual\(.*?,\s+None\)$")
re_assert_equal_start_with_none = re.compile(r"assertEqual\(None,")
# assertTrue/False(A [not] in B) -- use assertIn/assertNotIn (N323)
re_assert_true_false_with_in_or_not_in = re.compile(
    r"assert(True|False)\("
    r"(\w|[][.'\"])+( not)? in (\w|[][.'\",])+(, .*)?\)")
re_assert_true_false_with_in_or_not_in_spaces = re.compile(
    r"assert(True|False)\((\w|[][.'\"])+( not)? in [\[|'|\"](\w|[][.'\", ])+"
    r"[\[|'|\"](, .*)?\)")
# assertEqual(A [not] in B, True/False) in either argument order (N324)
re_assert_equal_in_end_with_true_or_false = re.compile(
    r"assertEqual\((\w|[][.'\"])+( not)? in (\w|[][.'\", ])+, (True|False)\)")
re_assert_equal_in_start_with_true_or_false = re.compile(
    r"assertEqual\((True|False), (\w|[][.'\"])+( not)? in (\w|[][.'\", ])+\)")
# dict() / list() constructor calls -- use {} / [] literals (N351)
re_no_construct_dict = re.compile(
    r"\sdict\(\)")
re_no_construct_list = re.compile(
    r"\slist\(\)")
# %-style format specifier with a mapping key, e.g. "%(key)s"; the key
# name is captured in group 1 (used by check_dict_formatting_in_string).
re_str_format = re.compile(r"""
    %                # start of specifier
    \(([^)]+)\)      # mapping key, in group 1
    [#0 +\-]?        # optional conversion flag
    (?:-?\d*)?       # optional minimum field width
    (?:\.\d*)?       # optional precision
    [hLl]?           # optional length modifier
    [A-z%]           # conversion modifier
    """, re.X)
# ':raise'/':raises' docstring lines missing the exception type (N354)
re_raises = re.compile(
    r"\s:raise[^s] *.*$|\s:raises *:.*$|\s:raises *[^:]+$")
63 | ||
64 | ||
def skip_ignored_lines(func):
    """Decorator that keeps a check from flagging non-code lines.

    Blank lines, comment lines and lines explicitly marked with
    ``# noqa`` are skipped; every other line is delegated to the
    wrapped check, whose errors are re-yielded unchanged.
    """

    @functools.wraps(func)
    def wrapper(logical_line, filename):
        line = logical_line.strip()
        if not line or line.startswith("#") or line.endswith("# noqa"):
            return
        # NOTE: the previous ``yield next(func(...))`` reported only the
        # first error and, under PEP 479 (Python 3.7+), turned the
        # StopIteration of an error-free check into a RuntimeError.
        # Re-yield everything the wrapped check produces instead.
        for res in func(logical_line, filename):
            yield res

    return wrapper
75 | ||
76 | ||
77 | def _parse_assert_mock_str(line): | |
78 | point = line.find(".assert_") | |
79 | ||
80 | if point != -1: | |
81 | end_pos = line[point:].find("(") + point | |
82 | return point, line[point + 1: end_pos], line[: point] | |
83 | else: | |
84 | return None, None, None | |
85 | ||
86 | ||
@skip_ignored_lines
def check_assert_methods_from_mock(logical_line, filename):
    """Ensure that ``assert_*`` methods from ``mock`` library is used correctly

    N301 - base error number
    N302 - related to nonexistent "assert_called"
    N303 - related to nonexistent "assert_called_once"
    """

    correct_names = ["assert_any_call", "assert_called_once_with",
                     "assert_called_with", "assert_has_calls"]
    ignored_files = ["./tests/unit/test_hacking.py"]

    if filename.startswith("./tests") and filename not in ignored_files:
        pos, method_name, obj_name = _parse_assert_mock_str(logical_line)

        # NOTE: _parse_assert_mock_str() returns None when there is no
        # ".assert_" call.  Compare with None explicitly -- a match at
        # column 0 is valid, but the previous ``if pos:`` treated it as
        # "no match" and silently skipped such lines.
        if pos is not None:
            if method_name not in correct_names:
                error_number = "N301"
                msg = ("%(error_number)s:'%(method)s' is not present in `mock`"
                       " library. %(custom_msg)s For more details, visit "
                       "http://www.voidspace.org.uk/python/mock/ .")

                if method_name == "assert_called":
                    error_number = "N302"
                    custom_msg = ("Maybe, you should try to use "
                                  "'assertTrue(%s.called)' instead." %
                                  obj_name)
                elif method_name == "assert_called_once":
                    # For more details, see a bug in Rally:
                    # https://bugs.launchpad.net/rally/+bug/1305991
                    error_number = "N303"
                    custom_msg = ("Maybe, you should try to use "
                                  "'assertEqual(1, %s.call_count)' "
                                  "or '%s.assert_called_once_with()'"
                                  " instead." % (obj_name, obj_name))
                else:
                    custom_msg = ("Correct 'assert_*' methods: '%s'."
                                  % "', '".join(correct_names))

                yield (pos, msg % {
                    "error_number": error_number,
                    "method": method_name,
                    "custom_msg": custom_msg})
131 | ||
132 | ||
@skip_ignored_lines
def assert_true_instance(logical_line, filename):
    """Detect ``assertTrue(isinstance(a, b))`` constructions

    N320
    """
    if not re_assert_true_instance.match(logical_line):
        return
    yield (0, "N320 assertTrue(isinstance(a, b)) sentences not allowed, "
              "you should use assertIsInstance(a, b) instead.")
142 | ||
143 | ||
@skip_ignored_lines
def assert_equal_type(logical_line, filename):
    """Detect ``assertEqual(type(A), B)`` constructions

    N321
    """
    matched = re_assert_equal_type.match(logical_line)
    if matched:
        yield (0, "N321 assertEqual(type(A), B) sentences not allowed, "
                  "you should use assertIsInstance(a, b) instead.")
153 | ||
154 | ||
@skip_ignored_lines
def assert_equal_none(logical_line, filename):
    """Detect ``assertEqual(A, None)`` / ``assertEqual(None, A)``

    N322
    """
    found = re_assert_equal_start_with_none.search(logical_line)
    if found is None:
        found = re_assert_equal_end_with_none.search(logical_line)
    if found is not None:
        yield (0, "N322 assertEqual(A, None) or assertEqual(None, A) "
                  "sentences not allowed, you should use assertIsNone(A) "
                  "instead.")
167 | ||
168 | ||
@skip_ignored_lines
def assert_true_or_false_with_in(logical_line, filename):
    """Detect membership tests hidden inside assertTrue/assertFalse

    Flags assertTrue/False(A in B) and assertTrue/False(A not in B),
    with or without a trailing message argument.

    N323
    """
    patterns = (re_assert_true_false_with_in_or_not_in,
                re_assert_true_false_with_in_or_not_in_spaces)
    if any(pattern.search(logical_line) for pattern in patterns):
        yield (0, "N323 assertTrue/assertFalse(A in/not in B)sentences not "
                  "allowed, you should use assertIn(A, B) or assertNotIn(A, B)"
                  " instead.")
185 | ||
186 | ||
@skip_ignored_lines
def assert_equal_in(logical_line, filename):
    """Detect membership tests hidden inside assertEqual

    Flags assertEqual(A in B, True/False) and
    assertEqual(True/False, A in B), including the ``not in`` variants.

    N324
    """
    patterns = (re_assert_equal_in_end_with_true_or_false,
                re_assert_equal_in_start_with_true_or_false)
    if any(pattern.search(logical_line) for pattern in patterns):
        yield (0, "N324: Use assertIn/NotIn(A, B) rather than "
                  "assertEqual(A in/not in B, True/False) when checking "
                  "collection contents.")
203 | ||
204 | ||
@skip_ignored_lines
def check_quotes(logical_line, filename):
    """Check that single quotation marks are not used

    N350
    """

    # Scanner state for the character-by-character walk below.
    in_string = False            # currently inside a "..." string
    in_multiline_string = False  # currently inside a """...""" string
    single_quotas_are_used = False

    # True when line[i], line[i + 1] and line[i + 2] are all `char`,
    # i.e. the delimiter of a triple-quoted string starts at i.
    check_tripple = (
        lambda line, i, char: (
            i + 2 < len(line) and
            (char == line[i] == line[i + 1] == line[i + 2])
        )
    )

    i = 0
    while i < len(logical_line):
        char = logical_line[i]

        if in_string:
            if char == "\"":
                in_string = False
            if char == "\\":
                i += 1  # ignore next char

        elif in_multiline_string:
            if check_tripple(logical_line, i, "\""):
                i += 2  # skip next 2 chars
                in_multiline_string = False

        elif char == "#":
            # The rest of the line is a comment -- stop scanning.
            break

        elif char == "'":
            # A single quote found outside any double-quoted string.
            single_quotas_are_used = True
            break

        elif char == "\"":
            if check_tripple(logical_line, i, "\""):
                in_multiline_string = True
                i += 3
                continue
            in_string = True

        i += 1

    if single_quotas_are_used:
        yield (i, "N350 Remove Single quotes")
256 | ||
257 | ||
@skip_ignored_lines
def check_no_constructor_data_struct(logical_line, filename):
    """Detect dict()/list() calls that should be literals

    N351
    """

    if re_no_construct_dict.search(logical_line):
        yield (0, "N351 Remove dict() construct and use literal {}")
    if re_no_construct_list.search(logical_line):
        yield (0, "N351 Remove list() construct and use literal []")
271 | ||
272 | ||
def check_dict_formatting_in_string(logical_line, tokens):
    """Check that strings do not use dict-formatting with a single replacement

    N352
    """
    # NOTE(stpierre): Can't use @skip_ignored_lines here because it's
    # a stupid decorator that only works on functions that take
    # (logical_line, filename) as arguments.
    if (not logical_line or
            logical_line.startswith("#") or
            logical_line.endswith("# noqa")):
        return

    # Accumulate adjacent STRING tokens (implicit concatenation) into
    # current_string, then inspect it whenever a "%" operator follows.
    current_string = ""
    in_string = False
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.STRING:
            if not in_string:
                current_string = ""
                in_string = True
            current_string += text.strip("\"")
        elif token_type == tokenize.OP:
            if not current_string:
                continue
            # NOTE(stpierre): The string formatting operator % has
            # lower precedence than +, so we assume that the logical
            # string has concluded whenever we hit an operator of any
            # sort. (Most operators don't work for strings anyway.)
            # Some string operators do have higher precedence than %,
            # though, so you can technically trick this check by doing
            # things like:
            #
            #     "%(foo)s" * 1 % {"foo": 1}
            #     "%(foo)s"[:] % {"foo": 1}
            #
            # It also will produce false positives if you use explicit
            # parenthesized addition for two strings instead of
            # concatenation by juxtaposition, e.g.:
            #
            #     ("%(foo)s" + "%(bar)s") % vals
            #
            # But if you do any of those things, then you deserve all
            # of the horrible things that happen to you, and probably
            # many more.
            in_string = False
            if text == "%":
                format_keys = set()
                for match in re_str_format.finditer(current_string):
                    format_keys.add(match.group(1))
                if len(format_keys) == 1:
                    # NOTE(review): the docstring says N352 but this message
                    # uses N353, which collides with check_using_unicode --
                    # confirm the intended code before changing either.
                    yield (0,
                           "N353 Do not use mapping key string formatting "
                           "with a single key")
            if text != ")":
                # NOTE(stpierre): You can have a parenthesized string
                # followed by %, so a closing paren doesn't obviate
                # the possibility for a substitution operator like
                # every other operator does.
                current_string = ""
        elif token_type in (tokenize.NL, tokenize.COMMENT):
            continue
        else:
            in_string = False
            if token_type == tokenize.NEWLINE:
                current_string = ""
338 | ||
339 | ||
@skip_ignored_lines
def check_using_unicode(logical_line, filename):
    """Flag calls to the python2-only ``unicode()`` builtin

    N353
    """

    if re.search(r"\bunicode\(", logical_line) is not None:
        yield (0, "N353 'unicode' function is absent in python3. Please "
                  "use 'six.text_type' instead.")
350 | ||
351 | ||
def check_raises(physical_line, filename):
    """Check the format of ``:raises`` lines in docstrings

    N354
    """

    ignored_files = ["./tests/unit/test_hacking.py",
                     "./tests/hacking/checks.py"]
    if filename not in ignored_files:
        if re_raises.search(physical_line):
            # NOTE: the message previously read
            # "N354 ':Please use ':raises Exception: conditions' ..." with
            # mismatched quoting; fixed so it renders cleanly in flake8
            # output.
            return (0, "N354 Please use ':raises Exception: conditions' "
                       "in docstrings.")
364 | ||
365 | ||
def factory(register):
    """Register every OSProfiler hacking check with the framework."""
    checks = (
        check_assert_methods_from_mock,
        assert_true_instance,
        assert_equal_type,
        assert_equal_none,
        assert_true_or_false_with_in,
        assert_equal_in,
        check_quotes,
        check_no_constructor_data_struct,
        check_dict_formatting_in_string,
        check_using_unicode,
        check_raises,
    )
    for check in checks:
        register(check)
20 | 20 | |
21 | 21 | |
22 | 22 | class CeilometerParserTestCase(test.TestCase): |
23 | ||
24 | 23 | def test_build_empty_tree(self): |
25 | 24 | self.assertEqual(ceilometer._build_tree({}), []) |
26 | 25 | |
87 | 86 | self.assertEqual(ceilometer.parse_notifications([]), expected) |
88 | 87 | |
89 | 88 | def test_parse_notifications(self): |
90 | samples = [ | |
91 | { | |
92 | "id": "896f5e52-d4c9-11e3-a117-46c0b36ac153", | |
93 | "metadata": { | |
94 | "base_id": "f5587500-07d1-41a0-b434-525d3c28ac49", | |
95 | "event_type": "profiler.nova", | |
96 | "host": "0.0.0.0", | |
97 | "service": "osapi_compute", | |
98 | "project": "nova", | |
99 | "name": "WSGI-stop", | |
100 | "parent_id": "82281b35-63aa-45fc-8578-5a32a66370ab", | |
101 | "trace_id": "837eb0bd-323a-4e3f-b223-3be78ad86aab" | |
102 | }, | |
103 | "meter": "WSGI-stop", | |
104 | "project_id": None, | |
105 | "recorded_at": "2014-05-06T02:53:03.110724", | |
106 | "resource_id": "profiler-f5587500-07d1-41a0-b434-525d3c28ac49", | |
107 | "source": "openstack", | |
108 | "timestamp": "2014-05-06T02:52:59.357020", | |
109 | "type": "gauge", | |
110 | "unit": "sample", | |
111 | "user_id": None, | |
112 | "volume": 1.0 | |
89 | events = [ | |
90 | { | |
91 | "traits": [ | |
92 | { | |
93 | "type": "string", | |
94 | "name": "base_id", | |
95 | "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4" | |
96 | }, | |
97 | { | |
98 | "type": "string", | |
99 | "name": "host", | |
100 | "value": "ubuntu" | |
101 | }, | |
102 | { | |
103 | "type": "string", | |
104 | "name": "method", | |
105 | "value": "POST" | |
106 | }, | |
107 | { | |
108 | "type": "string", | |
109 | "name": "name", | |
110 | "value": "wsgi-start" | |
111 | }, | |
112 | { | |
113 | "type": "string", | |
114 | "name": "parent_id", | |
115 | "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4" | |
116 | }, | |
117 | { | |
118 | "type": "string", | |
119 | "name": "project", | |
120 | "value": "keystone" | |
121 | }, | |
122 | { | |
123 | "type": "string", | |
124 | "name": "service", | |
125 | "value": "main" | |
126 | }, | |
127 | { | |
128 | "type": "string", | |
129 | "name": "timestamp", | |
130 | "value": "2015-12-23T14:02:22.338776" | |
131 | }, | |
132 | { | |
133 | "type": "string", | |
134 | "name": "trace_id", | |
135 | "value": "06320327-2c2c-45ae-923a-515de890276a" | |
136 | } | |
137 | ], | |
138 | "raw": {}, | |
139 | "generated": "2015-12-23T10:41:38.415793", | |
140 | "event_type": "profiler.main", | |
141 | "message_id": "65fc1553-3082-4a6f-9d1e-0e3183f57a47"}, | |
142 | { | |
143 | "traits": | |
144 | [ | |
145 | { | |
146 | "type": "string", | |
147 | "name": "base_id", | |
148 | "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4" | |
149 | }, | |
150 | { | |
151 | "type": "string", | |
152 | "name": "host", | |
153 | "value": "ubuntu" | |
154 | }, | |
155 | { | |
156 | "type": "string", | |
157 | "name": "name", | |
158 | "value": "wsgi-stop" | |
159 | }, | |
160 | { | |
161 | "type": "string", | |
162 | "name": "parent_id", | |
163 | "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4" | |
164 | }, | |
165 | { | |
166 | "type": "string", | |
167 | "name": "project", | |
168 | "value": "keystone" | |
169 | }, | |
170 | { | |
171 | "type": "string", | |
172 | "name": "service", | |
173 | "value": "main" | |
174 | }, | |
175 | { | |
176 | "type": "string", | |
177 | "name": "timestamp", | |
178 | "value": "2015-12-23T14:02:22.380405" | |
179 | }, | |
180 | { | |
181 | "type": "string", | |
182 | "name": "trace_id", | |
183 | "value": "016c97fd-87f3-40b2-9b55-e431156b694b" | |
184 | } | |
185 | ], | |
186 | "raw": {}, | |
187 | "generated": "2015-12-23T10:41:38.406052", | |
188 | "event_type": "profiler.main", | |
189 | "message_id": "3256d9f1-48ba-4ac5-a50b-64fa42c6e264"}, | |
190 | { | |
191 | "traits": | |
192 | [ | |
193 | { | |
194 | "type": "string", | |
195 | "name": "base_id", | |
196 | "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4" | |
197 | }, | |
198 | { | |
199 | "type": "string", | |
200 | "name": "db.params", | |
201 | "value": "[]" | |
202 | }, | |
203 | { | |
204 | "type": "string", | |
205 | "name": "db.statement", | |
206 | "value": "SELECT 1" | |
207 | }, | |
208 | { | |
209 | "type": "string", | |
210 | "name": "host", | |
211 | "value": "ubuntu" | |
212 | }, | |
213 | { | |
214 | "type": "string", | |
215 | "name": "name", | |
216 | "value": "db-start" | |
217 | }, | |
218 | { | |
219 | "type": "string", | |
220 | "name": "parent_id", | |
221 | "value": "06320327-2c2c-45ae-923a-515de890276a" | |
222 | }, | |
223 | { | |
224 | "type": "string", | |
225 | "name": "project", | |
226 | "value": "keystone" | |
227 | }, | |
228 | { | |
229 | "type": "string", | |
230 | "name": "service", | |
231 | "value": "main" | |
232 | }, | |
233 | { | |
234 | "type": "string", | |
235 | "name": "timestamp", | |
236 | "value": "2015-12-23T14:02:22.395365" | |
237 | }, | |
238 | { | |
239 | "type": "string", | |
240 | "name": "trace_id", | |
241 | "value": "1baf1d24-9ca9-4f4c-bd3f-01b7e0c0735a" | |
242 | } | |
243 | ], | |
244 | "raw": {}, | |
245 | "generated": "2015-12-23T10:41:38.984161", | |
246 | "event_type": "profiler.main", | |
247 | "message_id": "60368aa4-16f0-4f37-a8fb-89e92fdf36ff" | |
113 | 248 | }, |
114 | 249 | { |
115 | "id": "895043a0-d4c9-11e3-a117-46c0b36ac153", | |
116 | "metadata": { | |
117 | "base_id": "f5587500-07d1-41a0-b434-525d3c28ac49", | |
118 | "event_type": "profiler.nova", | |
119 | "host": "0.0.0.0", | |
120 | "service": "osapi_compute", | |
121 | "project": "nova", | |
122 | "name": "WSGI-start", | |
123 | "parent_id": "82281b35-63aa-45fc-8578-5a32a66370ab", | |
124 | "trace_id": "837eb0bd-323a-4e3f-b223-3be78ad86aab" | |
125 | }, | |
126 | "meter": "WSGI-start", | |
127 | "project_id": None, | |
128 | "recorded_at": "2014-05-06T02:53:03.020620", | |
129 | "resource_id": "profiler-f5587500-07d1-41a0-b434-525d3c28ac49", | |
130 | "source": "openstack", | |
131 | "timestamp": "2014-05-06T02:52:59.225552", | |
132 | "type": "gauge", | |
133 | "unit": "sample", | |
134 | "user_id": None, | |
135 | "volume": 1.0 | |
250 | "traits": | |
251 | [ | |
252 | { | |
253 | "type": "string", | |
254 | "name": "base_id", | |
255 | "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4" | |
256 | }, | |
257 | { | |
258 | "type": "string", | |
259 | "name": "host", | |
260 | "value": "ubuntu" | |
261 | }, | |
262 | { | |
263 | "type": "string", | |
264 | "name": "name", | |
265 | "value": "db-stop" | |
266 | }, | |
267 | { | |
268 | "type": "string", | |
269 | "name": "parent_id", | |
270 | "value": "06320327-2c2c-45ae-923a-515de890276a" | |
271 | }, | |
272 | { | |
273 | "type": "string", | |
274 | "name": "project", | |
275 | "value": "keystone" | |
276 | }, | |
277 | { | |
278 | "type": "string", | |
279 | "name": "service", | |
280 | "value": "main" | |
281 | }, | |
282 | { | |
283 | "type": "string", | |
284 | "name": "timestamp", | |
285 | "value": "2015-12-23T14:02:22.415486" | |
286 | }, | |
287 | { | |
288 | "type": "string", | |
289 | "name": "trace_id", | |
290 | "value": "1baf1d24-9ca9-4f4c-bd3f-01b7e0c0735a" | |
291 | } | |
292 | ], | |
293 | "raw": {}, | |
294 | "generated": "2015-12-23T10:41:39.019378", | |
295 | "event_type": "profiler.main", | |
296 | "message_id": "3fbeb339-55c5-4f28-88e4-15bee251dd3d" | |
136 | 297 | }, |
137 | ||
138 | { | |
139 | "id": "89558414-d4c9-11e3-a117-46c0b36ac153", | |
140 | "metadata": { | |
141 | "base_id": "f5587500-07d1-41a0-b434-525d3c28ac49", | |
142 | "event_type": "profiler.nova", | |
143 | "host": "0.0.0.0", | |
144 | "service": "osapi_compute", | |
145 | "project": "nova", | |
146 | "info.db:multiparams": "(immutabledict({}),)", | |
147 | "info.db:params": "{}", | |
148 | "name": "db-start", | |
149 | "parent_id": "837eb0bd-323a-4e3f-b223-3be78ad86aab", | |
150 | "trace_id": "f8ab042e-1085-4df2-9f3a-cfb6390b8090" | |
151 | }, | |
152 | "meter": "db-start", | |
153 | "project_id": None, | |
154 | "recorded_at": "2014-05-06T02:53:03.038692", | |
155 | "resource_id": "profiler-f5587500-07d1-41a0-b434-525d3c28ac49", | |
156 | "source": "openstack", | |
157 | "timestamp": "2014-05-06T02:52:59.273422", | |
158 | "type": "gauge", | |
159 | "unit": "sample", | |
160 | "user_id": None, | |
161 | "volume": 1.0 | |
162 | }, | |
163 | { | |
164 | "id": "892d3018-d4c9-11e3-a117-46c0b36ac153", | |
165 | "metadata": { | |
166 | "base_id": "f5587500-07d1-41a0-b434-525d3c28ac49", | |
167 | "event_type": "profiler.generic", | |
168 | "host": "ubuntu", | |
169 | "service": "nova-conductor", | |
170 | "project": "nova", | |
171 | "name": "db-stop", | |
172 | "parent_id": "aad4748f-99d5-45c8-be0a-4025894bb3db", | |
173 | "trace_id": "8afee05d-0ad2-4515-bd03-db0f2d30eed0" | |
174 | }, | |
175 | "meter": "db-stop", | |
176 | "project_id": None, | |
177 | "recorded_at": "2014-05-06T02:53:02.894015", | |
178 | "resource_id": "profiler-f5587500-07d1-41a0-b434-525d3c28ac49", | |
179 | "source": "openstack", | |
180 | "timestamp": "2014-05-06T02:53:00.473201", | |
181 | "type": "gauge", | |
182 | "unit": "sample", | |
183 | "user_id": None, | |
184 | "volume": 1.0 | |
298 | { | |
299 | "traits": | |
300 | [ | |
301 | { | |
302 | "type": "string", | |
303 | "name": "base_id", | |
304 | "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4" | |
305 | }, | |
306 | { | |
307 | "type": "string", | |
308 | "name": "host", | |
309 | "value": "ubuntu" | |
310 | }, | |
311 | { | |
312 | "type": "string", | |
313 | "name": "method", | |
314 | "value": "GET" | |
315 | }, | |
316 | { | |
317 | "type": "string", | |
318 | "name": "name", | |
319 | "value": "wsgi-start" | |
320 | }, | |
321 | { | |
322 | "type": "string", | |
323 | "name": "parent_id", | |
324 | "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4" | |
325 | }, | |
326 | { | |
327 | "type": "string", | |
328 | "name": "project", | |
329 | "value": "keystone" | |
330 | }, | |
331 | { | |
332 | "type": "string", | |
333 | "name": "service", | |
334 | "value": "main" | |
335 | }, | |
336 | { | |
337 | "type": "string", | |
338 | "name": "timestamp", | |
339 | "value": "2015-12-23T14:02:22.427444" | |
340 | }, | |
341 | { | |
342 | "type": "string", | |
343 | "name": "trace_id", | |
344 | "value": "016c97fd-87f3-40b2-9b55-e431156b694b" | |
345 | } | |
346 | ], | |
347 | "raw": {}, | |
348 | "generated": "2015-12-23T10:41:38.360409", | |
349 | "event_type": "profiler.main", | |
350 | "message_id": "57b971a9-572f-4f29-9838-3ed2564c6b5b" | |
185 | 351 | } |
186 | 352 | ] |
187 | 353 | |
188 | excepted = { | |
189 | "info": { | |
190 | "finished": 1247, | |
191 | "name": "total", | |
192 | "started": 0 | |
193 | }, | |
194 | "children": [ | |
195 | { | |
196 | "info": { | |
197 | "finished": 131, | |
198 | "host": "0.0.0.0", | |
199 | "service": "osapi_compute", | |
200 | "name": "WSGI", | |
201 | "project": "nova", | |
202 | "started": 0 | |
203 | }, | |
204 | "parent_id": "82281b35-63aa-45fc-8578-5a32a66370ab", | |
205 | "trace_id": "837eb0bd-323a-4e3f-b223-3be78ad86aab", | |
206 | "children": [{ | |
207 | "children": [], | |
208 | "info": { | |
209 | "finished": 47, | |
210 | "host": "0.0.0.0", | |
211 | "service": "osapi_compute", | |
212 | "project": "nova", | |
213 | "info.db:multiparams": "(immutabledict({}),)", | |
214 | "info.db:params": "{}", | |
215 | "name": "db", | |
216 | "started": 47 | |
217 | }, | |
218 | ||
219 | "parent_id": "837eb0bd-323a-4e3f-b223-3be78ad86aab", | |
220 | "trace_id": "f8ab042e-1085-4df2-9f3a-cfb6390b8090" | |
221 | }] | |
222 | }, | |
223 | { | |
224 | "children": [], | |
225 | "info": { | |
226 | "finished": 1247, | |
227 | "host": "ubuntu", | |
228 | "name": "db", | |
229 | "service": "nova-conductor", | |
230 | "project": "nova", | |
231 | "started": 1247 | |
232 | }, | |
233 | "parent_id": "aad4748f-99d5-45c8-be0a-4025894bb3db", | |
234 | "trace_id": "8afee05d-0ad2-4515-bd03-db0f2d30eed0" | |
235 | } | |
236 | ] | |
237 | } | |
238 | ||
239 | self.assertEqual(ceilometer.parse_notifications(samples), excepted) | |
354 | expected = {"children": [ | |
355 | {"children": [ | |
356 | {"children": [], | |
357 | "info": {"finished": 76, | |
358 | "host": "ubuntu", | |
359 | "meta.db.params": "[]", | |
360 | "meta.db.statement": "SELECT 1", | |
361 | "meta.host": "ubuntu", | |
362 | "name": "db", | |
363 | "project": "keystone", | |
364 | "service": "main", | |
365 | "started": 56}, | |
366 | "parent_id": "06320327-2c2c-45ae-923a-515de890276a", | |
367 | "trace_id": "1baf1d24-9ca9-4f4c-bd3f-01b7e0c0735a"}], | |
368 | "info": {"finished": 0, | |
369 | "host": "ubuntu", | |
370 | "meta.host": "ubuntu", | |
371 | "meta.method": "POST", | |
372 | "name": "wsgi", | |
373 | "project": "keystone", | |
374 | "service": "main", | |
375 | "started": 0}, | |
376 | "parent_id": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4", | |
377 | "trace_id": "06320327-2c2c-45ae-923a-515de890276a"}, | |
378 | {"children": [], | |
379 | "info": {"finished": 41, | |
380 | "host": "ubuntu", | |
381 | "meta.host": "ubuntu", | |
382 | "meta.method": "GET", | |
383 | "name": "wsgi", | |
384 | "project": "keystone", | |
385 | "service": "main", | |
386 | "started": 88}, | |
387 | "parent_id": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4", | |
388 | "trace_id": "016c97fd-87f3-40b2-9b55-e431156b694b"}], | |
389 | "info": {"finished": 88, "name": "total", "started": 0}} | |
390 | ||
391 | self.assertEqual(expected, ceilometer.parse_notifications(events)) | |
240 | 392 | |
241 | 393 | def test_get_notifications(self): |
242 | 394 | mock_ceil_client = mock.MagicMock() |
243 | 395 | results = [mock.MagicMock(), mock.MagicMock()] |
244 | mock_ceil_client.query_samples.query.return_value = results | |
396 | mock_ceil_client.events.list.return_value = results | |
245 | 397 | base_id = "10" |
246 | 398 | |
247 | 399 | result = ceilometer.get_notifications(mock_ceil_client, base_id) |
248 | 400 | |
249 | expected_filter = '{"=": {"resource_id": "profiler-%s"}}' % base_id | |
250 | mock_ceil_client.query_samples.query.assert_called_once_with( | |
251 | expected_filter, None, None) | |
401 | expected_filter = [{"field": "base_id", "op": "eq", "value": base_id}] | |
402 | mock_ceil_client.events.list.assert_called_once_with(expected_filter, | |
403 | limit=100000) | |
252 | 404 | self.assertEqual(result, [results[0].to_dict(), results[1].to_dict()]) |
14 | 14 | |
15 | 15 | import collections |
16 | 16 | import copy |
17 | import datetime | |
17 | 18 | import mock |
18 | 19 | import re |
20 | ||
21 | import six | |
19 | 22 | |
20 | 23 | from osprofiler import profiler |
21 | 24 | |
84 | 87 | prof.start("test") |
85 | 88 | self.assertEqual(prof.get_id(), "43") |
86 | 89 | |
90 | @mock.patch("osprofiler.profiler.datetime") | |
87 | 91 | @mock.patch("osprofiler.profiler.uuid.uuid4") |
88 | 92 | @mock.patch("osprofiler.profiler.notifier.notify") |
89 | def test_profiler_start(self, mock_notify, mock_uuid4): | |
93 | def test_profiler_start(self, mock_notify, mock_uuid4, mock_datetime): | |
90 | 94 | mock_uuid4.return_value = "44" |
95 | now = datetime.datetime.utcnow() | |
96 | mock_datetime.datetime.utcnow.return_value = now | |
91 | 97 | |
92 | 98 | info = {"some": "info"} |
93 | 99 | payload = { |
95 | 101 | "base_id": "1", |
96 | 102 | "parent_id": "2", |
97 | 103 | "trace_id": "44", |
98 | "info": info | |
104 | "info": info, | |
105 | "timestamp": now, | |
99 | 106 | } |
100 | 107 | |
101 | 108 | prof = profiler._Profiler("secret", base_id="1", parent_id="2") |
103 | 110 | |
104 | 111 | mock_notify.assert_called_once_with(payload) |
105 | 112 | |
113 | @mock.patch("osprofiler.profiler.datetime") | |
106 | 114 | @mock.patch("osprofiler.profiler.notifier.notify") |
107 | def test_profiler_stop(self, mock_notify): | |
115 | def test_profiler_stop(self, mock_notify, mock_datetime): | |
116 | now = datetime.datetime.utcnow() | |
117 | mock_datetime.datetime.utcnow.return_value = now | |
108 | 118 | prof = profiler._Profiler("secret", base_id="1", parent_id="2") |
109 | 119 | prof._trace_stack.append("44") |
110 | 120 | prof._name.append("abc") |
117 | 127 | "base_id": "1", |
118 | 128 | "parent_id": "2", |
119 | 129 | "trace_id": "44", |
120 | "info": info | |
130 | "info": info, | |
131 | "timestamp": now, | |
121 | 132 | } |
122 | 133 | |
123 | 134 | mock_notify.assert_called_once_with(payload) |
311 | 322 | self.assertIn(mock_start.call_args_list[0], |
312 | 323 | possible_mock_calls("rpc", expected_info)) |
313 | 324 | mock_stop.assert_called_once_with() |
325 | ||
326 | ||
@six.add_metaclass(profiler.TracedMeta)
class FakeTraceWithMetaclassBase(object):
    """Fixture class traced via profiler.TracedMeta.

    TracedMeta reads ``__trace_args__`` and wraps the class's methods
    with tracing; the tests above assert that profiler.start/stop are
    invoked around each public method call.
    """

    # Arguments handed to the tracing machinery by the metaclass.
    __trace_args__ = {"name": "rpc",
                      "info": {"a": 10}}

    def method1(self, a, b, c=10):
        return a + b + c

    def method2(self, d, e):
        return d - e

    def method3(self, g=10, h=20):
        return g * h

    def _method(self, i):
        # Underscore-prefixed: the tests assert this is NOT traced unless
        # "trace_private" is set (see FakeTraceWithMetaclassPrivate).
        return i
343 | ||
344 | ||
class FakeTraceDummy(FakeTraceWithMetaclassBase):
    # No __trace_args__ override: inherits the base tracing configuration.
    def method4(self, j):
        return j
348 | ||
349 | ||
class FakeTraceWithMetaclassHideArgs(FakeTraceWithMetaclassBase):
    """Fixture overriding the trace args to hide method arguments."""

    __trace_args__ = {"name": "a",
                      "info": {"b": 20},
                      "hide_args": True}

    def method5(self, k, l):
        return k + l
357 | ||
358 | ||
class FakeTraceWithMetaclassPrivate(FakeTraceWithMetaclassBase):
    """Fixture enabling tracing of underscore-prefixed (private) methods."""

    __trace_args__ = {"name": "rpc",
                      "trace_private": True}

    def _new_private_method(self, m):
        return 2 * m
365 | ||
366 | ||
class TraceWithMetaclassTestCase(test.TestCase):
    """Tests for automatic method tracing via profiler.TracedMeta."""

    def test_no_name_exception(self):
        """Defining a traced class with no "name" trace arg must fail."""
        def define_class_with_no_name():
            @six.add_metaclass(profiler.TracedMeta)
            class FakeTraceWithMetaclassNoName(FakeTracedCls):
                pass
        # NOTE: assertRaises forwards extra positional args to the callable.
        # Passing one here (the old ", 1") made the zero-arg function raise
        # TypeError for the wrong reason (bad arity), so the test would pass
        # even if TracedMeta stopped validating __trace_args__.
        self.assertRaises(TypeError, define_class_with_no_name)

    @mock.patch("osprofiler.profiler.stop")
    @mock.patch("osprofiler.profiler.start")
    def test_args(self, mock_start, mock_stop):
        """Positional args of a traced call are captured in the trace info."""
        fake_cls = FakeTraceWithMetaclassBase()
        self.assertEqual(30, fake_cls.method1(5, 15))
        expected_info = {
            "a": 10,
            "function": {
                "name":
                "tests.test_profiler.FakeTraceWithMetaclassBase.method1",
                "args": str((fake_cls, 5, 15)),
                "kwargs": str({})
            }
        }
        self.assertEqual(1, len(mock_start.call_args_list))
        self.assertIn(mock_start.call_args_list[0],
                      possible_mock_calls("rpc", expected_info))
        mock_stop.assert_called_once_with()

    @mock.patch("osprofiler.profiler.stop")
    @mock.patch("osprofiler.profiler.start")
    def test_kwargs(self, mock_start, mock_stop):
        """Keyword args of a traced call are captured in the trace info."""
        fake_cls = FakeTraceWithMetaclassBase()
        self.assertEqual(50, fake_cls.method3(g=5, h=10))
        expected_info = {
            "a": 10,
            "function": {
                "name":
                "tests.test_profiler.FakeTraceWithMetaclassBase.method3",
                "args": str((fake_cls,)),
                "kwargs": str({"g": 5, "h": 10})
            }
        }
        self.assertEqual(1, len(mock_start.call_args_list))
        self.assertIn(mock_start.call_args_list[0],
                      possible_mock_calls("rpc", expected_info))
        mock_stop.assert_called_once_with()

    @mock.patch("osprofiler.profiler.stop")
    @mock.patch("osprofiler.profiler.start")
    def test_without_private(self, mock_start, mock_stop):
        """Private methods are not traced unless trace_private is set."""
        fake_cls = FakeTraceWithMetaclassHideArgs()
        self.assertEqual(10, fake_cls._method(10))
        self.assertFalse(mock_start.called)
        self.assertFalse(mock_stop.called)

    @mock.patch("osprofiler.profiler.stop")
    @mock.patch("osprofiler.profiler.start")
    def test_without_args(self, mock_start, mock_stop):
        """With hide_args, args/kwargs are omitted from the trace info."""
        fake_cls = FakeTraceWithMetaclassHideArgs()
        self.assertEqual(20, fake_cls.method5(5, 15))
        expected_info = {
            "b": 20,
            "function": {
                "name": "tests.test_profiler.FakeTraceWithMetaclassHideArgs."
                        "method5"
            }
        }

        self.assertEqual(1, len(mock_start.call_args_list))
        self.assertIn(mock_start.call_args_list[0],
                      possible_mock_calls("a", expected_info))
        mock_stop.assert_called_once_with()

    @mock.patch("osprofiler.profiler.stop")
    @mock.patch("osprofiler.profiler.start")
    def test_private_methods(self, mock_start, mock_stop):
        """With trace_private, underscore-prefixed methods are traced too."""
        fake_cls = FakeTraceWithMetaclassPrivate()
        self.assertEqual(10, fake_cls._new_private_method(5))

        expected_info = {
            "function": {
                "name": "tests.test_profiler.FakeTraceWithMetaclassPrivate."
                        "_new_private_method",
                "args": str((fake_cls, 5)),
                "kwargs": str({})
            }
        }

        self.assertEqual(1, len(mock_start.call_args_list))
        self.assertIn(mock_start.call_args_list[0],
                      possible_mock_calls("rpc", expected_info))
        mock_stop.assert_called_once_with()
26 | 26 | handler = sqlalchemy._before_cursor_execute("sql") |
27 | 27 | |
28 | 28 | handler(mock.MagicMock(), 1, 2, 3, 4, 5) |
29 | expected_info = { | |
30 | "db.statement": 2, | |
31 | "db.params": 3 | |
32 | } | |
29 | expected_info = {"db": {"statement": 2, "params": 3}} | |
33 | 30 | mock_profiler.start.assert_called_once_with("sql", info=expected_info) |
34 | 31 | |
35 | 32 | @mock.patch("osprofiler.sqlalchemy.profiler") |
66 | 66 | |
67 | 67 | process_data = utils.signed_unpack(packed_data, hmac_data, [hmac]) |
68 | 68 | self.assertIn("hmac_key", process_data) |
69 | process_data.pop('hmac_key') | |
69 | process_data.pop("hmac_key") | |
70 | 70 | self.assertEqual(data, process_data) |
71 | 71 | |
72 | 72 | def test_signed_pack_unpack_many_keys(self): |
73 | keys = ['secret', 'secret2', 'secret3'] | |
73 | keys = ["secret", "secret2", "secret3"] | |
74 | 74 | data = {"some": "data"} |
75 | 75 | packed_data, hmac_data = utils.signed_pack(data, keys[-1]) |
76 | 76 | |
77 | 77 | process_data = utils.signed_unpack(packed_data, hmac_data, keys) |
78 | self.assertEqual(keys[-1], process_data['hmac_key']) | |
78 | self.assertEqual(keys[-1], process_data["hmac_key"]) | |
79 | 79 | |
80 | 80 | def test_signed_pack_unpack_many_wrong_keys(self): |
81 | keys = ['secret', 'secret2', 'secret3'] | |
81 | keys = ["secret", "secret2", "secret3"] | |
82 | 82 | data = {"some": "data"} |
83 | packed_data, hmac_data = utils.signed_pack(data, 'password') | |
83 | packed_data, hmac_data = utils.signed_pack(data, "password") | |
84 | 84 | |
85 | 85 | process_data = utils.signed_unpack(packed_data, hmac_data, keys) |
86 | 86 | self.assertIsNone(process_data) |
47 | 47 | |
48 | 48 | trace_info = utils.signed_unpack(headers["X-Trace-Info"], |
49 | 49 | headers["X-Trace-HMAC"], ["key"]) |
50 | self.assertIn('hmac_key', trace_info) | |
51 | self.assertEqual('key', trace_info.pop('hmac_key')) | |
52 | self.assertEqual({"parent_id": 'z', 'base_id': 'y'}, trace_info) | |
50 | self.assertIn("hmac_key", trace_info) | |
51 | self.assertEqual("key", trace_info.pop("hmac_key")) | |
52 | self.assertEqual({"parent_id": "z", "base_id": "y"}, trace_info) | |
53 | 53 | |
54 | 54 | @mock.patch("osprofiler.profiler.get") |
55 | 55 | def test_get_trace_id_headers_no_profiler(self, mock_get_profiler): |
    def setUp(self):
        """Reset global profiler/web state so tests run order-independent."""
        super(WebMiddlewareTestCase, self).setUp()
        profiler._clean()
        # None is the default state of the _ENABLED param; restore it here so
        # a prior test's web.enable()/web.disable() call cannot leak in.
        web._ENABLED = None
        self.addCleanup(profiler._clean)
66 | 68 | |
67 | 69 | def tearDown(self): |
251 | 253 | parent_id="2") |
252 | 254 | expected_info = { |
253 | 255 | "request": { |
254 | "host_url": request.host_url, | |
255 | 256 | "path": request.path, |
256 | 257 | "query": request.query_string, |
257 | 258 | "method": request.method, |
279 | 280 | request.query_string = "query" |
280 | 281 | request.method = "method" |
281 | 282 | request.scheme = "scheme" |
282 | hmac_key = 'super_secret_key2' | |
283 | hmac_key = "super_secret_key2" | |
283 | 284 | |
284 | 285 | pack = utils.signed_pack({"base_id": "1", "parent_id": "2"}, hmac_key) |
285 | 286 | request.headers = { |
289 | 290 | "X-Trace-HMAC": pack[1] |
290 | 291 | } |
291 | 292 | |
292 | web.enable('super_secret_key1,super_secret_key2') | |
293 | web.enable("super_secret_key1,super_secret_key2") | |
293 | 294 | middleware = web.WsgiMiddleware("app", enabled=True) |
294 | 295 | self.assertEqual("yeah!", middleware(request)) |
295 | 296 | mock_profiler_init.assert_called_once_with(hmac_key=hmac_key, |
298 | 299 | |
    def test_disable(self):
        """disable() must leave the module-level _ENABLED flag falsy."""
        web.disable()
        self.assertFalse(web._ENABLED)
302 | 303 | |
    def test_enabled(self):
        """enable() must set _ENABLED even after a preceding disable()."""
        web.disable()
        web.enable()
        self.assertTrue(web._ENABLED)
28 | 28 | changedir = doc/source |
29 | 29 | commands = make html |
30 | 30 | |
31 | [tox:jenkins] | |
32 | downloadcache = ~/cache/pip | |
33 | ||
34 | 31 | [flake8] |
35 | 32 | ignore = E126,H703 |
36 | 33 | show-source = true |
37 | 34 | builtins = _ |
38 | exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,tools | |
35 | exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,tools,setup.py | |
36 | ||
37 | [hacking] | |
38 | local-check-factory = tests.hacking.checks.factory |