Add openstack/common log and policy modules
Add the log and policy modules of openstack/common since these are
needed for the initial keymgr commit.
Change-Id: I14b774235a81a0537fea59d76167b0cf6e3831bd
Brianna Poulos
9 years ago
0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may | |
1 | # not use this file except in compliance with the License. You may obtain | |
2 | # a copy of the License at | |
3 | # | |
4 | # http://www.apache.org/licenses/LICENSE-2.0 | |
5 | # | |
6 | # Unless required by applicable law or agreed to in writing, software | |
7 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT | |
8 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the | |
9 | # License for the specific language governing permissions and limitations | |
10 | # under the License. | |
11 | ||
12 | """oslo.i18n integration module. | |
13 | ||
14 | See http://docs.openstack.org/developer/oslo.i18n/usage.html | |
15 | ||
16 | """ | |
17 | ||
try:
    import oslo.i18n

    # NOTE(dhellmann): This reference to o-s-l-o will be replaced by the
    # application name when this module is synced into the separate
    # repository. It is OK to have more than one translation function
    # using the same domain, since there will still only be one message
    # catalog.
    _translators = oslo.i18n.TranslatorFactory(domain='castellan')

    # The primary translation function using the well-known name "_"
    _ = _translators.primary

    # Translators for log levels.
    #
    # The abbreviated names are meant to reflect the usual use of a short
    # name like '_'. The "L" is for "log" and the other letter comes from
    # the level.
    _LI = _translators.log_info
    _LW = _translators.log_warning
    _LE = _translators.log_error
    _LC = _translators.log_critical
except ImportError:
    # NOTE(dims): Support for cases where a project wants to use
    # code from oslo-incubator, but is not ready to be internationalized
    # (like tempest)
    # Every translation function degrades to an identity no-op, so
    # callers can use _(...) unconditionally.
    _ = _LI = _LW = _LE = _LC = lambda x: x
0 | # Copyright 2011 OpenStack Foundation. | |
1 | # All Rights Reserved. | |
2 | # | |
3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may | |
4 | # not use this file except in compliance with the License. You may obtain | |
5 | # a copy of the License at | |
6 | # | |
7 | # http://www.apache.org/licenses/LICENSE-2.0 | |
8 | # | |
9 | # Unless required by applicable law or agreed to in writing, software | |
10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT | |
11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the | |
12 | # License for the specific language governing permissions and limitations | |
13 | # under the License. | |
14 | ||
15 | import contextlib | |
16 | import errno | |
17 | import logging | |
18 | import os | |
19 | import stat | |
20 | import tempfile | |
21 | ||
22 | from oslo.utils import excutils | |
23 | ||
LOG = logging.getLogger(__name__)

# Cache of file contents keyed by filename; populated and consulted by
# read_cached_file() / delete_cached_file().
_FILE_CACHE = {}
# Default mode for ensure_tree(): rwx for user, group and other
# (0o777 before the process umask is applied).
DEFAULT_MODE = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
28 | ||
29 | ||
def ensure_tree(path, mode=DEFAULT_MODE):
    """Create a directory (and any ancestor directories required)

    :param path: Directory to create
    :param mode: Directory creation permissions
    """
    try:
        os.makedirs(path, mode)
    except OSError as exc:
        # An already-existing directory is fine; anything else (including
        # a non-directory occupying the path) is re-raised.
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
44 | ||
45 | ||
def read_cached_file(filename, force_reload=False):
    """Read from a file if it has been modified.

    :param filename: path of the file to read
    :param force_reload: Whether to reload the file.
    :returns: A tuple with a boolean specifying if the data is fresh
              or not.
    """
    global _FILE_CACHE

    if force_reload:
        delete_cached_file(filename)

    reloaded = False
    mtime = os.path.getmtime(filename)
    cache_info = _FILE_CACHE.setdefault(filename, {})

    # Re-read when the entry is new or the file changed on disk since it
    # was cached.
    if not cache_info or mtime > cache_info.get('mtime', 0):
        # Pass the filename as a lazy logging argument instead of eagerly
        # interpolating with %, so the formatting is skipped when DEBUG
        # logging is disabled.
        LOG.debug("Reloading cached file %s", filename)
        with open(filename) as fap:
            cache_info['data'] = fap.read()
        cache_info['mtime'] = mtime
        reloaded = True
    return (reloaded, cache_info['data'])
69 | ||
70 | ||
def delete_cached_file(filename):
    """Delete cached file if present.

    :param filename: filename to delete
    """
    global _FILE_CACHE

    # pop() with a default tolerates a missing entry, so this is a no-op
    # when the file was never cached.
    _FILE_CACHE.pop(filename, None)
80 | ||
81 | ||
def delete_if_exists(path, remove=os.unlink):
    """Delete a file, but ignore file not found error.

    :param path: File to delete
    :param remove: Optional function to remove passed path
    """

    try:
        remove(path)
    except OSError as err:
        # Only a missing file is tolerated; every other failure
        # (permissions, is-a-directory, ...) propagates to the caller.
        if err.errno != errno.ENOENT:
            raise
94 | ||
95 | ||
@contextlib.contextmanager
def remove_path_on_error(path, remove=delete_if_exists):
    """Protect code that wants to operate on PATH atomically.
    Any exception will cause PATH to be removed.

    :param path: File to work with
    :param remove: Optional function to remove passed path
    """

    try:
        yield
    except Exception:
        # save_and_reraise_exception() re-raises the original exception
        # (with its traceback preserved) after remove() runs, even if
        # remove() itself raises.
        with excutils.save_and_reraise_exception():
            remove(path)
110 | ||
111 | ||
def file_open(*args, **kwargs):
    """Open a file, delegating to the builtin open().

    See the built-in open() documentation for the accepted arguments.

    Note: kept in its own function so unit tests can substitute a stub
    module that avoids touching real filesystem state.
    """
    return open(*args, **kwargs)
122 | ||
123 | ||
def write_to_tempfile(content, path=None, suffix='', prefix='tmp'):
    """Create temporary file or use existing file.

    This util is needed for creating temporary file with
    specified content, suffix and prefix. If path is not None,
    it will be used for writing content. If the path doesn't
    exist it'll be created.

    :param content: content for temporary file (bytes).
    :param path: same as parameter 'dir' for mkstemp
    :param suffix: same as parameter 'suffix' for mkstemp
    :param prefix: same as parameter 'prefix' for mkstemp
    :returns: absolute path of the file that was written

    For example: it can be used in database tests for creating
    configuration files.
    """
    if path:
        ensure_tree(path)

    fd, abspath = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix)
    try:
        os.write(fd, content)
    finally:
        # Always release the descriptor, even if the write fails.
        os.close(fd)
    return abspath
0 | # Copyright 2011 OpenStack Foundation. | |
1 | # All Rights Reserved. | |
2 | # | |
3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may | |
4 | # not use this file except in compliance with the License. You may obtain | |
5 | # a copy of the License at | |
6 | # | |
7 | # http://www.apache.org/licenses/LICENSE-2.0 | |
8 | # | |
9 | # Unless required by applicable law or agreed to in writing, software | |
10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT | |
11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the | |
12 | # License for the specific language governing permissions and limitations | |
13 | # under the License. | |
14 | ||
15 | """Local storage of variables using weak references""" | |
16 | ||
17 | import threading | |
18 | import weakref | |
19 | ||
20 | ||
class WeakLocal(threading.local):
    """Thread-local storage whose values are held by weak reference."""

    def __getattribute__(self, attr):
        stored = super(WeakLocal, self).__getattribute__(attr)
        if stored:
            # What is stored is a weakref.ref, not the value itself, so
            # dereference it here; this yields None once the referent has
            # been garbage collected.
            stored = stored()
        return stored

    def __setattr__(self, attr, value):
        # Store only a weak reference so the attribute does not keep the
        # value alive.
        return super(WeakLocal, self).__setattr__(attr, weakref.ref(value))
34 | ||
35 | ||
# NOTE(mikal): the name "store" should be deprecated in the future
# (it has the same weak-reference behavior as weak_store below).
store = WeakLocal()

# A "weak" store uses weak references and allows an object to fall out of scope
# when it falls out of scope in the code that uses the thread local storage. A
# "strong" store will hold a reference to the object so that it never falls out
# of scope.
weak_store = WeakLocal()
strong_store = threading.local()
0 | # Copyright 2011 OpenStack Foundation. | |
1 | # Copyright 2010 United States Government as represented by the | |
2 | # Administrator of the National Aeronautics and Space Administration. | |
3 | # All Rights Reserved. | |
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); you may | |
6 | # not use this file except in compliance with the License. You may obtain | |
7 | # a copy of the License at | |
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT | |
13 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the | |
14 | # License for the specific language governing permissions and limitations | |
15 | # under the License. | |
16 | ||
17 | """OpenStack logging handler. | |
18 | ||
19 | This module adds to logging functionality by adding the option to specify | |
20 | a context object when calling the various log methods. If the context object | |
21 | is not specified, default formatting is used. Additionally, an instance uuid | |
22 | may be passed as part of the log message, which is intended to make it easier | |
23 | for admins to find messages related to a specific instance. | |
24 | ||
25 | It also allows setting of formatting information through conf. | |
26 | ||
27 | """ | |
28 | ||
29 | import copy | |
30 | import inspect | |
31 | import itertools | |
32 | import logging | |
33 | import logging.config | |
34 | import logging.handlers | |
35 | import os | |
36 | import socket | |
37 | import sys | |
38 | import traceback | |
39 | ||
40 | from oslo.config import cfg | |
41 | from oslo.utils import importutils | |
42 | from oslo.serialization import jsonutils | |
43 | import six | |
44 | from six import moves | |
45 | ||
# True when running on Python 2.6, which lacks some logging APIs
# (e.g. LoggerAdapter.isEnabledFor) that are worked around below.
_PY26 = sys.version_info[0:2] == (2, 6)
47 | ||
48 | from castellan.openstack.common._i18n import _ | |
49 | from castellan.openstack.common import local | |
50 | ||
51 | ||
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"


# Command-line options shared by all services: -d/--debug and -v/--verbose
# raise the root logger level above the default of WARNING.
common_cli_opts = [
    cfg.BoolOpt('debug',
                short='d',
                default=False,
                help='Print debugging output (set logging level to '
                     'DEBUG instead of default WARNING level).'),
    cfg.BoolOpt('verbose',
                short='v',
                default=False,
                help='Print more verbose output (set logging level to '
                     'INFO instead of default WARNING level).'),
]
67 | ||
# Command-line options controlling log destinations and formats.
logging_cli_opts = [
    cfg.StrOpt('log-config-append',
               metavar='PATH',
               deprecated_name='log-config',
               help='The name of a logging configuration file. This file '
                    'is appended to any existing logging configuration '
                    'files. For details about logging configuration files, '
                    'see the Python logging module documentation.'),
    cfg.StrOpt('log-format',
               metavar='FORMAT',
               help='DEPRECATED. '
                    'A logging.Formatter log message format string which may '
                    'use any of the available logging.LogRecord attributes. '
                    'This option is deprecated. Please use '
                    'logging_context_format_string and '
                    'logging_default_format_string instead.'),
    cfg.StrOpt('log-date-format',
               default=_DEFAULT_LOG_DATE_FORMAT,
               metavar='DATE_FORMAT',
               help='Format string for %%(asctime)s in log records. '
                    'Default: %(default)s .'),
    cfg.StrOpt('log-file',
               metavar='PATH',
               deprecated_name='logfile',
               help='(Optional) Name of log file to output to. '
                    'If no default is set, logging will go to stdout.'),
    cfg.StrOpt('log-dir',
               deprecated_name='logdir',
               help='(Optional) The base directory used for relative '
                    '--log-file paths.'),
    cfg.BoolOpt('use-syslog',
                default=False,
                help='Use syslog for logging. '
                     'Existing syslog format is DEPRECATED during I, '
                     'and will change in J to honor RFC5424.'),
    cfg.BoolOpt('use-syslog-rfc-format',
                # TODO(bogdando) remove or use True after existing
                # syslog format deprecation in J
                default=False,
                help='(Optional) Enables or disables syslog rfc5424 format '
                     'for logging. If enabled, prefixes the MSG part of the '
                     'syslog message with APP-NAME (RFC5424). The '
                     'format without the APP-NAME is deprecated in I, '
                     'and will be removed in J.'),
    cfg.StrOpt('syslog-log-facility',
               default='LOG_USER',
               help='Syslog facility to receive log lines.')
]
116 | ||
generic_log_opts = [
    cfg.BoolOpt('use_stderr',
                default=True,
                help='Log output to standard error.')
]

# Default per-library log levels used to quiet down chatty dependencies;
# overridable through the default_log_levels option below.
DEFAULT_LOG_LEVELS = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN',
                      'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO',
                      'oslo.messaging=INFO', 'iso8601=WARN',
                      'requests.packages.urllib3.connectionpool=WARN',
                      'urllib3.connectionpool=WARN', 'websocket=WARN',
                      "keystonemiddleware=WARN", "routes.middleware=WARN",
                      "stevedore=WARN"]
130 | ||
# Regular (non-CLI) options controlling record formatting.
log_opts = [
    cfg.StrOpt('logging_context_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [%(request_id)s %(user_identity)s] '
                       '%(instance)s%(message)s',
               help='Format string to use for log messages with context.'),
    cfg.StrOpt('logging_default_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [-] %(instance)s%(message)s',
               help='Format string to use for log messages without context.'),
    cfg.StrOpt('logging_debug_format_suffix',
               default='%(funcName)s %(pathname)s:%(lineno)d',
               help='Data to append to log format when level is DEBUG.'),
    cfg.StrOpt('logging_exception_prefix',
               default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
                       '%(instance)s',
               help='Prefix each line of exception output with this format.'),
    cfg.ListOpt('default_log_levels',
                default=DEFAULT_LOG_LEVELS,
                help='List of logger=LEVEL pairs.'),
    cfg.BoolOpt('publish_errors',
                default=False,
                help='Enables or disables publication of error events.'),
    cfg.BoolOpt('fatal_deprecations',
                default=False,
                help='Enables or disables fatal status of deprecations.'),

    # NOTE(mikal): there are two options here because sometimes we are handed
    # a full instance (and could include more information), and other times we
    # are just handed a UUID for the instance.
    cfg.StrOpt('instance_format',
               default='[instance: %(uuid)s] ',
               help='The format for an instance that is passed with the log '
                    'message.'),
    cfg.StrOpt('instance_uuid_format',
               default='[instance: %(uuid)s] ',
               help='The format for an instance UUID that is passed with the '
                    'log message.'),
]
170 | ||
CONF = cfg.CONF
# Register every option on the global config object at import time, so
# consumers only need to import this module before calling setup().
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
176 | ||
177 | ||
def list_opts():
    """Entry point for oslo.config-generator.

    Returns deep copies so the generator cannot mutate the live option
    definitions registered above.
    """
    option_groups = [common_cli_opts, logging_cli_opts,
                     generic_log_opts, log_opts]
    return [(None, copy.deepcopy(opts)) for opts in option_groups]
185 | ||
186 | ||
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
# module aware of it so it acts like other levels.
# AUDIT sits between INFO (20) and WARNING (30).
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')
192 | ||
193 | ||
try:
    NullHandler = logging.NullHandler
except AttributeError:  # NOTE(jkoelker) NullHandler added in Python 2.7
    # Minimal fallback for Python 2.6: swallow every record and skip
    # lock creation entirely.
    class NullHandler(logging.Handler):
        def handle(self, record):
            pass

        def emit(self, record):
            pass

        def createLock(self):
            self.lock = None
206 | ||
207 | ||
208 | def _dictify_context(context): | |
209 | if context is None: | |
210 | return None | |
211 | if not isinstance(context, dict) and getattr(context, 'to_dict', None): | |
212 | context = context.to_dict() | |
213 | return context | |
214 | ||
215 | ||
216 | def _get_binary_name(): | |
217 | return os.path.basename(inspect.stack()[-1][1]) | |
218 | ||
219 | ||
def _get_log_file_path(binary=None):
    """Resolve the log file path from CONF.log_file / CONF.log_dir.

    :param binary: program name used to derive a file name when only
                   log_dir is configured; defaults to the running binary
    :returns: the path to log to, or None when file logging is disabled
    """
    logfile = CONF.log_file
    logdir = CONF.log_dir

    if logfile:
        # An explicit file wins; prepend the directory when one is set.
        return os.path.join(logdir, logfile) if logdir else logfile

    if logdir:
        # Only a directory given: derive "<dir>/<binary>.log".
        binary = binary or _get_binary_name()
        return os.path.join(logdir, binary) + '.log'

    return None
235 | ||
236 | ||
class BaseLoggerAdapter(logging.LoggerAdapter):
    """LoggerAdapter base that adds the synthetic AUDIT level."""

    def audit(self, msg, *args, **kwargs):
        # Emit a record at the project-specific AUDIT level.
        self.log(logging.AUDIT, msg, *args, **kwargs)

    def isEnabledFor(self, level):
        if not _PY26:
            return super(BaseLoggerAdapter, self).isEnabledFor(level)
        # LoggerAdapter.isEnabledFor() only exists on Python 2.7+, and it
        # simply delegates to the wrapped logger - replicate that here so
        # Python 2.6 behaves the same way.
        return self.logger.isEnabledFor(level)
250 | ||
251 | ||
class LazyAdapter(BaseLoggerAdapter):
    """Adapter that defers creation of the real logger until first use."""

    def __init__(self, name='unknown', version='unknown'):
        self._logger = None
        self.extra = {}
        self.name = name
        self.version = version

    @property
    def logger(self):
        if self._logger is None:
            real = getLogger(self.name, self.version)
            if six.PY3:
                # NOTE(dhellmann): In Python 3 the code fails because the
                # 'manager' attribute cannot be found when using a
                # LoggerAdapter as the underlying logger. Borrow it from
                # the wrapped logger to work around the issue.
                real.manager = real.logger.manager
            self._logger = real
        return self._logger
269 | ||
270 | ||
class ContextAdapter(BaseLoggerAdapter):
    # Alias so callers can keep using the older warn() spelling.
    warn = logging.LoggerAdapter.warning

    def __init__(self, logger, project_name, version_string):
        """Wrap *logger*, stamping records with project/version info.

        :param logger: the standard library logger to delegate to
        :param project_name: project name added to every record
        :param version_string: project version added to every record
        """
        self.logger = logger
        self.project = project_name
        self.version = version_string
        self._deprecated_messages_sent = dict()

    @property
    def handlers(self):
        # Expose the wrapped logger's handlers; LoggerAdapter does not
        # proxy this attribute by default.
        return self.logger.handlers

    def deprecated(self, msg, *args, **kwargs):
        """Call this method when a deprecated feature is used.

        If the system is configured for fatal deprecations then the message
        is logged at the 'critical' level and :class:`DeprecatedConfig` will
        be raised.

        Otherwise, the message will be logged (once) at the 'warn' level.

        :raises: :class:`DeprecatedConfig` if the system is configured for
                 fatal deprecations.

        """
        stdmsg = _("Deprecated: %s") % msg
        if CONF.fatal_deprecations:
            self.critical(stdmsg, *args, **kwargs)
            # NOTE(review): DeprecatedConfig is expected to be defined
            # elsewhere in this module - confirm it is in scope here.
            raise DeprecatedConfig(msg=stdmsg)

        # Using a list because a tuple with dict can't be stored in a set.
        sent_args = self._deprecated_messages_sent.setdefault(msg, list())

        if args in sent_args:
            # Already logged this message, so don't log it again.
            return

        sent_args.append(args)
        self.warn(stdmsg, *args, **kwargs)

    def process(self, msg, kwargs):
        """Inject context, instance and identity info into *kwargs*.

        Called by LoggerAdapter on every log call; returns the (possibly
        unicode-coerced) message and the augmented keyword arguments.
        """
        # NOTE(jecarey): If msg is not unicode, coerce it into unicode
        #                before it can get to the python logging and
        #                possibly cause string encoding trouble
        if not isinstance(msg, six.text_type):
            msg = six.text_type(msg)

        if 'extra' not in kwargs:
            kwargs['extra'] = {}
        extra = kwargs['extra']

        # Fall back to the thread-local context when none was passed in.
        context = kwargs.pop('context', None)
        if not context:
            context = getattr(local.store, 'context', None)
        if context:
            extra.update(_dictify_context(context))

        instance = kwargs.pop('instance', None)
        instance_uuid = (extra.get('instance_uuid') or
                         kwargs.pop('instance_uuid', None))
        instance_extra = ''
        if instance:
            instance_extra = CONF.instance_format % instance
        elif instance_uuid:
            instance_extra = (CONF.instance_uuid_format
                              % {'uuid': instance_uuid})
        extra['instance'] = instance_extra

        extra.setdefault('user_identity', kwargs.pop('user_identity', None))

        extra['project'] = self.project
        extra['version'] = self.version
        # Keep a snapshot under 'extra' so formatters (e.g. JSONFormatter)
        # can see everything that was attached to the record.
        extra['extra'] = extra.copy()
        return msg, kwargs
346 | ||
347 | ||
class JSONFormatter(logging.Formatter):
    """Formatter that serializes the whole LogRecord as a JSON object."""

    def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but its still there
        #                since logging.config.fileConfig passes it.
        self.datefmt = datefmt

    def formatException(self, ei, strip_newlines=True):
        """Return the traceback as a list of lines.

        With strip_newlines (the default) each traceback entry is split
        into individual lines and empty lines are dropped.
        """
        lines = traceback.format_exception(*ei)
        if strip_newlines:
            lines = [moves.filter(
                lambda x: x,
                line.rstrip().splitlines()) for line in lines]
            lines = list(itertools.chain(*lines))
        return lines

    def format(self, record):
        """Serialize *record*'s attributes to a JSON string."""
        message = {'message': record.getMessage(),
                   'asctime': self.formatTime(record, self.datefmt),
                   'name': record.name,
                   'msg': record.msg,
                   'args': record.args,
                   'levelname': record.levelname,
                   'levelno': record.levelno,
                   'pathname': record.pathname,
                   'filename': record.filename,
                   'module': record.module,
                   'lineno': record.lineno,
                   'funcname': record.funcName,
                   'created': record.created,
                   'msecs': record.msecs,
                   'relative_created': record.relativeCreated,
                   'thread': record.thread,
                   'thread_name': record.threadName,
                   'process_name': record.processName,
                   'process': record.process,
                   'traceback': None}

        # 'extra' is attached by ContextAdapter.process(); include it
        # verbatim when present.
        if hasattr(record, 'extra'):
            message['extra'] = record.extra

        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)

        return jsonutils.dumps(message)
392 | ||
393 | ||
def _create_logging_excepthook(product_name):
    """Build a sys.excepthook that logs uncaught exceptions.

    :param product_name: logger name used for the critical record
    :returns: a callable with the (exc_type, value, tb) excepthook
              signature
    """
    def logging_excepthook(exc_type, value, tb):
        summary = "".join(traceback.format_exception_only(exc_type, value))
        # Attach the full traceback via exc_info so formatters can render
        # it; the message itself is just the one-line summary.
        getLogger(product_name).critical(summary,
                                         exc_info=(exc_type, value, tb))
    return logging_excepthook
401 | ||
402 | ||
class LogConfigError(Exception):
    """Raised when a logging configuration file cannot be loaded."""

    message = _('Error loading logging config %(log_config)s: %(err_msg)s')

    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg

    def __str__(self):
        return self.message % {'log_config': self.log_config,
                               'err_msg': self.err_msg}
414 | ||
415 | ||
def _load_log_config(log_config_append):
    """Append logging configuration from the named file.

    :param log_config_append: path to a fileConfig-style config file
    :raises: LogConfigError when the file cannot be parsed
    """
    try:
        logging.config.fileConfig(log_config_append,
                                  disable_existing_loggers=False)
    except (moves.configparser.Error, KeyError) as exc:
        # Surface parse problems as a single well-defined error type.
        raise LogConfigError(log_config_append, six.text_type(exc))
422 | ||
423 | ||
def setup(product_name, version='unknown'):
    """Setup logging.

    :param product_name: name recorded on log records and used for the
                         uncaught-exception logger
    :param version: project version recorded on log records
    """
    log_config = CONF.log_config_append
    if log_config:
        # A user-supplied config file takes precedence over CONF-driven
        # handler/formatter setup.
        _load_log_config(log_config)
    else:
        _setup_logging_from_conf(product_name, version)
    sys.excepthook = _create_logging_excepthook(product_name)
431 | ||
432 | ||
def set_defaults(logging_context_format_string=None,
                 default_log_levels=None):
    """Override the built-in defaults of the logging options.

    Both arguments are optional: default_log_levels was introduced after
    the original signature in a backwards-incompatible change, so callers
    may pass either one alone.
    """
    overrides = {}
    if default_log_levels is not None:
        overrides['default_log_levels'] = default_log_levels
    if logging_context_format_string is not None:
        overrides['logging_context_format_string'] = \
            logging_context_format_string
    if overrides:
        cfg.set_defaults(log_opts, **overrides)
447 | ||
448 | ||
def _find_facility_from_conf():
    """Map CONF.syslog_log_facility to a SysLogHandler facility code.

    :returns: the numeric syslog facility value
    :raises: TypeError if the configured facility name is unknown
    """
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)

    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)

    if facility is None:
        # list() is required here: on Python 3 dict.keys() returns a view
        # object which has no extend() method, so the original
        # `facility_names.keys()` would raise AttributeError below.
        valid_facilities = list(facility_names.keys())
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac
                                  for fac in valid_facilities))

    return facility
471 | ||
472 | ||
class RFCSysLogHandler(logging.handlers.SysLogHandler):
    """SysLogHandler that prefixes messages with APP-NAME (RFC 5424)."""

    def __init__(self, *args, **kwargs):
        self.binary_name = _get_binary_name()
        # Old-style base call: on Python 2.6 SysLogHandler is a classic
        # class ('classobj'), so super() cannot be used.
        logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)

    def format(self, record):
        # Old-style base call for the same Python 2.6 reason as above.
        formatted = logging.handlers.SysLogHandler.format(self, record)
        return '%s %s' % (self.binary_name, formatted)
488 | ||
489 | ||
def _setup_logging_from_conf(project, version):
    """Wire up root-logger handlers, formatters and levels from CONF.

    :param project: project name passed to the context-aware formatter
    :param version: project version passed to the context-aware formatter
    """
    log_root = getLogger(None).logger
    # Start from a clean slate so repeated setup() calls do not stack
    # duplicate handlers.
    for handler in log_root.handlers:
        log_root.removeHandler(handler)

    logpath = _get_log_file_path()
    if logpath:
        filelog = logging.handlers.WatchedFileHandler(logpath)
        log_root.addHandler(filelog)

    if CONF.use_stderr:
        # NOTE(review): ColorHandler is expected to be defined elsewhere
        # in this module - confirm it is in scope here.
        streamlog = ColorHandler()
        log_root.addHandler(streamlog)

    elif not logpath:
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = logging.StreamHandler(sys.stdout)
        log_root.addHandler(streamlog)

    if CONF.publish_errors:
        # Imported lazily by name so oslo.messaging stays an optional
        # dependency.
        handler = importutils.import_object(
            "oslo.messaging.notify.log_handler.PublishErrorsHandler",
            logging.ERROR)
        log_root.addHandler(handler)

    datefmt = CONF.log_date_format
    for handler in log_root.handlers:
        # NOTE(alaski): CONF.log_format overrides everything currently. This
        # should be deprecated in favor of context aware formatting.
        if CONF.log_format:
            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
                                                   datefmt=datefmt))
            log_root.info('Deprecated: log_format is now deprecated and will '
                          'be removed in the next release')
        else:
            handler.setFormatter(ContextFormatter(project=project,
                                                  version=version,
                                                  datefmt=datefmt))

    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)

    # Apply the per-library level overrides ("logger=LEVEL" pairs).
    for pair in CONF.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        logger = logging.getLogger(mod)
        # NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
        # to integer code.
        if sys.version_info < (2, 7):
            level = logging.getLevelName(level_name)
            logger.setLevel(level)
        else:
            logger.setLevel(level_name)

    if CONF.use_syslog:
        try:
            facility = _find_facility_from_conf()
            # TODO(bogdando) use the format provided by RFCSysLogHandler
            # after existing syslog format deprecation in J
            if CONF.use_syslog_rfc_format:
                syslog = RFCSysLogHandler(address='/dev/log',
                                          facility=facility)
            else:
                syslog = logging.handlers.SysLogHandler(address='/dev/log',
                                                        facility=facility)
            log_root.addHandler(syslog)
        except socket.error:
            log_root.error('Unable to add syslog handler. Verify that syslog '
                           'is running.')
563 | ||
564 | ||
# Cache of ContextAdapter instances handed out by getLogger(), keyed by
# logger name so each name maps to a single adapter.
_loggers = {}
566 | ||
567 | ||
def getLogger(name='unknown', version='unknown'):
    """Return the cached ContextAdapter for *name*, creating it on first
    use.

    :param name: logger name
    :param version: project version recorded on a newly created adapter
                    (ignored when the adapter is already cached)
    """
    try:
        return _loggers[name]
    except KeyError:
        adapter = ContextAdapter(logging.getLogger(name), name, version)
        _loggers[name] = adapter
        return adapter
574 | ||
575 | ||
def getLazyLogger(name='unknown', version='unknown'):
    """Returns lazy logger.

    Creates a pass-through logger that does not create the real logger
    until it is really needed and delegates all calls to the real logger
    once it is created.

    :param name: logger name
    :param version: project version recorded on the adapter
    """
    return LazyAdapter(name, version)
584 | ||
585 | ||
class WritableLogger(object):
    """File-like adapter exposing write() on top of a logger."""

    def __init__(self, logger, level=logging.INFO):
        self.logger = logger
        self.level = level

    def write(self, msg):
        # Strip trailing whitespace/newlines, which would otherwise show
        # up as blank log lines.
        self.logger.log(self.level, msg.rstrip())
595 | ||
596 | ||
class ContextFormatter(logging.Formatter):
    """A context.RequestContext aware formatter configured through flags.

    The flags used to set format strings are: logging_context_format_string
    and logging_default_format_string. You can also specify
    logging_debug_format_suffix to append extra formatting if the log level is
    debug.

    For information about what variables are available for the formatter see:
    http://docs.python.org/library/logging.html#formatter

    If available, uses the context value stored in TLS - local.store.context

    """

    def __init__(self, *args, **kwargs):
        """Initialize ContextFormatter instance

        Takes additional keyword arguments which can be used in the message
        format string.

        :keyword project: project name
        :type project: string
        :keyword version: project version
        :type version: string

        """

        # Stash project/version so format() can expose them as record
        # attributes; pop them before delegating so the stdlib
        # Formatter doesn't see unknown kwargs.
        self.project = kwargs.pop('project', 'unknown')
        self.version = kwargs.pop('version', 'unknown')

        logging.Formatter.__init__(self, *args, **kwargs)

    def format(self, record):
        """Uses contextstring if request_id is set, otherwise default."""

        # NOTE(jecarey): If msg is not unicode, coerce it into unicode
        # before it can get to the python logging and
        # possibly cause string encoding trouble
        if not isinstance(record.msg, six.text_type):
            record.msg = six.text_type(record.msg)

        # store project info
        record.project = self.project
        record.version = self.version

        # store request info; local.store is thread-local, so each thread
        # sees its own request context (if one was stored).
        context = getattr(local.store, 'context', None)
        if context:
            d = _dictify_context(context)
            for k, v in d.items():
                setattr(record, k, v)

        # NOTE(sdague): default the fancier formatting params
        # to an empty string so we don't throw an exception if
        # they get used
        for key in ('instance', 'color', 'user_identity'):
            if key not in record.__dict__:
                record.__dict__[key] = ''

        # Pick the context-aware format only when a request_id made it
        # onto the record; otherwise fall back to the plain format.
        if record.__dict__.get('request_id'):
            fmt = CONF.logging_context_format_string
        else:
            fmt = CONF.logging_default_format_string

        if (record.levelno == logging.DEBUG and
                CONF.logging_debug_format_suffix):
            fmt += " " + CONF.logging_debug_format_suffix

        # NOTE(review): on Python >= 3.2 Formatter keeps its format in a
        # style object, so we must replace both _style and _fmt; this
        # reaches into private attributes of logging.Formatter.
        if sys.version_info < (3, 2):
            self._fmt = fmt
        else:
            self._style = logging.PercentStyle(fmt)
            self._fmt = self._style._fmt
        # Cache this on the record, Logger will respect our formatted copy
        if record.exc_info:
            record.exc_text = self.formatException(record.exc_info, record)
        return logging.Formatter.format(self, record)

    def formatException(self, exc_info, record=None):
        """Format exception output with CONF.logging_exception_prefix.

        Without a record, defers to the stock stdlib behavior; with one,
        prefixes every traceback line with the configured prefix
        interpolated against the record's attributes.
        """
        if not record:
            return logging.Formatter.formatException(self, exc_info)

        stringbuffer = moves.StringIO()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                                  None, stringbuffer)
        lines = stringbuffer.getvalue().split('\n')
        stringbuffer.close()

        # The prefix may reference %(asctime)s, which is normally only
        # set by Formatter.format(); compute it here if needed.
        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
            record.asctime = self.formatTime(record, self.datefmt)

        formatted_lines = []
        for line in lines:
            pl = CONF.logging_exception_prefix % record.__dict__
            fl = '%s%s' % (pl, line)
            formatted_lines.append(fl)
        return '\n'.join(formatted_lines)
696 | ||
697 | ||
class ColorHandler(logging.StreamHandler):
    """Stream handler that tags records with an ANSI color escape.

    The escape for the record's level is stored as ``record.color`` so a
    format string containing ``%(color)s`` can colorize output.
    """

    # Level -> ANSI escape sequence. NOTE(review): logging.AUDIT is a
    # custom level, presumably registered earlier in this module — it is
    # not part of the stdlib logging package.
    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m',  # GREEN
        logging.INFO: '\033[00;36m',  # CYAN
        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
        logging.WARN: '\033[01;33m',  # BOLD YELLOW
        logging.ERROR: '\033[01;31m',  # BOLD RED
        logging.CRITICAL: '\033[01;31m',  # BOLD RED
    }

    def format(self, record):
        """Attach the level's color code, then delegate formatting."""
        record.color = self.LEVEL_COLORS[record.levelno]
        return super(ColorHandler, self).format(record)
711 | ||
712 | ||
class DeprecatedConfig(Exception):
    """Raised for a fatal use of a deprecated configuration option."""

    # Translated template; interpolated with the offending option message.
    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        """Interpolate *msg* into the message template and initialize.

        The original code called ``super(Exception, self).__init__``,
        which skips ``Exception`` itself in the MRO; passing the class
        being defined is the conventional (behaviorally equivalent) form.
        """
        super(DeprecatedConfig, self).__init__(self.message % dict(msg=msg))
0 | # -*- coding: utf-8 -*- | |
1 | # | |
2 | # Copyright (c) 2012 OpenStack Foundation. | |
3 | # All Rights Reserved. | |
4 | # | |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); you may | |
6 | # not use this file except in compliance with the License. You may obtain | |
7 | # a copy of the License at | |
8 | # | |
9 | # http://www.apache.org/licenses/LICENSE-2.0 | |
10 | # | |
11 | # Unless required by applicable law or agreed to in writing, software | |
12 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT | |
13 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the | |
14 | # License for the specific language governing permissions and limitations | |
15 | # under the License. | |
16 | ||
17 | """ | |
18 | Common Policy Engine Implementation | |
19 | ||
20 | Policies can be expressed in one of two forms: A list of lists, or a | |
21 | string written in the new policy language. | |
22 | ||
23 | In the list-of-lists representation, each check inside the innermost | |
24 | list is combined as with an "and" conjunction--for that check to pass, | |
25 | all the specified checks must pass. These innermost lists are then | |
26 | combined as with an "or" conjunction. As an example, take the following | |
27 | rule, expressed in the list-of-lists representation:: | |
28 | ||
29 | [["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]] | |
30 | ||
31 | This is the original way of expressing policies, but there now exists a | |
32 | new way: the policy language. | |
33 | ||
34 | In the policy language, each check is specified the same way as in the | |
35 | list-of-lists representation: a simple "a:b" pair that is matched to | |
36 | the correct class to perform that check:: | |
37 | ||
38 | +===========================================================================+ | |
39 | | TYPE | SYNTAX | | |
40 | +===========================================================================+ | |
41 | |User's Role | role:admin | | |
42 | +---------------------------------------------------------------------------+ | |
43 | |Rules already defined on policy | rule:admin_required | | |
44 | +---------------------------------------------------------------------------+ | |
45 | |Against URL's¹ | http://my-url.org/check | | |
46 | +---------------------------------------------------------------------------+ | |
47 | |User attributes² | project_id:%(target.project.id)s | | |
48 | +---------------------------------------------------------------------------+ | |
49 | |Strings | <variable>:'xpto2035abc' | | |
50 | | | 'myproject':<variable> | | |
51 | +---------------------------------------------------------------------------+ | |
52 | | | project_id:xpto2035abc | | |
53 | |Literals | domain_id:20 | | |
54 | | | True:%(user.enabled)s | | |
55 | +===========================================================================+ | |
56 | ||
57 | ¹URL checking must return 'True' to be valid | |
58 | ²User attributes (obtained through the token): user_id, domain_id or project_id | |
59 | ||
60 | Conjunction operators are available, allowing for more expressiveness | |
61 | in crafting policies. So, in the policy language, the previous check in | |
62 | list-of-lists becomes:: | |
63 | ||
64 | role:admin or (project_id:%(project_id)s and role:projectadmin) | |
65 | ||
66 | The policy language also has the "not" operator, allowing a richer | |
67 | policy rule:: | |
68 | ||
69 | project_id:%(project_id)s and not role:dunce | |
70 | ||
71 | Attributes sent along with API calls can be used by the policy engine | |
72 | (on the right side of the expression), by using the following syntax:: | |
73 | ||
74 | <some_value>:%(user.id)s | |
75 | ||
76 | Contextual attributes of objects identified by their IDs are loaded | |
77 | from the database. They are also available to the policy engine and | |
78 | can be checked through the `target` keyword:: | |
79 | ||
80 | <some_value>:%(target.role.name)s | |
81 | ||
82 | Finally, two special policy checks should be mentioned; the policy | |
83 | check "@" will always accept an access, and the policy check "!" will | |
84 | always reject an access. (Note that if a rule is either the empty | |
85 | list ("[]") or the empty string, this is equivalent to the "@" policy | |
86 | check.) Of these, the "!" policy check is probably the most useful, | |
87 | as it allows particular rules to be explicitly disabled. | |
88 | """ | |
89 | ||
90 | import abc | |
91 | import ast | |
92 | import copy | |
93 | import os | |
94 | import re | |
95 | ||
96 | from oslo.config import cfg | |
97 | from oslo.serialization import jsonutils | |
98 | import six | |
99 | import six.moves.urllib.parse as urlparse | |
100 | import six.moves.urllib.request as urlrequest | |
101 | ||
102 | from castellan.openstack.common import fileutils | |
103 | from castellan.openstack.common._i18n import _, _LE, _LI | |
104 | from castellan.openstack.common import log as logging | |
105 | ||
106 | ||
# Configuration options controlling where policy rules are loaded from
# and which rule applies when a requested rule is missing.
policy_opts = [
    cfg.StrOpt('policy_file',
               default='policy.json',
               help=_('The JSON file that defines policies.')),
    cfg.StrOpt('policy_default_rule',
               default='default',
               help=_('Default rule. Enforced when a requested rule is not '
                      'found.')),
    cfg.MultiStrOpt('policy_dirs',
                    default=['policy.d'],
                    help=_('Directories where policy configuration files are '
                           'stored. They can be relative to any directory '
                           'in the search path defined by the config_dir '
                           'option, or absolute paths. The file defined by '
                           'policy_file must exist for these directories to '
                           'be searched.')),
]

CONF = cfg.CONF
CONF.register_opts(policy_opts)

LOG = logging.getLogger(__name__)

# Registry mapping a check "kind" (the part before ':') to the factory
# that builds its Check; a None key, if registered, acts as the fallback
# for unrecognized kinds (see _parse_check).
_checks = {}
131 | ||
132 | ||
def list_opts():
    """Entry point for oslo.config-generator.

    Returns a deep copy so callers cannot mutate the module's options.
    """
    opts = copy.deepcopy(policy_opts)
    return [(None, opts)]
136 | ||
137 | ||
class PolicyNotAuthorized(Exception):
    """Raised when a policy check fails and the caller asked to raise."""

    def __init__(self, rule):
        """Build the exception for the *rule* that denied access."""
        formatted = _("Policy doesn't allow %s to be performed.") % rule
        super(PolicyNotAuthorized, self).__init__(formatted)
143 | ||
144 | ||
class Rules(dict):
    """A store for rules. Handles the default_rule setting directly."""

    @classmethod
    def load_json(cls, data, default_rule=None):
        """Allow loading of JSON rule data.

        :param data: JSON text mapping rule names to rule strings.
        :param default_rule: name (or Check) used when a rule is missing.
        :returns: a Rules instance with each value parsed via parse_rule().
        """

        # Suck in the JSON data and parse the rules
        rules = dict((k, parse_rule(v)) for k, v in
                     jsonutils.loads(data).items())

        return cls(rules, default_rule)

    def __init__(self, rules=None, default_rule=None):
        """Initialize the Rules store."""

        super(Rules, self).__init__(rules or {})
        self.default_rule = default_rule

    def __missing__(self, key):
        """Implements the default rule handling.

        Invoked by dict on a missing key; resolves the lookup through
        self.default_rule when possible, else raises KeyError.
        """

        # A dict default_rule cannot be resolved to a single rule.
        if isinstance(self.default_rule, dict):
            raise KeyError(key)

        # If the default rule isn't actually defined, do something
        # reasonably intelligent
        if not self.default_rule:
            raise KeyError(key)

        if isinstance(self.default_rule, BaseCheck):
            return self.default_rule

        # We need to check this or we can get infinite recursion
        if self.default_rule not in self:
            raise KeyError(key)

        elif isinstance(self.default_rule, six.string_types):
            return self[self.default_rule]

        # NOTE(review): if default_rule is present in the dict but is not
        # a string, execution falls off the end and returns None —
        # confirm callers never hit that combination.

    def __str__(self):
        """Dumps a string representation of the rules."""

        # Start by building the canonical strings for the rules
        out_rules = {}
        for key, value in self.items():
            # Use empty string for singleton TrueCheck instances
            if isinstance(value, TrueCheck):
                out_rules[key] = ''
            else:
                out_rules[key] = str(value)

        # Dump a pretty-printed JSON representation
        return jsonutils.dumps(out_rules, indent=4)
199 | ||
200 | ||
class Enforcer(object):
    """Responsible for loading and enforcing rules.

    :param policy_file: Custom policy file to use, if none is
                        specified, `CONF.policy_file` will be
                        used.
    :param rules: Default dictionary / Rules to use. It will be
                  considered just in the first instantiation. If
                  `load_rules(True)`, `clear()` or `set_rules(True)`
                  is called this will be overwritten.
    :param default_rule: Default rule to use, CONF.default_rule will
                         be used if none is specified.
    :param use_conf: Whether to load rules from cache or config file.
    :param overwrite: Whether to overwrite existing rules when reload rules
                      from config file.
    """

    def __init__(self, policy_file=None, rules=None,
                 default_rule=None, use_conf=True, overwrite=True):
        self.default_rule = default_rule or CONF.policy_default_rule
        self.rules = Rules(rules, self.default_rule)

        # Resolved lazily by load_rules() via _get_policy_path().
        self.policy_path = None
        self.policy_file = policy_file or CONF.policy_file
        self.use_conf = use_conf
        self.overwrite = overwrite

    def set_rules(self, rules, overwrite=True, use_conf=False):
        """Create a new Rules object based on the provided dict of rules.

        :param rules: New rules to use. It should be an instance of dict.
        :param overwrite: Whether to overwrite current rules or update them
                          with the new rules.
        :param use_conf: Whether to reload rules from cache or config file.
        :raises TypeError: if *rules* is not a dict (or Rules).
        """

        if not isinstance(rules, dict):
            raise TypeError(_("Rules must be an instance of dict or Rules, "
                              "got %s instead") % type(rules))
        self.use_conf = use_conf
        if overwrite:
            self.rules = Rules(rules, self.default_rule)
        else:
            self.rules.update(rules)

    def clear(self):
        """Clears Enforcer rules, policy's cache and policy's path."""
        self.set_rules({})
        fileutils.delete_cached_file(self.policy_path)
        self.default_rule = None
        self.policy_path = None

    def load_rules(self, force_reload=False):
        """Loads policy_path's rules.

        Policy file is cached and will be reloaded if modified. Rules
        from files under CONF.policy_dirs are merged in without
        overwriting (see the False passed below).

        :param force_reload: Whether to reload rules from config file.
        """

        if force_reload:
            self.use_conf = force_reload

        if self.use_conf:
            if not self.policy_path:
                self.policy_path = self._get_policy_path(self.policy_file)

            self._load_policy_file(self.policy_path, force_reload,
                                   overwrite=self.overwrite)
            for path in CONF.policy_dirs:
                try:
                    path = self._get_policy_path(path)
                except cfg.ConfigFilesNotFoundError:
                    # A missing policy dir is not fatal; skip it.
                    LOG.info(_LI("Can not find policy directory: %s"), path)
                    continue
                self._walk_through_policy_directory(path,
                                                    self._load_policy_file,
                                                    force_reload, False)

    @staticmethod
    def _walk_through_policy_directory(path, func, *args):
        """Apply *func* to every non-hidden file directly under *path*."""
        # We do not iterate over sub-directories.
        policy_files = next(os.walk(path))[2]
        # Sort for a deterministic load order.
        policy_files.sort()
        for policy_file in [p for p in policy_files if not p.startswith('.')]:
            func(os.path.join(path, policy_file), *args)

    def _load_policy_file(self, path, force_reload, overwrite=True):
        """Parse the JSON file at *path* into self.rules when (re)loaded."""
        reloaded, data = fileutils.read_cached_file(
            path, force_reload=force_reload)
        if reloaded or not self.rules or not overwrite:
            rules = Rules.load_json(data, self.default_rule)
            self.set_rules(rules, overwrite=overwrite, use_conf=True)
            LOG.debug("Rules successfully reloaded")

    def _get_policy_path(self, path):
        """Locate the policy json data file/path.

        :param path: Its value can be a full path or a relative path. When
                     a full path is specified, this function just returns
                     the full path. When a relative path is specified, this
                     function will search configuration directories to
                     find one that exists.

        :returns: The policy path

        :raises: ConfigFilesNotFoundError if the file/path couldn't
                 be located.
        """
        policy_path = CONF.find_file(path)

        if policy_path:
            return policy_path

        raise cfg.ConfigFilesNotFoundError((path,))

    def enforce(self, rule, target, creds, do_raise=False,
                exc=None, *args, **kwargs):
        """Checks authorization of a rule against the target and credentials.

        :param rule: A string or BaseCheck instance specifying the rule
                     to evaluate.
        :param target: As much information about the object being operated
                       on as possible, as a dictionary.
        :param creds: As much information about the user performing the
                      action as possible, as a dictionary.
        :param do_raise: Whether to raise an exception or not if check
                         fails.
        :param exc: Class of the exception to raise if the check fails.
                    Any remaining arguments passed to enforce() (both
                    positional and keyword arguments) will be passed to
                    the exception class. If not specified, PolicyNotAuthorized
                    will be used.

        :return: Returns False if the policy does not allow the action and
                 exc is not provided; otherwise, returns a value that
                 evaluates to True.  Note: for rules using the "case"
                 expression, this True value will be the specified string
                 from the expression.
        """

        self.load_rules()

        # Allow the rule to be a Check tree
        if isinstance(rule, BaseCheck):
            result = rule(target, creds, self)
        elif not self.rules:
            # No rules to reference means we're going to fail closed
            result = False
        else:
            try:
                # Evaluate the rule
                result = self.rules[rule](target, creds, self)
            except KeyError:
                LOG.debug("Rule [%s] doesn't exist" % rule)
                # If the rule doesn't exist, fail closed
                result = False

        # If it is False, raise the exception if requested
        if do_raise and not result:
            if exc:
                raise exc(*args, **kwargs)

            raise PolicyNotAuthorized(rule)

        return result
366 | ||
367 | ||
@six.add_metaclass(abc.ABCMeta)
class BaseCheck(object):
    """Abstract base class for Check classes."""

    @abc.abstractmethod
    def __str__(self):
        """Return the policy-language text of the Check tree at this node."""

    @abc.abstractmethod
    def __call__(self, target, cred, enforcer):
        """Perform the check.

        Returns False to reject the access, or a truthy value (not
        necessarily True) to accept it.
        """
387 | ||
388 | ||
class FalseCheck(BaseCheck):
    """A policy check that unconditionally rejects access."""

    def __str__(self):
        """Render as the special "deny everything" token."""
        return "!"

    def __call__(self, target, cred, enforcer):
        """Always deny, regardless of target or credentials."""
        return False
401 | ||
402 | ||
class TrueCheck(BaseCheck):
    """A policy check that unconditionally allows access."""

    def __str__(self):
        """Render as the special "allow everything" token."""
        return "@"

    def __call__(self, target, cred, enforcer):
        """Always allow, regardless of target or credentials."""
        return True
415 | ||
416 | ||
class Check(BaseCheck):
    """A base class to allow for user-defined policy checks."""

    def __init__(self, kind, match):
        """Store the two halves of a "kind:match" check.

        :param kind: the field before the ':' in the rule string.
        :param match: the field after the ':' in the rule string.
        """
        self.kind = kind
        self.match = match

    def __str__(self):
        """Render back to the "kind:match" rule form."""
        return "%s:%s" % (self.kind, self.match)
436 | ||
437 | ||
class NotCheck(BaseCheck):
    """Implements the "not" logical operator.

    Wraps another policy check and inverts its boolean result.
    """

    def __init__(self, rule):
        """Wrap *rule*, the Check whose result will be negated."""
        self.rule = rule

    def __str__(self):
        """Render as "not <wrapped-check>"."""
        return "not %s" % self.rule

    def __call__(self, target, cred, enforcer):
        """Evaluate the wrapped check and return its logical inverse."""
        inner = self.rule(target, cred, enforcer)
        return not inner
464 | ||
465 | ||
class AndCheck(BaseCheck):
    """Implements the "and" logical operator.

    A conjunction: passes only when every wrapped check passes.
    """

    def __init__(self, rules):
        """Store *rules*, the list of checks to be conjoined."""
        self.rules = rules

    def __str__(self):
        """Render as a parenthesized 'and'-joined expression."""
        joined = ' and '.join(str(rule) for rule in self.rules)
        return "(%s)" % joined

    def __call__(self, target, cred, enforcer):
        """Return True only if every wrapped rule accepts."""
        return all(rule(target, cred, enforcer) for rule in self.rules)

    def add_check(self, rule):
        """Append *rule* to the conjunction; returns self for chaining."""
        self.rules.append(rule)
        return self
506 | ||
507 | ||
class OrCheck(BaseCheck):
    """Implements the "or" operator.

    A disjunction: passes when at least one wrapped check passes.
    """

    def __init__(self, rules):
        """Store *rules*, the list of checks to be disjoined."""
        self.rules = rules

    def __str__(self):
        """Render as a parenthesized 'or'-joined expression."""
        joined = ' or '.join(str(rule) for rule in self.rules)
        return "(%s)" % joined

    def __call__(self, target, cred, enforcer):
        """Return True if at least one wrapped rule accepts."""
        return any(rule(target, cred, enforcer) for rule in self.rules)

    def add_check(self, rule):
        """Append *rule* to the disjunction; returns self for chaining."""
        self.rules.append(rule)
        return self
548 | ||
549 | ||
def _parse_check(rule):
    """Parse a single base check rule into an appropriate Check object.

    :param rule: a "kind:match" string, or one of the special tokens
                 '!' (always deny) and '@' (always allow).
    :returns: a BaseCheck instance; unparseable or unhandled rules fail
              closed by returning FalseCheck().
    """

    # Handle the special checks
    if rule == '!':
        return FalseCheck()
    elif rule == '@':
        return TrueCheck()

    try:
        kind, match = rule.split(':', 1)
    except Exception:
        # Pass the rule lazily so interpolation only happens when the
        # record is actually emitted (standard logging best practice).
        LOG.exception(_LE("Failed to understand rule %s"), rule)
        # If the rule is invalid, we'll fail closed
        return FalseCheck()

    # Find what implements the check; the None key, if registered, is
    # the fallback factory for unrecognized kinds.
    if kind in _checks:
        return _checks[kind](kind, match)
    elif None in _checks:
        return _checks[None](kind, match)
    else:
        LOG.error(_LE("No handler for matches of kind %s"), kind)
        return FalseCheck()
574 | ||
575 | ||
def _parse_list_rule(rule):
    """Translates the old list-of-lists syntax into a tree of Check objects.

    Provided for backwards compatibility.
    """

    # An empty rule list means "always allow".
    if not rule:
        return TrueCheck()

    # Outer list is joined by "or"; inner list by "and".
    or_list = []
    for inner_rule in rule:
        # Elide empty inner lists.
        if not inner_rule:
            continue

        # A bare string counts as a one-element inner list.
        if isinstance(inner_rule, six.string_types):
            inner_rule = [inner_rule]

        # Parse the inner rules into Check objects and conjoin them,
        # omitting the AndCheck wrapper for a single check.
        checks = [_parse_check(check_str) for check_str in inner_rule]
        or_list.append(checks[0] if len(checks) == 1 else AndCheck(checks))

    # Nothing survived eliding: fail closed. A single disjunct needs no
    # OrCheck wrapper.
    if not or_list:
        return FalseCheck()
    if len(or_list) == 1:
        return or_list[0]
    return OrCheck(or_list)
613 | ||
614 | ||
# Used for tokenizing the policy language: splits on runs of whitespace.
# Parentheses are handled separately by _parse_tokenize.
_tokenize_re = re.compile(r'\s+')
617 | ||
618 | ||
def _parse_tokenize(rule):
    """Tokenizer for the policy language.

    Most of the single-character tokens are specified in the
    _tokenize_re; however, parentheses need to be handled specially,
    because they can appear inside a check string. Thankfully, those
    parentheses that appear inside a check string can never occur at
    the very beginning or end ("%(variable)s" is the correct syntax).

    Yields (token_type, value) pairs, where token_type is one of
    '(', ')', 'and', 'or', 'not', 'string', or 'check'.
    """

    for tok in _tokenize_re.split(rule):
        # Skip empty tokens
        if not tok or tok.isspace():
            continue

        # Handle leading parens on the token: emit one '(' per
        # stripped character.
        clean = tok.lstrip('(')
        for i in range(len(tok) - len(clean)):
            yield '(', '('

        # If it was only parentheses, continue
        if not clean:
            continue
        else:
            tok = clean

        # Handle trailing parens on the token (emitted after the token
        # itself, at the bottom of the loop).
        clean = tok.rstrip(')')
        trail = len(tok) - len(clean)

        # Yield the cleaned token
        lowered = clean.lower()
        if lowered in ('and', 'or', 'not'):
            # Special tokens
            yield lowered, clean
        elif clean:
            # Not a special token, but not composed solely of ')'
            # NOTE(review): this quote test reads `tok` (which still has
            # any trailing parens) rather than `clean`, so a quoted
            # string followed by ')' is treated as a check instead —
            # confirm this matches the intended grammar.
            if len(tok) >= 2 and ((tok[0], tok[-1]) in
                                  [('"', '"'), ("'", "'")]):
                # It's a quoted string
                yield 'string', tok[1:-1]
            else:
                yield 'check', _parse_check(clean)

        # Yield the trailing parens
        for i in range(trail):
            yield ')', ')'
666 | ||
667 | ||
class ParseStateMeta(type):
    """Metaclass for the ParseState class.

    Collects the token sequences declared on reduction methods (via the
    @reducer decorator) into a class-level 'reducers' list.
    """

    def __new__(mcs, name, bases, cls_dict):
        """Create the class.

        Injects 'reducers': a list of (token_sequence, method_name)
        tuples, one per sequence registered on each decorated method.
        """
        cls_dict['reducers'] = [
            (reduction, key)
            for key, value in cls_dict.items()
            if hasattr(value, 'reducers')
            for reduction in value.reducers
        ]
        return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict)
692 | ||
693 | ||
def reducer(*tokens):
    """Decorator for reduction methods.

    Arguments are a sequence of tokens, in order, which should trigger
    running this reduction method. Multiple applications accumulate
    sequences on the function's 'reducers' attribute.
    """

    def decorator(func):
        # Lazily create the list of reducer sequences on first use.
        sequences = getattr(func, 'reducers', None)
        if sequences is None:
            sequences = []
            func.reducers = sequences

        # Register this token sequence.
        sequences.append(list(tokens))
        return func

    return decorator
712 | ||
713 | ||
@six.add_metaclass(ParseStateMeta)
class ParseState(object):
    """Implement the core of parsing the policy language.

    Uses a greedy reduction algorithm to reduce a sequence of tokens into
    a single terminal, the value of which will be the root of the Check tree.

    Note: error reporting is rather lacking.  The best we can get with
    this parser formulation is an overall "parse failed" error.
    Fortunately, the policy language is simple enough that this
    shouldn't be that big a problem.
    """

    # NOTE(review): ParseStateMeta is assumed to collect the token
    # sequences attached by the @reducer decorator into ``reducers``, a
    # class-level list of (token-sequence, method-name) pairs -- confirm
    # against the metaclass definition earlier in this file.

    def __init__(self):
        """Initialize the ParseState."""

        # Parallel stacks: tokens[i] is the grammar symbol corresponding
        # to the parsed value at values[i].
        self.tokens = []
        self.values = []

    def reduce(self):
        """Perform a greedy reduction of the token stream.

        If a reducer method matches, it will be executed, then the
        reduce() method will be called recursively to search for any more
        possible reductions.
        """

        for reduction, methname in self.reducers:
            # A reducer fires when the top of the token stack exactly
            # matches its declared token sequence.
            if (len(self.tokens) >= len(reduction) and
                    self.tokens[-len(reduction):] == reduction):
                # Get the reduction method
                meth = getattr(self, methname)

                # Reduce the token stream
                results = meth(*self.values[-len(reduction):])

                # Update the tokens and values; each result is a
                # (token, value) pair that replaces the consumed slice
                # at the top of both stacks.
                self.tokens[-len(reduction):] = [r[0] for r in results]
                self.values[-len(reduction):] = [r[1] for r in results]

                # Check for any more reductions
                return self.reduce()

    def shift(self, tok, value):
        """Adds one more token to the state.  Calls reduce()."""

        self.tokens.append(tok)
        self.values.append(value)

        # Do a greedy reduce...
        self.reduce()

    @property
    def result(self):
        """Obtain the final result of the parse.

        Raises ValueError if the parse failed to reduce to a single result.
        """

        # Exactly one value must remain: the root of the Check tree.
        if len(self.values) != 1:
            raise ValueError("Could not parse rule")
        return self.values[0]

    @reducer('(', 'check', ')')
    @reducer('(', 'and_expr', ')')
    @reducer('(', 'or_expr', ')')
    def _wrap_check(self, _p1, check, _p2):
        """Turn parenthesized expressions into a 'check' token."""

        return [('check', check)]

    @reducer('check', 'and', 'check')
    def _make_and_expr(self, check1, _and, check2):
        """Create an 'and_expr'.

        Join two checks by the 'and' operator.
        """

        return [('and_expr', AndCheck([check1, check2]))]

    @reducer('and_expr', 'and', 'check')
    def _extend_and_expr(self, and_expr, _and, check):
        """Extend an 'and_expr' by adding one more check."""

        return [('and_expr', and_expr.add_check(check))]

    @reducer('check', 'or', 'check')
    def _make_or_expr(self, check1, _or, check2):
        """Create an 'or_expr'.

        Join two checks by the 'or' operator.
        """

        return [('or_expr', OrCheck([check1, check2]))]

    @reducer('or_expr', 'or', 'check')
    def _extend_or_expr(self, or_expr, _or, check):
        """Extend an 'or_expr' by adding one more check."""

        return [('or_expr', or_expr.add_check(check))]

    @reducer('not', 'check')
    def _make_not_expr(self, _not, check):
        """Invert the result of another check."""

        return [('check', NotCheck(check))]
820 | ||
821 | ||
def _parse_text_rule(rule):
    """Parses policy to the tree.

    Translates a policy written in the policy language into a tree of
    Check objects.

    :param rule: the policy-language text to parse
    :returns: the root Check of the parse tree; TrueCheck for an empty
              rule, FalseCheck if the rule could not be parsed (fail
              closed)
    """

    # Empty rule means always accept
    if not rule:
        return TrueCheck()

    # Parse the token stream
    state = ParseState()
    for tok, value in _parse_tokenize(rule):
        state.shift(tok, value)

    try:
        return state.result
    except ValueError:
        # Couldn't parse the rule.  Pass the rule as a lazy logging
        # argument (not pre-formatted with %) so interpolation is
        # deferred until the record is actually emitted.
        LOG.exception(_LE("Failed to understand rule %s"), rule)

    # Fail closed
    return FalseCheck()
846 | ||
847 | ||
def parse_rule(rule):
    """Parses a policy rule into a tree of Check objects.

    String rules are written in the policy language; anything else is
    treated as the older list-of-lists rule form.
    """

    # Non-string rules use the list syntax; everything else is text.
    if not isinstance(rule, six.string_types):
        return _parse_list_rule(rule)
    return _parse_text_rule(rule)
855 | ||
856 | ||
def register(name, func=None):
    """Register a function or Check class as a policy check.

    :param name: Gives the name of the check type, e.g., 'rule',
                 'role', etc.  If name is None, a default check type
                 will be registered.
    :param func: If given, provides the function or class to register.
                 If not given, returns a function taking one argument
                 to specify the function or class to register,
                 allowing use as a decorator.
    """

    def wrapper(check):
        # Record the check under its type name; hand the object back
        # unchanged so this works as a decorator.
        _checks[name] = check
        return check

    # Direct call with the target: register immediately.  Name-only
    # call: return the decorator for later application.
    return wrapper(func) if func else wrapper
881 | ||
882 | ||
@register("rule")
class RuleCheck(Check):
    def __call__(self, target, creds, enforcer):
        """Recursively checks credentials based on the defined rules."""

        # Both the lookup and the invocation stay inside the try so a
        # KeyError raised by a nested rule also fails closed.
        try:
            referenced = enforcer.rules[self.match]
            return referenced(target, creds, enforcer)
        except KeyError:
            # No matching rule is defined; fail closed.
            return False
893 | ||
894 | ||
@register("role")
class RoleCheck(Check):
    def __call__(self, target, creds, enforcer):
        """Check that there is a matching role in the cred dict."""

        # Case-insensitive membership test over the credential roles.
        wanted = self.match.lower()
        return any(role.lower() == wanted for role in creds['roles'])
901 | ||
902 | ||
@register('http')
class HttpCheck(Check):
    def __call__(self, target, creds, enforcer):
        """Check http: rules by calling to a remote server.

        This example implementation simply verifies that the response
        is exactly 'True'.
        """

        url = ('http:' + self.match) % target

        # Plain object() instances in the target trip circular-reference
        # detection in jsonutils.dumps(), so replace them with empty
        # dicts in a deep copy before serializing.
        scrubbed = copy.deepcopy(target)
        for key in target.keys():
            if type(target.get(key)) is object:
                scrubbed[key] = {}

        body = urlparse.urlencode({
            'target': jsonutils.dumps(scrubbed),
            'credentials': jsonutils.dumps(creds),
        })
        response = urlrequest.urlopen(url, body)
        return response.read() == "True"
928 | ||
929 | ||
@register(None)
class GenericCheck(Check):
    def __call__(self, target, creds, enforcer):
        """Check an individual match.

        Matches look like:

            tenant:%(tenant_id)s
            role:compute:admin
            True:%(user.enabled)s
            'Member':%(role.name)s
        """

        try:
            # Interpolate the right-hand side against the target dict,
            # e.g. '%(tenant_id)s' pulls target['tenant_id'].
            match = self.match % target
        except KeyError:
            # While doing GenericCheck if key not
            # present in Target return false
            return False

        try:
            # Try to interpret self.kind as a literal
            leftval = ast.literal_eval(self.kind)
        except ValueError:
            # Not a Python literal (e.g. 'tenant' or 'user.enabled'):
            # treat it as a dotted path into the credentials dict.
            try:
                kind_parts = self.kind.split('.')
                leftval = creds
                for kind_part in kind_parts:
                    leftval = leftval[kind_part]
            except KeyError:
                # Dotted path not present in creds; fail the match.
                return False
        # Compare as text so literals such as True or 42 line up with
        # the string produced by %-interpolation above.
        return match == six.text_type(leftval)