New upstream version 2.0.2
Johannes Wienke
5 years ago
8 | 8 | __pycache__ |
9 | 9 | /pytestdebug.log |
10 | 10 | /doc/build/ |
11 | /env/ | |
12 | /.ropeproject/ | |
13 | /.mypy_cache/ | |
14 | /.pytest_cache/ | |
15 | /.python-version | |
16 | /.tox/ | |
17 | /Session.vim |
0 | 0 | language: python |
1 | 1 | sudo: false |
2 | python: | |
3 | - 3.5 | |
4 | - 3.6 | |
5 | - nightly | |
2 | 6 | env: |
3 | - PSUTIL_VERSION=5.0.1 | |
4 | - PSUTIL_VERSION=5.4.1 | |
5 | python: | |
6 | - "3.5" | |
7 | - "3.6" | |
8 | - "nightly" | |
7 | - TASK=test | |
8 | - TASK=integration | |
9 | matrix: | |
10 | include: | |
11 | - python: 3.6 | |
12 | env: TASK=checks | |
13 | exclude: | |
14 | - python: nightly | |
15 | env: TASK=integration | |
9 | 16 | install: |
10 | - pip install pytest pytest-runner pytest-cov pytest-mock python-mpd2 psutil==$PSUTIL_VERSION requests codecov lxml sphinx sphinx_rtd_theme | |
17 | - pip install tox-travis | |
11 | 18 | script: |
12 | - pytest --cov=autosuspend | |
13 | - python setup.py build_sphinx | |
14 | after_success: | |
15 | - codecov | |
19 | - tox |
0 | include VERSION |
0 | #!/usr/bin/env python3 | |
1 | """A daemon to suspend a system on inactivity.""" | |
2 | ||
3 | import abc | |
4 | import argparse | |
5 | import configparser | |
6 | import copy | |
7 | import functools | |
8 | import glob | |
9 | import logging | |
10 | import logging.config | |
11 | import os | |
12 | import os.path | |
13 | import pwd | |
14 | import re | |
15 | import socket | |
16 | import subprocess | |
17 | import time | |
18 | from typing import Callable, Iterable, List, Optional, Sequence | |
19 | ||
20 | import psutil | |
21 | ||
22 | ||
# pylint: disable=invalid-name
# Module-level logger (the root logger) used by the free functions below.
_logger = logging.getLogger()
# pylint: enable=invalid-name
26 | ||
27 | ||
class ConfigurationError(RuntimeError):
    """Raised when the configuration of a :class:`Check` is invalid."""
33 | ||
class TemporaryCheckError(RuntimeError):
    """Raised when a check fails in a way that may recover on its own.

    Callers can ignore this error for some time, since subsequent
    executions of the check might succeed again automatically.
    """
43 | ||
class SevereCheckError(RuntimeError):
    """Indicates a severe check error that will probably not recover.

    There is no hope that this situation recovers.
    """

    pass
52 | ||
class Check(object):
    """Base class for checks.

    Subclasses must call this class' ``__init__`` method.

    Note: ``abc.abstractmethod`` is applied without using ``abc.ABCMeta``
    as metaclass, so abstractness is documented but not enforced at
    instantiation time.

    Args:
        name (str):
            Configured name of the check
    """

    @classmethod
    @abc.abstractmethod
    def create(cls, name: str, config: configparser.SectionProxy) -> 'Check':
        """Create a new check instance from the provided configuration.

        Args:
            name (str):
                user-defined name for the check
            config (configparser.SectionProxy):
                config parser section with the configuration for this check

        Raises:
            ConfigurationError:
                Configuration for this check is inappropriate

        """
        pass

    def __init__(self, name: Optional[str] = None) -> None:
        # Fall back to the class name when no explicit name is configured.
        if name:
            self.name = name
        else:
            self.name = self.__class__.__name__
        self.logger = logging.getLogger(
            'check.{}'.format(self.name))

    @abc.abstractmethod
    def check(self) -> Optional[str]:
        """Determine if system activity exists that prevents suspending.

        Returns:
            str:
                A string describing which condition currently prevents sleep,
                else ``None``.

        Raises:
            TemporaryCheckError:
                Check execution currently fails but might recover later
            SevereCheckError:
                Check executions fails severely
        """
        pass

    def __str__(self) -> str:
        return '{name}[class={clazz}]'.format(name=self.name,
                                              clazz=self.__class__.__name__)
109 | ||
110 | ||
class ActiveConnection(Check):
    """Checks if a client connection exists on specified ports."""

    @classmethod
    def create(cls, name, config):
        """Create the check from its configuration section.

        Raises:
            ConfigurationError:
                the ``ports`` option is missing or is not a comma-separated
                list of integers
        """
        try:
            # set comprehension instead of a list -> set round trip
            ports = {int(p.strip()) for p in config['ports'].split(',')}
            return cls(name, ports)
        except KeyError as error:
            raise ConfigurationError('Missing option ports') from error
        except ValueError as error:
            raise ConfigurationError('Ports must be integers') from error

    def __init__(self, name, ports):
        Check.__init__(self, name)
        # set of local port numbers that count as client connections
        self._ports = ports

    def check(self):
        """Report activity when an ESTABLISHED connection uses a port."""
        own_addresses = [(item.family, item.address)
                         for sublist in psutil.net_if_addrs().values()
                         for item in sublist]
        connected = [c.laddr[1]
                     for c in psutil.net_connections()
                     if ((c.family, c.laddr[0]) in own_addresses
                         and c.status == 'ESTABLISHED'
                         and c.laddr[1] in self._ports)]
        if connected:
            return 'Ports {} are connected'.format(connected)
142 | ||
143 | ||
class ExternalCommand(Check):
    """Prevents suspending as long as a user-supplied command succeeds."""

    @classmethod
    def create(cls, name, config):
        """Create the check from its configuration section.

        Raises:
            ConfigurationError: the ``command`` option is missing
        """
        try:
            return cls(name, config['command'].strip())
        except KeyError as error:
            # chain the cause so the original KeyError stays visible
            raise ConfigurationError('Missing command specification') \
                from error

    def __init__(self, name, command):
        Check.__init__(self, name)
        # command line executed through the shell on every check
        self._command = command

    def check(self):
        """Report activity when the configured command exits with 0."""
        try:
            subprocess.check_call(self._command, shell=True)
            return 'Command {} succeeded'.format(self._command)
        except subprocess.CalledProcessError:
            return None
163 | ||
164 | ||
class Kodi(Check):
    """Checks whether a Kodi instance is currently playing."""

    @classmethod
    def create(cls, name, config):
        """Create the check from its configuration section."""
        try:
            return cls(name,
                       config.get('url',
                                  fallback='http://localhost:8080/jsonrpc'),
                       config.getint('timeout', fallback=5))
        except ValueError as error:
            raise ConfigurationError(
                'Url or timeout configuration wrong: {}'.format(error))

    def __init__(self, name, url, timeout):
        Check.__init__(self, name)
        self._url = url
        self._timeout = timeout

    def check(self):
        """Query the JSON-RPC API for any active players."""
        import requests
        import requests.exceptions

        request_url = (self._url +
                       '?request={"jsonrpc": "2.0", '
                       '"id": 1, '
                       '"method": "Player.GetActivePlayers"}')
        try:
            reply = requests.get(request_url, timeout=self._timeout).json()
        except requests.exceptions.RequestException as error:
            raise TemporaryCheckError(error)
        if 'result' not in reply:
            raise TemporaryCheckError('No result array in reply')
        return "Kodi currently playing" if reply['result'] else None
200 | ||
201 | ||
class Load(Check):
    """Prevents suspending when the system load exceeds a threshold."""

    @classmethod
    def create(cls, name, config):
        """Create the check, parsing the configured threshold."""
        try:
            threshold = config.getfloat('threshold', fallback=2.5)
            return cls(name, threshold)
        except ValueError as error:
            raise ConfigurationError(
                'Unable to parse threshold as float: {}'.format(error))

    def __init__(self, name, threshold):
        Check.__init__(self, name)
        self._threshold = threshold

    def check(self):
        # index 1 selects the 5-minute load average
        current = os.getloadavg()[1]
        self.logger.debug("Load: %s", current)
        if current <= self._threshold:
            return None
        return 'Load {} > threshold {}'.format(current, self._threshold)
225 | ||
226 | ||
class Mpd(Check):
    """Checks whether an MPD instance is currently playing music."""

    @classmethod
    def create(cls, name, config):
        """Create the check from its configuration section."""
        try:
            return cls(name,
                       config.get('host', fallback='localhost'),
                       config.getint('port', fallback=6600),
                       config.getint('timeout', fallback=5))
        except ValueError as error:
            raise ConfigurationError(
                'Host port or timeout configuration wrong: {}'.format(error))

    def __init__(self, name, host, port, timeout):
        Check.__init__(self, name)
        self._host = host
        self._port = port
        self._timeout = timeout

    def _get_state(self):
        """Connect to the server and return its current status dict."""
        from mpd import MPDClient
        client = MPDClient()
        client.timeout = self._timeout
        client.connect(self._host, self._port)
        status = client.status()
        client.close()
        client.disconnect()
        return status

    def check(self):
        try:
            playing = self._get_state()['state'] == 'play'
        except (ConnectionError,
                ConnectionRefusedError,
                socket.timeout,
                socket.gaierror) as error:
            raise TemporaryCheckError(error)
        return 'MPD currently playing' if playing else None
268 | ||
269 | ||
class NetworkBandwidth(Check):
    """Prevents suspending when traffic on an interface exceeds a rate."""

    @classmethod
    def create(cls, name, config):
        """Create the check, validating interfaces and thresholds."""
        try:
            interfaces = [part.strip()
                          for part in config['interfaces'].split(',')
                          if part.strip()]
            if not interfaces:
                raise ConfigurationError('No interfaces configured')
            known = psutil.net_if_addrs().keys()
            for candidate in interfaces:
                if candidate not in known:
                    raise ConfigurationError(
                        'Network interface {} does not exist'.format(
                            candidate))
            return cls(name,
                       interfaces,
                       config.getfloat('threshold_send',
                                       fallback=100),
                       config.getfloat('threshold_receive',
                                       fallback=100))
        except KeyError as error:
            raise ConfigurationError(
                'Missing configuration key: {}'.format(error))
        except ValueError as error:
            raise ConfigurationError(
                'Threshold in wrong format: {}'.format(error))

    def __init__(self, name, interfaces, threshold_send, threshold_receive):
        Check.__init__(self, name)
        self._interfaces = interfaces
        # thresholds in bytes per second
        self._threshold_send = threshold_send
        self._threshold_receive = threshold_receive
        # reference counters taken at construction time
        self._previous_values = psutil.net_io_counters(pernic=True)
        self._previous_time = time.time()

    def check(self):
        # NOTE(review): the reference counters are never updated after
        # construction, so rates are averaged over the whole daemon
        # lifetime rather than per check interval — confirm intended.
        current = psutil.net_io_counters(pernic=True)
        elapsed = time.time() - self._previous_time
        for interface in self._interfaces:
            if interface not in current or \
                    interface not in self._previous_values:
                raise TemporaryCheckError(
                    'Interface {} is missing'.format(interface))

            # send direction
            rate_send = (current[interface].bytes_sent -
                         self._previous_values[interface].bytes_sent) \
                / elapsed
            if rate_send > self._threshold_send:
                return 'Interface {} sending rate {} byte/s '\
                       'higher than threshold {}'.format(
                           interface, rate_send, self._threshold_send)

            rate_receive = (current[interface].bytes_recv -
                            self._previous_values[interface].bytes_recv) \
                / elapsed
            if rate_receive > self._threshold_receive:
                return 'Interface {} receive rate {} byte/s '\
                       'higher than threshold {}'.format(
                           interface, rate_receive, self._threshold_receive)
331 | ||
332 | ||
class Ping(Check):
    """Check if one or several hosts are reachable via ping."""

    @classmethod
    def create(cls, name, config):
        """Create the check from its configuration section.

        Raises:
            ConfigurationError: the ``hosts`` option is missing
        """
        try:
            hosts = [h.strip() for h in config['hosts'].split(',')]
            return cls(name, hosts)
        except KeyError as error:
            raise ConfigurationError(
                'Unable to determine hosts to ping: {}'.format(error)) \
                from error

    def __init__(self, name, hosts):
        Check.__init__(self, name)
        self._hosts = hosts

    def check(self):
        """Report activity when any configured host answers one ping."""
        for host in self._hosts:
            cmd = ['ping', '-q', '-c', '1', host]
            if subprocess.call(cmd,
                               stdout=subprocess.DEVNULL,
                               stderr=subprocess.DEVNULL) == 0:
                # lazy %-args instead of eager string concatenation
                self.logger.debug("host %s appears to be up", host)
                return 'Host {} is up'.format(host)
        return None
359 | ||
360 | ||
class Processes(Check):
    """Prevents suspending while one of the named processes is running."""

    @classmethod
    def create(cls, name, config):
        """Create the check from its configuration section."""
        try:
            names = [entry.strip()
                     for entry in config['processes'].split(',')]
        except KeyError:
            raise ConfigurationError('No processes to check specified')
        return cls(name, names)

    def __init__(self, name, processes):
        Check.__init__(self, name)
        self._processes = processes

    def check(self):
        for proc in psutil.process_iter():
            try:
                current_name = proc.name()
            except psutil.NoSuchProcess:
                # process vanished between iteration and inspection
                continue
            for name in self._processes:
                if current_name == name:
                    return 'Process {} is running'.format(name)
        return None
386 | ||
387 | ||
class Smb(Check):
    """Prevents suspending while SMB clients are connected."""

    @classmethod
    def create(cls, name, config):
        return cls(name)

    def check(self):
        try:
            raw = subprocess.check_output(
                ['smbstatus', '-b']).decode('utf-8')
        except subprocess.CalledProcessError as error:
            raise SevereCheckError(error)

        self.logger.debug('Received status output:\n%s', raw)

        # everything after the first '----' separator line lists clients
        clients = []
        header_done = False
        for entry in raw.splitlines():
            if header_done:
                clients.append(entry)
            elif entry.startswith('----'):
                header_done = True

        if not clients:
            return None
        return 'SMB clients are connected:\n{}'.format('\n'.join(clients))
418 | ||
419 | ||
class Users(Check):
    """Prevents suspending while a matching user session is logged in."""

    @classmethod
    def create(cls, name, config):
        """Create the check, compiling the configured patterns."""
        try:
            return cls(name,
                       re.compile(config.get('name', fallback='.*')),
                       re.compile(config.get('terminal', fallback='.*')),
                       re.compile(config.get('host', fallback='.*')))
        except re.error as error:
            raise ConfigurationError(
                'Regular expression is invalid: {}'.format(error))

    def __init__(self, name, user_regex, terminal_regex, host_regex):
        Check.__init__(self, name)
        self._user_regex = user_regex
        self._terminal_regex = terminal_regex
        self._host_regex = host_regex

    def _matches(self, entry):
        """Return True if the login entry matches all three patterns."""
        return (self._user_regex.fullmatch(entry.name) is not None and
                self._terminal_regex.fullmatch(entry.terminal) is not None and
                self._host_regex.fullmatch(entry.host) is not None)

    def check(self):
        for entry in psutil.users():
            if not self._matches(entry):
                continue
            self.logger.debug('User %s on terminal %s from host %s '
                              'matches criteria.', entry.name,
                              entry.terminal, entry.host)
            return 'User {user} is logged in on terminal {terminal} ' \
                   'from {host} since {started}'.format(
                       user=entry.name, terminal=entry.terminal,
                       host=entry.host, started=entry.started)
        return None
456 | ||
457 | ||
def _list_logind_sessions():
    """List running logind sessions and their properties.

    Returns:
        list of (session_id, properties dict):
            A list with tuples of session ids and their associated
            properties represented as dicts.
    """
    import dbus
    bus = dbus.SystemBus()
    login1 = bus.get_object("org.freedesktop.login1",
                            "/org/freedesktop/login1")

    sessions = login1.ListSessions(
        dbus_interface='org.freedesktop.login1.Manager')

    results = []
    for entry in sessions:
        # entry[0] is the session id, entry[4] its D-Bus object path
        session_id, object_path = entry[0], entry[4]
        session_object = bus.get_object('org.freedesktop.login1',
                                        object_path)
        properties_interface = dbus.Interface(
            session_object, 'org.freedesktop.DBus.Properties')
        results.append(
            (session_id,
             properties_interface.GetAll('org.freedesktop.login1.Session')))

    return results
484 | ||
485 | ||
class XIdleTime(Check):
    """Check that local X displays have been idle long enough."""

    @classmethod
    def create(cls, name, config):
        """Create the check from its configuration section.

        Raises:
            ConfigurationError:
                regular expressions or numeric options cannot be parsed
        """
        try:
            return cls(name, config.getint('timeout', fallback=600),
                       config.get('method', fallback='sockets'),
                       re.compile(config.get('ignore_if_process',
                                             fallback=r'a^')),
                       re.compile(config.get('ignore_users',
                                             fallback=r'a^')))
        except re.error as error:
            raise ConfigurationError(
                'Regular expression is invalid: {}'.format(error)) from error
        except ValueError as error:
            raise ConfigurationError(
                'Unable to parse configuration: {}'.format(error)) from error

    def __init__(self, name, timeout, method,
                 ignore_process_re, ignore_users_re):
        Check.__init__(self, name)
        # idle threshold in seconds
        self._timeout = timeout
        if method == 'sockets':
            self._provide_sessions = self._list_sessions_sockets
        elif method == 'logind':
            self._provide_sessions = self._list_sessions_logind
        else:
            raise ValueError(
                "Unknown session discovery method {}".format(method))
        self._ignore_process_re = ignore_process_re
        self._ignore_users_re = ignore_users_re

    def _list_sessions_sockets(self):
        """Lists running X sessions by iterating the X sockets.

        This method assumes that X servers are run under the users using the
        server.
        """
        sockets = glob.glob('/tmp/.X11-unix/X*')
        self.logger.debug('Found sockets: %s', sockets)

        results = []
        for sock in sockets:
            # determine the number of the X display
            try:
                display = int(sock[len('/tmp/.X11-unix/X'):])
            except ValueError:
                self.logger.warning(
                    'Cannot parse display number from socket %s. Skipping.',
                    sock, exc_info=True)
                continue

            # determine the user of the display
            try:
                user = pwd.getpwuid(os.stat(sock).st_uid).pw_name
            except (FileNotFoundError, KeyError):
                self.logger.warning(
                    'Cannot get the owning user from socket %s. Skipping.',
                    sock, exc_info=True)
                continue

            results.append((display, user))

        return results

    def _list_sessions_logind(self):
        """Lists running X sessions using logind.

        This method assumes that a ``Display`` variable is set in the logind
        sessions.
        """
        results = []
        for session_id, properties in _list_logind_sessions():
            if 'Name' in properties and 'Display' in properties:
                try:
                    results.append(
                        (int(properties['Display'].replace(':', '')),
                         str(properties['Name'])))
                except ValueError:
                    # use warning(); Logger.warn() is deprecated
                    self.logger.warning(
                        'Unable to parse display from session properties %s',
                        properties, exc_info=True)
            else:
                self.logger.debug(
                    'Skipping session %s because it does not contain '
                    'a user name and a display', session_id)
        return results

    def _is_skip_process_running(self, user):
        """Return True if *user* runs a process matching the ignore regex."""
        user_processes = []
        for process in psutil.process_iter():
            try:
                if process.username() == user:
                    # remember name and pid now; the process may vanish
                    # before we log below
                    user_processes.append((process.name(), process.pid))
            except (psutil.NoSuchProcess,
                    psutil.ZombieProcess,
                    psutil.AccessDenied):
                # ignore processes which have disappeared etc.
                pass

        for process_name, pid in user_processes:
            if self._ignore_process_re.match(process_name) is not None:
                # Fixed: previous code called .name()/.pid on the collected
                # *string*, raising AttributeError whenever the regex matched.
                self.logger.debug(
                    "Process %s with pid %s matches the ignore regex '%s'."
                    " Skipping idle time check for this user.",
                    process_name, pid, self._ignore_process_re)
                return True

        return False

    def check(self):
        """Return a message if any session's idle time is below the limit."""
        for display, user in self._provide_sessions():
            self.logger.info('Checking display %s of user %s', display, user)

            # check whether this users should be ignored completely
            if self._ignore_users_re.match(user) is not None:
                self.logger.debug("Skipping user '%s' due to request", user)
                continue

            # check whether any of the running processes of this user matches
            # the ignore regular expression. In that case we skip idletime
            # checking because we assume the user has a process running that
            # inevitably tampers with the idle time.
            if self._is_skip_process_running(user):
                continue

            # prepare the environment for the xprintidle call
            env = copy.deepcopy(os.environ)
            env['DISPLAY'] = ':{}'.format(display)
            env['XAUTHORITY'] = os.path.join(os.path.expanduser('~' + user),
                                             '.Xauthority')

            try:
                idle_time = subprocess.check_output(
                    ['sudo', '-u', user, 'xprintidle'], env=env)
                idle_time = float(idle_time.strip()) / 1000.0
            except (subprocess.CalledProcessError, ValueError) as error:
                self.logger.warning(
                    'Unable to determine the idle time for display %s.',
                    display, exc_info=True)
                raise TemporaryCheckError(error)

            self.logger.debug(
                'Idle time for display %s of user %s is %s seconds.',
                display, user, idle_time)

            if idle_time < self._timeout:
                return 'X session {} of user {} ' \
                       'has idle time {} < threshold {}'.format(
                           display, user, idle_time, self._timeout)

        return None
639 | ||
640 | ||
class LogindSessionsIdle(Check):
    """Prevents suspending in case a logind session is marked not idle.

    The decision is based on the ``IdleHint`` property of logind sessions.
    """

    @classmethod
    def create(cls, name, config):
        """Create the check from its configuration section."""
        types = config.get('types', fallback='tty,x11,wayland')
        types = [t.strip() for t in types.split(',')]
        states = config.get('states', fallback='active,online')
        states = [t.strip() for t in states.split(',')]
        return cls(name, types, states)

    def __init__(self, name, types, states):
        Check.__init__(self, name)
        # session types and states considered relevant for the decision
        self._types = types
        self._states = states

    def check(self):
        for session_id, properties in _list_logind_sessions():
            self.logger.debug('Session %s properties: %s',
                              session_id, properties)

            if properties['Type'] not in self._types:
                # Fixed: the properties dict uses the capitalized 'Type'
                # key; the lowercase 'type' raised KeyError when logging.
                self.logger.debug('Ignoring session of wrong type %s',
                                  properties['Type'])
                continue
            if properties['State'] not in self._states:
                self.logger.debug('Ignoring session because its state is %s',
                                  properties['State'])
                continue

            if properties['IdleHint'] == 'no':
                # dropped a stray second format() argument; the message has
                # only one placeholder
                return 'Login session {} is not idle'.format(session_id)

        return None
679 | ||
680 | ||
class XPath(Check):
    """Prevents suspending when an XPath matches a fetched XML document."""

    @classmethod
    def create(cls, name, config):
        """Create the check, validating the configured XPath expression."""
        from lxml import etree
        try:
            expression = config['xpath'].strip()
            # validate the expression
            try:
                etree.fromstring('<a></a>').xpath(expression)
            except etree.XPathEvalError:
                raise ConfigurationError(
                    'Invalid xpath expression: ' + expression)
            timeout = config.getint('timeout', fallback=5)
            return cls(name, expression, config['url'], timeout)
        except ValueError as error:
            raise ConfigurationError('Configuration error ' + str(error))
        except KeyError as error:
            raise ConfigurationError('No ' + str(error) +
                                     ' entry defined for the XPath check')

    def __init__(self, name, xpath, url, timeout):
        Check.__init__(self, name)
        self._xpath = xpath
        self._url = url
        self._timeout = timeout

    def check(self):
        import requests
        import requests.exceptions
        from lxml import etree

        try:
            page = requests.get(self._url, timeout=self._timeout).text
            document = etree.fromstring(page)
            if document.xpath(self._xpath):
                return "XPath matches for url " + self._url
        except requests.exceptions.RequestException as error:
            raise TemporaryCheckError(error)
        except etree.XMLSyntaxError as error:
            raise TemporaryCheckError(error)
721 | ||
722 | ||
def execute_suspend(command: str):
    """Suspend the system by calling the specified command.

    The command is run through the shell (``shell=True``). A non-zero exit
    status is logged with traceback but not propagated, so a failed suspend
    attempt does not terminate the daemon.

    Args:
        command:
            The command to execute, which will be executed using shell
            execution
    """
    _logger.info('Suspending using command: %s', command)
    try:
        subprocess.check_call(command, shell=True)
    except subprocess.CalledProcessError:
        _logger.warning('Unable to execute suspend command: %s', command,
                        exc_info=True)
737 | ||
738 | ||
# pylint: disable=invalid-name
# Global list of configured Check instances; consumed by loop().
_checks = []  # type: List[Check]
# pylint: enable=invalid-name
742 | ||
743 | ||
def execute_checks(checks: Iterable[Check], all_checks: bool, logger) -> bool:
    """Execute the provided checks sequentially.

    Args:
        checks:
            the checks to execute
        all_checks:
            if ``True``, execute all checks even if a previous one already
            matched.

    Return:
        ``True`` if a check matched
    """
    any_matched = False
    for current in checks:
        logger.debug('Executing check %s', current.name)
        try:
            reason = current.check()
        except TemporaryCheckError:
            # temporary failures are tolerated; the check may recover
            logger.warning('Check %s failed. Ignoring...', current,
                           exc_info=True)
            continue
        if reason is None:
            continue
        logger.info('Check %s matched. Reason: %s', current.name, reason)
        any_matched = True
        if not all_checks:
            logger.debug('Skipping further checks')
            break
    return any_matched
772 | ||
773 | ||
def loop(interval: int,
         idle_time: int,
         sleep_fn: Callable,
         all_checks: bool = False,
         run_for: Optional[int] = None,
         woke_up_file: str = '/var/run/autosuspend-just-woke-up') -> None:
    """Run the main loop of the daemon.

    Args:
        interval:
            the length of one iteration of the main loop in seconds
        idle_time:
            the required amount of time the system has to be idle before
            suspension is triggered
        sleep_fn:
            a callable that triggers suspension
        run_for:
            if specified, run the main loop for the specified amount of
            seconds before terminating (approximately)
    """
    logger = logging.getLogger('loop')

    start_time = time.time()
    idle_since = None

    def deadline_reached():
        """True once the optional run_for budget has elapsed."""
        return run_for is not None and time.time() >= start_time + run_for

    while not deadline_reached():
        logger.info('Starting new check iteration')

        matched = execute_checks(_checks, all_checks, logger)

        logger.debug('All checks have been executed')

        if os.path.isfile(woke_up_file):
            # a suspend just ended; restart idle accounting from scratch
            logger.info('Just woke up from suspension. Resetting')
            os.remove(woke_up_file)
            idle_since = None
            time.sleep(interval)
            continue

        if matched:
            logger.info('Check iteration finished. System is active. '
                        'Sleeping until next iteration')
            idle_since = None
            time.sleep(interval)
            continue

        if idle_since is None:
            idle_since = time.time()
        logger.info('No checks matched. System is idle since %s',
                    idle_since)
        if time.time() - idle_since > idle_time:
            logger.info('System is idle long enough. Suspending...')
            sleep_fn()
            # no sleep here: re-check immediately after waking up
            idle_since = None
        else:
            logger.info('Desired idle time of %s secs not reached so far. '
                        'Continuing checks', idle_time)
            time.sleep(interval)
829 | ||
830 | ||
def set_up_checks(config: configparser.ConfigParser) -> List[Check]:
    """Set up :py.class:`Check` instances from a given configuration.

    Args:
        config:
            the configuration to use

    Raises:
        ConfigurationError:
            a check class does not exist, a check factory does not return
            a :class:`Check`, or no checks are enabled at all
    """
    configured_checks = []  # type: List[Check]

    check_section = [s for s in config.sections() if s.startswith('check.')]
    for section in check_section:
        name = section[len('check.'):]
        # legacy method to determine the check name from the section header
        class_name = name
        # if there is an explicit class, use that one with higher priority
        if 'class' in config[section]:
            class_name = config[section]['class']
        enabled = config.getboolean(section, 'enabled', fallback=False)

        if not enabled:
            # lazy %-args instead of eager .format for log calls
            _logger.debug('Skipping disabled check %s', name)
            continue

        _logger.info('Configuring check %s with class %s', name, class_name)
        try:
            klass = globals()[class_name]
        except KeyError as error:
            # chain the cause so the original KeyError stays visible
            raise ConfigurationError(
                'Cannot create check named {}: Class does not exist'.format(
                    class_name)) from error

        check = klass.create(name, config[section])
        if not isinstance(check, Check):
            raise ConfigurationError(
                'Check {} is not a correct Check instance'.format(check))
        _logger.debug('Created check instance %s', check)
        configured_checks.append(check)

    if not configured_checks:
        raise ConfigurationError('No checks enabled')

    return configured_checks
874 | ||
875 | ||
def parse_config(config_file: Iterable[str]):
    """Parse the configuration file.

    Args:
        config_file:
            The file to parse
    """
    _logger.debug('Reading config file %s', config_file)
    parser = configparser.ConfigParser()
    parser.read_file(config_file)
    _logger.debug('Parsed config file: %s', parser)
    return parser
888 | ||
889 | ||
def parse_arguments(args: Optional[Sequence[str]]) -> argparse.Namespace:
    """Parse command line arguments.

    Args:
        args:
            if specified, use the provided arguments instead of the default
            ones determined via the :module:`sys` module.
    """
    parser = argparse.ArgumentParser(
        description='Automatically suspends a server '
                    'based on several criteria',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    # Use the system-wide configuration file as the default when it is
    # readable; otherwise force the user to supply one via --config.
    default_config = None
    try:
        default_config = open('/etc/autosuspend.conf', 'r')
    except (FileNotFoundError, IsADirectoryError, PermissionError):
        pass

    parser.add_argument(
        '-c', '--config',
        dest='config_file',
        metavar='FILE',
        type=argparse.FileType('r'),
        default=default_config,
        required=default_config is None,
        help='The config file to use')
    parser.add_argument(
        '-a', '--allchecks',
        dest='all_checks',
        action='store_true',
        default=False,
        help='Execute all checks even if one has already prevented '
             'the system from going to sleep. Useful to debug individual '
             'checks.')
    parser.add_argument(
        '-r', '--runfor',
        dest='run_for',
        metavar='SEC',
        type=float,
        default=None,
        help="If set, run for the specified amount of seconds before exiting "
             "instead of endless execution.")
    # Three-state option: False (absent), True (flag without argument),
    # or an open file object (flag with argument).
    parser.add_argument(
        '-l', '--logging',
        metavar='FILE',
        type=argparse.FileType('r'),
        nargs='?',
        default=False,
        const=True,
        help='Configures the python logging system. If used '
             'without an argument, all logging is enabled to '
             'the console. If used with an argument, the '
             'configuration is read from the specified file.')

    parsed = parser.parse_args(args)

    _logger.debug('Parsed command line arguments %s', parsed)

    return parsed
948 | ||
949 | ||
def configure_logging(file_or_flag):
    """Configure the python :mod:`logging` system.

    If the provided argument is a `file` instance, try to use the
    pointed to file as a configuration for the logging system. Otherwise,
    if the given argument evaluates to :class:`True`, use a default
    configuration with many logging messages. If everything fails, just log
    starting from the warning level.

    Args:
        file_or_flag (file or bool):
            either a configuration file pointed by a :ref:`file object
            <python:bltin-file-objects>` instance or something that evaluates
            to :class:`bool`.
    """
    if isinstance(file_or_flag, bool):
        if file_or_flag:
            logging.basicConfig(level=logging.DEBUG)
        else:
            # at least configure warnings
            logging.basicConfig(level=logging.WARNING)
    else:
        try:
            logging.config.fileConfig(file_or_flag)
        except Exception:
            # fileConfig can fail in many ways (parse errors, I/O errors,
            # missing sections), so catch broadly and fall back gracefully;
            # the traceback is preserved via exc_info below.
            # at least configure warnings
            logging.basicConfig(level=logging.WARNING)
            _logger.warning('Unable to configure logging from file %s. '
                            'Falling back to warning level.',
                            file_or_flag,
                            exc_info=True)
981 | ||
982 | ||
def main(args=None):
    """Run the daemon."""
    # The created checks are stored in a module-level variable so that
    # other parts of the module can access them.
    global _checks

    parsed = parse_arguments(args)
    configure_logging(parsed.logging)
    config = parse_config(parsed.config_file)
    _checks = set_up_checks(config)

    # Bind the configured suspend command to the suspension callback.
    suspend = functools.partial(
        execute_suspend, config.get('general', 'suspend_cmd'))

    loop(config.getfloat('general', 'interval', fallback=60),
         config.getfloat('general', 'idle_time', fallback=300),
         suspend,
         all_checks=parsed.all_checks,
         run_for=parsed.run_for,
         woke_up_file=config.get('general', 'woke_up_file',
                                 fallback='/var/run/autosuspend-just-woke-up'))
998 | ||
999 | ||
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
0 | [Unit] | |
1 | Description=Notifies autosuspend about suspension | |
2 | Documentation=https://autosuspend.readthedocs.io/en/latest/systemd_integration.html | |
3 | Before=sleep.target | |
4 | ||
5 | [Service] | |
6 | Type=simple | |
7 | ExecStart=/usr/bin/touch /var/run/autosuspend-just-woke-up | |
8 | ||
9 | [Install] | |
10 | WantedBy=sleep.target |
0 | [loggers] | |
1 | keys=root,loop,checks | |
2 | ||
3 | [handlers] | |
4 | keys=consoleHandler | |
5 | ||
6 | [formatters] | |
7 | keys=simpleFormatter | |
8 | ||
9 | [logger_root] | |
10 | level=INFO | |
11 | handlers=consoleHandler | |
12 | ||
13 | [logger_checks] | |
14 | qualname=check | |
15 | propagate=0 | |
16 | level=INFO | |
17 | handlers=consoleHandler | |
18 | ||
19 | [logger_loop] | |
20 | qualname=loop | |
21 | propagate=0 | |
22 | level=DEBUG | |
23 | handlers=consoleHandler | |
24 | ||
25 | [handler_consoleHandler] | |
26 | class=StreamHandler | |
27 | level=DEBUG | |
28 | formatter=simpleFormatter | |
29 | args=(sys.stdout,) | |
30 | ||
31 | [formatter_simpleFormatter] | |
32 | format=%(asctime)s - %(name)s - %(levelname)s - %(message)s | |
33 | datefmt= |
0 | [general] | |
1 | interval = 30 | |
2 | idle_time = 900 | |
3 | suspend_cmd = /usr/bin/systemctl suspend | |
4 | woke_up_file = /var/run/autosuspend-just-woke-up | |
5 | ||
6 | [check.Ping] | |
7 | enabled = false | |
8 | hosts = 192.168.0.7 | |
9 | ||
10 | [check.RemoteUsers] | |
11 | class = Users | |
12 | enabled = true | |
13 | name = .* | |
14 | terminal = .* | |
15 | host = [0-9].* | |
16 | ||
17 | [check.LocalUsers] | |
18 | class = Users | |
19 | enabled = false | |
20 | name = .* | |
21 | terminal = .* | |
22 | host = localhost | |
23 | ||
24 | [check.Smb] | |
25 | enabled = false | |
26 | ||
27 | [check.Nfs] | |
28 | enabled = false | |
29 | ||
30 | [check.Processes] | |
31 | enabled = true | |
32 | processes = rsync,rdiff-backup | |
33 | ||
34 | [check.Mpd] | |
35 | enabled = true | |
36 | timeout = 2 | |
37 | ||
38 | [check.Kodi] | |
39 | enabled = true | |
40 | timeout = 2 | |
41 | ||
42 | [check.ActiveConnection] | |
43 | enabled = true | |
44 | ports = 22,445,139 | |
45 | ||
46 | [check.Load] | |
47 | enabled = true | |
48 | threshold = 2.5 | |
49 | ||
50 | [check.XIdleTime] | |
51 | enabled = true | |
52 | timeout = 360 | |
53 | ||
54 | [check.Tvheadend] | |
55 | class = XPath | |
56 | enabled = false | |
57 | url = http://localhost:9981/status.xml | |
58 | xpath = /currentload/subscriptions[number(.) > 0] | /currentload/recordings/recording |
0 | [Unit] | |
1 | Description=A daemon to suspend your server in case of inactivity | |
2 | Documentation=https://autosuspend.readthedocs.io/en/latest/systemd_integration.html | |
3 | After=network.target | |
4 | ||
5 | [Service] | |
6 | ExecStart=/usr/bin/autosuspend -l /etc/autosuspend-logging.conf | |
7 | ||
8 | [Install] | |
9 | WantedBy=multi-user.target |
0 | [Unit] | |
1 | Description=Notifies autosuspend about suspension | |
2 | Documentation=https://autosuspend.readthedocs.io/en/latest/systemd_integration.html | |
3 | Before=sleep.target | |
4 | ||
5 | [Service] | |
6 | Type=simple | |
7 | ExecStart=/usr/bin/touch /var/run/autosuspend-just-woke-up | |
8 | ||
9 | [Install] | |
10 | WantedBy=sleep.target |
0 | [loggers] | |
1 | keys=root,autosuspend,checks | |
2 | ||
3 | [handlers] | |
4 | keys=consoleHandler | |
5 | ||
6 | [formatters] | |
7 | keys=simpleFormatter | |
8 | ||
9 | [logger_root] | |
10 | level=INFO | |
11 | handlers=consoleHandler | |
12 | ||
13 | [logger_autosuspend] | |
14 | qualname=autosuspend | |
15 | propagate=0 | |
16 | level=INFO | |
17 | handlers=consoleHandler | |
18 | ||
19 | [logger_checks] | |
20 | qualname=autosuspend.checks | |
21 | propagate=0 | |
22 | level=INFO | |
23 | handlers=consoleHandler | |
24 | ||
25 | [handler_consoleHandler] | |
26 | class=StreamHandler | |
27 | level=DEBUG | |
28 | formatter=simpleFormatter | |
29 | args=(sys.stdout,) | |
30 | ||
31 | [formatter_simpleFormatter] | |
32 | format=%(asctime)s - %(name)s - %(levelname)s - %(message)s | |
33 | datefmt= |
0 | ## This is an exemplary documentation file that mainly serves as a syntax explanation. | |
1 | ## For a list of available options and checks, please refer to `man autosuspend.conf` or the online documentation. | |
2 | ||
3 | [general] | |
4 | interval = 30 | |
5 | idle_time = 900 | |
6 | suspend_cmd = /usr/bin/systemctl suspend | |
7 | wakeup_cmd = echo {timestamp:.0f} > /sys/class/rtc/rtc0/wakealarm | |
8 | woke_up_file = /var/run/autosuspend-just-woke-up | |
9 | # Can be used to call a command before suspending, either with scheduled wake up or not. | |
10 | # notify_cmd_wakeup = su myuser -c notify-send -a autosuspend 'Suspending the system. Wake up at {iso}' | |
11 | # notify_cmd_no_wakeup = su myuser -c notify-send -a autosuspend 'Suspending the system.' | |
12 | ||
13 | # Basic activity check configuration. | |
14 | # The check class name is derived from the section header (Ping in this case). | |
15 | # Remember to enable desired checks. They are disabled by default. | |
16 | [check.Ping] | |
17 | enabled = true | |
18 | hosts = 192.168.0.7 | |
19 | ||
20 | # This check is disabled. | |
21 | [check.Smb] | |
22 | enabled = false | |
23 | ||
24 | # Example for a custom check name. | |
25 | # This will use the Users check with the custom name RemoteUsers. | |
26 | # Custom names are necessary in case a check class is used multiple times. | |
27 | # Custom names can also be used for clarification. | |
28 | [check.RemoteUsers] | |
29 | class = Users | |
30 | enabled = true | |
31 | name = .* | |
32 | terminal = .* | |
33 | host = [0-9].* | |
34 | ||
35 | # Here the Users activity check is used again with different settings and a different name | |
36 | [check.LocalUsers] | |
37 | class = Users | |
38 | enabled = true | |
39 | name = .* | |
40 | terminal = .* | |
41 | host = localhost | |
42 | ||
43 | # Checks to determine the next scheduled wakeup are prefixed with 'wakeup'. | |
44 | [wakeup.Calendar] | |
45 | enabled = true | |
46 | url = http://example.org/test.ics | |
47 | ||
48 | # Apart from this, wake up checks reuse the same configuration mechanism. |
0 | [Unit] | |
1 | Description=A daemon to suspend your server in case of inactivity | |
2 | Documentation=https://autosuspend.readthedocs.io/en/latest/systemd_integration.html | |
3 | After=network.target | |
4 | ||
5 | [Service] | |
6 | ExecStart=/usr/bin/autosuspend -l /etc/autosuspend-logging.conf | |
7 | ||
8 | [Install] | |
9 | WantedBy=multi-user.target |
0 | Available checks | |
1 | ---------------- | |
0 | Available activity checks | |
1 | ------------------------- | |
2 | 2 | |
3 | 3 | The following checks for activity are currently implemented. |
4 | 4 | Each of the checks is described with its available configuration options and required optional dependencies.
5 | 5 | |
6 | ActiveCalendarEvent | |
7 | ~~~~~~~~~~~~~~~~~~~ | |
8 | ||
9 | .. program:: check-active-calendar-event | |
10 | ||
11 | Checks an online `iCalendar`_ file for events that are currently running. | |
12 | If so, this indicates activity and prevents suspending the system. | |
13 | Thus, a calendar can be provided with times at which the system should not go to sleep. | |
14 | If this calendar resides on an online service like a groupware it might even be possible to invite the system. | |
15 | ||
16 | Options | |
17 | ^^^^^^^ | |
18 | ||
19 | .. option:: url | |
20 | ||
21 | The URL to query for the iCalendar file | |
22 | ||
23 | .. option:: timeout | |
24 | ||
25 | Timeout for executed requests in seconds. Default: 5. | |
26 | ||
27 | .. option:: username | |
28 | ||
29 | Optional user name to use for authenticating at a server requiring authentication. | |
30 | If used, also a password must be provided. | |
31 | ||
32 | .. option:: password | |
33 | ||
34 | Optional password to use for authenticating at a server requiring authentication. | |
35 | If used, also a user name must be provided. | |
36 | ||
37 | Requirements | |
38 | ^^^^^^^^^^^^ | |
39 | ||
40 | * `requests`_ | |
41 | * `icalendar <python-icalendar_>`_ | |
42 | * `dateutil`_ | |
43 | * `tzlocal`_ | |
44 | ||
6 | 45 | ActiveConnection |
7 | 46 | ~~~~~~~~~~~~~~~~ |
8 | 47 | |
59 | 98 | .. option:: timeout |
60 | 99 | |
61 | 100 | Request timeout in seconds, default: ``5`` |
101 | ||
102 | .. option:: username | |
103 | ||
104 | Optional user name to use for authenticating at a server requiring authentication. | |
105 | If used, also a password must be provided. | |
106 | ||
107 | .. option:: password | |
108 | ||
109 | Optional password to use for authenticating at a server requiring authentication. | |
110 | If used, also a user name must be provided. | |
111 | ||
112 | Requirements | |
113 | ^^^^^^^^^^^^ | |
114 | ||
115 | - `requests`_ | |
116 | ||
117 | KodiIdleTime | |
118 | ~~~~~~~~~~~~ | |
119 | ||
120 | .. program:: check-kodi-idle-time | |
121 | ||
122 | Checks whether there has been interaction with the Kodi user interface recently. | |
123 | This prevents suspending the system in case someone is currently browsing collections etc. | |
124 | This check is redundant to ``XIdleTime`` on systems using an X server, but might be necessary in case Kodi is used standalone. | |
125 | It does not replace the ``Kodi`` check, as the idle time is not updated when media is playing. | |
126 | ||
127 | Options | |
128 | ^^^^^^^ | |
129 | ||
130 | .. option:: idle_time | |
131 | ||
132 | Marks the system active in case a user interaction has appeared within the this amount of seconds until now. | |
133 | Default: ``120`` | |
134 | ||
135 | .. option:: url | |
136 | ||
137 | Base URL of the JSON RPC API of the Kodi instance, default: ``http://localhost:8080/jsonrpc`` | |
138 | ||
139 | .. option:: timeout | |
140 | ||
141 | Request timeout in seconds, default: ``5`` | |
142 | ||
143 | .. option:: username | |
144 | ||
145 | Optional user name to use for authenticating at a server requiring authentication. | |
146 | If used, also a password must be provided. | |
147 | ||
148 | .. option:: password | |
149 | ||
150 | Optional password to use for authenticating at a server requiring authentication. | |
151 | If used, also a user name must be provided. | |
62 | 152 | |
63 | 153 | Requirements |
64 | 154 | ^^^^^^^^^^^^ |
301 | 391 | |
302 | 392 | .. program:: check-xpath |
303 | 393 | |
304 | A generic check which queries a configured URL and expected the reply to contain XML data. | |
394 | A generic check which queries a configured URL and expects the reply to contain XML data. | |
305 | 395 | The returned XML document is checked against a configured `XPath`_ expression and in case the expression matches, the system is assumed to be active. |
306 | 396 | |
307 | This can for instance be used with the TV streaming server `tvheadend`_, which provides its current status via an XML reply. | |
308 | In case you want to prevent suspending in case there are active subscriptions or recordings, use the following XPath:: | |
309 | ||
310 | /currentload/subscriptions[number(.) > 0] | /currentload/recordings/recording | |
311 | ||
312 | If you have a permantently running subscriber like `Kodi`_, increase the ``0`` to ``1``. | |
313 | The second fragment after the ``|`` prevents suspending in case any recording is planned, even far in the future. If you want to be a bit more permissive, the second fragment could be replaced with:: | |
314 | ||
315 | /currentload/recordings/recording/status | /currentload/recordings/recording/next[number(.) < 10] | |
316 | ||
317 | This only prevents suspending in case of active or planned recordings closer that 10 minutes. | |
397 | Some common applications and their respective configuration are: | |
398 | ||
399 | `tvheadend`_ | |
400 | The required URL for `tvheadend`_ is (if running on the same host):: | |
401 | ||
402 | http://127.0.0.1:9981/status.xml | |
403 | ||
404 | In case you want to prevent suspending in case there are active subscriptions or recordings, use the following XPath:: | |
405 | ||
406 | /currentload/subscriptions[number(.) > 0] | /currentload/recordings/recording/start | |
407 | ||
408 | If you have a permanently running subscriber like `Kodi`_, increase the ``0`` to ``1``.
409 | ||
410 | `Plex`_ | |
411 | For `Plex`_, use the following URL (if running on the same host):: | |
412 | ||
413 | http://127.0.0.1:32400/status/sessions/?X-Plex-Token={TOKEN} | |
414 | ||
415 | Where acquiring the token is `documented here <https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/>`_. | |
416 | ||
417 | If suspending should be prevented in case of any activity, this simple `XPath`_ expression will suffice:: | |
418 | ||
419 | /MediaContainer[@size > 2] | |
318 | 420 | |
319 | 421 | Options |
320 | 422 | ^^^^^^^ |
332 | 434 | |
333 | 435 | Timeout for executed requests in seconds. Default: 5. |
334 | 436 | |
437 | .. option:: username | |
438 | ||
439 | Optional user name to use for authenticating at a server requiring authentication. | |
440 | If used, also a password must be provided. | |
441 | ||
442 | .. option:: password | |
443 | ||
444 | Optional password to use for authenticating at a server requiring authentication. | |
445 | If used, also a user name must be provided. | |
446 | ||
335 | 447 | Requirements |
336 | 448 | ^^^^^^^^^^^^ |
337 | 449 |
0 | Available wake up checks | |
1 | ------------------------ | |
2 | ||
3 | The following checks for wake up times are currently implemented. | |
4 | Each of the checks is described with its available configuration options and required optional dependencies. | |
5 | ||
6 | Calendar | |
7 | ~~~~~~~~ | |
8 | ||
9 | .. program:: wakeup-calendar | |
10 | ||
11 | Determines next wake up time from an `iCalendar`_ file. | |
12 | The next event that starts after the current time is chosen as the next wake up time. | |
13 | ||
14 | Remember that updates to the calendar can only be reflected in case the system is currently running.
15 | Changes to the calendar made while the system is sleeping will obviously not trigger an earlier wake up. | |
16 | ||
17 | Options | |
18 | ^^^^^^^ | |
19 | ||
20 | .. option:: url | |
21 | ||
22 | The URL to query for the XML reply. | |
23 | ||
24 | .. option:: username | |
25 | ||
26 | Optional user name to use for authenticating at a server requiring authentication. | |
27 | If used, also a password must be provided. | |
28 | ||
29 | .. option:: password | |
30 | ||
31 | Optional password to use for authenticating at a server requiring authentication. | |
32 | If used, also a user name must be provided. | |
33 | ||
34 | .. option:: xpath | |
35 | ||
36 | The XPath query to execute. | |
37 | Must always return number strings or nothing. | |
38 | ||
39 | .. option:: timeout | |
40 | ||
41 | Timeout for executed requests in seconds. Default: 5. | |
42 | ||
43 | ||
44 | Requirements | |
45 | ^^^^^^^^^^^^ | |
46 | ||
47 | * `requests`_ | |
48 | * `icalendar <python-icalendar_>`_ | |
49 | * `dateutil`_ | |
50 | * `tzlocal`_ | |
51 | ||
52 | Command | |
53 | ~~~~~~~ | |
54 | ||
55 | .. program:: wakeup-command | |
56 | ||
57 | Determines the wake up time by calling an external command.
58 | The command always has to succeed. | |
59 | If something is printed on stdout by the command, this has to be the next wake up time in UTC seconds. | |
60 | ||
61 | The command is executed as is using shell execution. | |
62 | Beware of malicious commands in obtained configuration files. | |
63 | ||
64 | Options | |
65 | ^^^^^^^ | |
66 | ||
67 | .. option:: command | |
68 | ||
69 | The command to execute including all arguments | |
70 | ||
71 | File | |
72 | ~~~~ | |
73 | ||
74 | .. program:: wakeup-file | |
75 | ||
76 | Determines the wake up time by reading a file from a configured location. | |
77 | The file has to contain the planned wake up time as an int or float in seconds UTC.
78 | ||
79 | Options | |
80 | ^^^^^^^ | |
81 | ||
82 | .. option:: path | |
83 | ||
84 | path of the file to read in case it is present | |
85 | ||
86 | ||
87 | Periodic | |
88 | ~~~~~~~~ | |
89 | ||
90 | .. program:: wakeup-periodic | |
91 | ||
92 | Always schedules a wake up at a specified delta from now on. | |
93 | Can be used to let the system wake up every once in a while, for instance, to refresh the calendar used in the ``Calendar`` check. | |
94 | ||
95 | Options | |
96 | ^^^^^^^ | |
97 | ||
98 | .. option:: unit | |
99 | ||
100 | A string indicating in which unit the delta is specified. | |
101 | Valid options are: ``microseconds``, ``milliseconds``, ``seconds``, ``minutes``, ``hours``, ``days``, ``weeks``. | |
102 | ||
103 | .. option:: value | |
104 | ||
105 | The value of the delta as an int. | |
106 | ||
107 | XPath | |
108 | ~~~~~ | |
109 | ||
110 | .. program:: wakeup-xpath | |
111 | ||
112 | A generic check which queries a configured URL and expects the reply to contain XML data. | |
113 | The returned XML document is parsed using a configured `XPath`_ expression that has to return timestamps UTC (as strings, not elements). | |
114 | These are interpreted as the wake up times. | |
115 | In case multiple entries exist, the soonest one is used. | |
116 | ||
117 | Options | |
118 | ^^^^^^^ | |
119 | ||
120 | .. option:: url | |
121 | ||
122 | The URL to query for the XML reply. | |
123 | ||
124 | .. option:: xpath | |
125 | ||
126 | The XPath query to execute. | |
127 | Must always return number strings or nothing. | |
128 | ||
129 | .. option:: timeout | |
130 | ||
131 | Timeout for executed requests in seconds. Default: 5. | |
132 | ||
133 | .. option:: username | |
134 | ||
135 | Optional user name to use for authenticating at a server requiring authentication. | |
136 | If used, also a password must be provided. | |
137 | ||
138 | .. option:: password | |
139 | ||
140 | Optional password to use for authenticating at a server requiring authentication. | |
141 | If used, also a user name must be provided. | |
142 | ||
143 | XPathDelta | |
144 | ~~~~~~~~~~ | |
145 | ||
146 | .. program:: wakeup-xpath-delta | |
147 | ||
148 | Comparable to ``XPath``, but expects that the returned results represent the wake up time as a delta to the current time in a configurable unit. | |
149 | ||
150 | This check can for instance be used for `tvheadend`_ with the following expression:: | |
151 | ||
152 | //recording/next/text() | |
153 | ||
154 | Options | |
155 | ^^^^^^^ | |
156 | ||
157 | .. option:: url | |
158 | ||
159 | The URL to query for the XML reply. | |
160 | ||
161 | .. option:: username | |
162 | ||
163 | Optional user name to use for authenticating at a server requiring authentication. | |
164 | If used, also a password must be provided. | |
165 | ||
166 | .. option:: password | |
167 | ||
168 | Optional password to use for authenticating at a server requiring authentication. | |
169 | If used, also a user name must be provided. | |
170 | ||
171 | .. option:: xpath | |
172 | ||
173 | The XPath query to execute. | |
174 | Must always return number strings or nothing. | |
175 | ||
176 | .. option:: timeout | |
177 | ||
178 | Timeout for executed requests in seconds. Default: 5. | |
179 | ||
180 | .. option:: unit | |
181 | ||
182 | A string indicating in which unit the delta is specified. | |
183 | Valid options are: ``microseconds``, ``milliseconds``, ``seconds``, ``minutes``, ``hours``, ``days``, ``weeks``. | |
184 | Default: minutes |
0 | Changelog | |
1 | ========= | |
2 | ||
3 | 2.0.2 | |
4 | ----- | |
5 | ||
6 | This is a minor bug fix release. | |
7 | ||
8 | Fixes bugs | |
9 | ~~~~~~~~~~ | |
10 | ||
11 | * ``Kodi`` and ``KodiIdleTime`` checks now catch ``JSONDecodeErrors`` (:issue:`45`) | |
12 | * ``Kodi`` and ``KodiIdleTime`` checks now support authentication (:issue:`47`) | |
13 | ||
14 | 2.0 | |
15 | --- | |
16 | ||
17 | This version adds scheduled wake ups as its main features. | |
18 | In addition to checks for activity, a set of checks for future activities can now be configured to determine times at which the systems needs to be online again. | |
19 | The daemon will start suspending in case the next detected wake up time is far enough in the future and schedule an automatic system wake up at the closest determined wake up time. | |
20 | This can, for instance, be used to ensure that the system is up again when a TV show has to be recorded to disk. | |
21 | ||
22 | Below is a detailed list of notable changes. | |
23 | ||
24 | New features | |
25 | ~~~~~~~~~~~~ | |
26 | ||
27 | * Scheduled wake ups (:issue:`9`). | |
28 | * Ability to call configurable user commands before suspending for notification purposes (:issue:`25`). | |
29 | * Checks using network requests now support authentication (:issue:`32`). | |
30 | * Checks using network requests now support ``file://`` URIs (:issue:`36`). | |
31 | ||
32 | New activity checks | |
33 | ^^^^^^^^^^^^^^^^^^^ | |
34 | ||
35 | * ``ActiveCalendarEvent``: Uses an `iCalendar`_ file (via network request) to prevent suspending in case an event in the calendar is currently active (:issue:`24`). | |
36 | * ``KodiIdleTime``: Checks the idle time of `Kodi`_ to prevent suspending in case the menu is used (:issue:`33`). | |
37 | ||
38 | New wakeup checks | |
39 | ^^^^^^^^^^^^^^^^^ | |
40 | ||
41 | * ``Calendar``: Wake up the system at the next event in an `iCalendar`_ file (requested via network, :issue:`30`). | |
42 | * ``Command``: Call an external command to determine the next wake up time (:issue:`26`). | |
43 | * ``File``: Read the next wake up time from a file (:issue:`9`). | |
44 | * ``Periodic``: Wake up at a defined interval, for instance, to refresh calendars for the ``Calendar`` check (:issue:`34`). | |
45 | * ``XPath`` and ``XPathDelta``: Request an XML document and use `XPath`_ to extract the next wakeup time. | |
46 | ||
47 | Fixed bugs | |
48 | ~~~~~~~~~~ | |
49 | ||
50 | * `XPath`_ checks now support responses with explicit encodings (:issue:`29`). | |
51 | ||
52 | Notable changes | |
53 | ~~~~~~~~~~~~~~~ | |
54 | ||
55 | * The namespace of the logging systems has been rearranged (:issue:`38`). | |
56 | Existing logging configurations might require changes. | |
57 | * The default configuration file has been reduced to explain the syntax and semantics. | |
58 | For a list of all available checks, refer the manual instead (:issue:`39`). | |
59 | ||
60 | For a complete list of all addressed issues and new features, please refer to the respective `Github milestone <https://github.com/languitar/autosuspend/issues?utf8=%E2%9C%93&q=is%3Aissue+milestone%3A2.0>`_. |
5 | 5 | |
6 | 6 | # needs_sphinx = '1.0' |
7 | 7 | |
8 | extensions = ['sphinx.ext.intersphinx'] | |
8 | extensions = ['sphinx.ext.intersphinx', 'sphinx_issues'] | |
9 | 9 | |
10 | 10 | templates_path = ['_templates'] |
11 | 11 | source_suffix = '.rst' |
50 | 50 | .. _tvheadend: https://tvheadend.org/ |
51 | 51 | .. _XPath: https://www.w3.org/TR/xpath/ |
52 | 52 | .. _logind: https://www.freedesktop.org/wiki/Software/systemd/logind/ |
53 | .. _iCalendar: https://tools.ietf.org/html/rfc5545 | |
54 | .. _dateutil: https://dateutil.readthedocs.io | |
55 | .. _python-icalendar: https://icalendar.readthedocs.io | |
56 | .. _tzlocal: https://pypi.org/project/tzlocal/ | |
57 | .. _requests-file: https://github.com/dashea/requests-file | |
58 | .. _Plex: https://www.plex.tv/ | |
53 | 59 | |
54 | 60 | .. |project| replace:: {project} |
55 | 61 | .. |project_bold| replace:: **{project}** |
64 | 70 | html_theme = 'sphinx_rtd_theme' |
65 | 71 | # html_theme_options = {} |
66 | 72 | |
67 | html_static_path = ['_static'] | |
73 | # html_static_path = ['_static'] | |
68 | 74 | |
69 | 75 | html_sidebars = { |
70 | 76 | '**': [ |
87 | 93 | [author], |
88 | 94 | 5), |
89 | 95 | ] |
96 | ||
97 | # issues | |
98 | issues_github_path = 'languitar/autosuspend' |
10 | 10 | interval = 30 |
11 | 11 | idle_time = 900 |
12 | 12 | suspend_cmd = /usr/bin/systemctl suspend |
13 | wakeup_cmd = echo {timestamp:.0f} > /sys/class/rtc/rtc0/wakealarm | |
14 | notify_cmd_wakeup = su myuser -c notify-send -a autosuspend 'Suspending the system. Wake up at {iso}' | |
15 | notify_cmd_no_wakeup = su myuser -c notify-send -a autosuspend 'Suspending the system.' | |
13 | 16 | |
14 | 17 | [check.Ping] |
15 | 18 | enabled = false |
21 | 24 | name = .* |
22 | 25 | terminal = .* |
23 | 26 | host = [0-9].* |
27 | ||
28 | [wakeup.File] | |
29 | enabled = True | |
30 | path = /var/run/autosuspend/wakeup | |
24 | 31 | |
25 | The configuration file consists of a ``[general]`` section, which specifies general processing options, and multiple options of the format ``[check.*]``. | |
26 | These sections describe the checks to execute to determine inactivity. | |
32 | The configuration file consists of a ``[general]`` section, which specifies general processing options, and multiple sections of the format ``[check.*]`` and ``[wakeup.*]``. | |
33 | These sections describe the activity and wake up checks to execute. | |
27 | 34 | |
28 | General Configuration | |
35 | General configuration | |
29 | 36 | ~~~~~~~~~~~~~~~~~~~~~ |
30 | 37 | |
31 | 38 | .. program:: config-general |
39 | 46 | .. option:: idle_time |
40 | 47 | |
41 | 48 | The required amount of time in seconds with no detected activity before the host will be suspended. |
49 | Default: 300 seconds | |
50 | ||
51 | .. option:: min_sleep_time | |
52 | ||
53 | The minimal amount of time in seconds the system has to sleep for actually triggering suspension. | |
54 | If a scheduled wake up results in an effective time below this value, the system will not sleep. | |
55 | Default: 1200 seconds | |
56 | ||
57 | .. option:: wakeup_delta | |
58 | ||
59 | Wake up the system this amount of seconds earlier than the time that was determined for an event that requires the system to be up. | |
60 | This value adds a safety margin for the time the wake up effectively takes.
61 | Default: 30 seconds | |
42 | 62 | |
43 | 63 | .. option:: suspend_cmd |
44 | 64 | |
45 | 65 | The command to execute in case the host shall be suspended. |
46 | 66 | This line can contain additional command line arguments to the command to execute. |
67 | ||
68 | .. option:: wakeup_cmd | |
69 | ||
70 | The command to execute for scheduling a wake up of the system. | |
71 | The given string is processed using Python's :meth:`str.format` and a format argument called ``timestamp`` encodes the UTC timestamp of the planned wake up time (float). | |
72 | Additionally ``iso`` can be used to acquire the timestamp in ISO 8601 format. | |
73 | ||
74 | .. option:: notify_cmd_wakeup | |
75 | ||
76 | A command to execute before the system is going to suspend for the purpose of notifying interested clients. | |
77 | This command is only called in case a wake up is scheduled. | |
78 | The given string is processed using Python's :meth:`str.format` and a format argument called ``timestamp`` encodes the UTC timestamp of the planned wake up time (float). | |
79 | Additionally ``iso`` can be used to acquire the timestamp in ISO 8601 format. | |
80 | If empty or not specified, no command will be called. | |
81 | ||
82 | .. option:: notify_cmd_no_wakeup | |
83 | ||
84 | A command to execute before the system is going to suspend for the purpose of notifying interested clients. | |
85 | This command is only called in case NO wake up is scheduled. | |
86 | Hence, no string formatting options are available. | |
87 | If empty or not specified, no command will be called. | |
47 | 88 | |
48 | 89 | .. option:: woke_up_file |
49 | 90 | |
52 | 93 | Thus, changing the location also requires adapting the respective service. |
53 | 94 | Refer to :ref:`systemd-integration` for further details. |
54 | 95 | |
55 | Check Configuration | |
56 | ~~~~~~~~~~~~~~~~~~~ | |
96 | Activity check configuration | |
97 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
57 | 98 | |
58 | 99 | .. program:: config-check |
59 | 100 | |
60 | For each check to execute, a section with the name format ``[check.*]`` needs to be created. | |
101 | For each activity check to execute, a section with the name format ``[check.*]`` needs to be created. | |
61 | 102 | Each check has a name and an executing class which implements the behavior. |
62 | 103 | The fraction of the section name ``check.`` determines the name, and in case no class option is given inside the section, also the class which implements the check. |
63 | In case the `option:`class` option is specified, the name is completely user-defined and the same check can even be instantiated multiple times with differing names. | |
104 | In case the :option:`class` option is specified, the name is completely user-defined and the same check can even be instantiated multiple times with differing names. | |
64 | 105 | |
65 | 106 | For each check, these generic options can be specified: |
66 | 107 | |
67 | 108 | .. option:: class |
68 | 109 | |
69 | 110 | Name of the class implementing the check. |
70 | If this is not specified, the section name must represent a valid check class (see list below). | |
111 | If the name does not contain a dot (``.``), this is assumed to be one of the checks provided by |project| internally. | |
112 | Otherwise, this can be used to pull in third-party checks. | |
113 | If this option is not specified, the section name must represent a valid internal check class. | |
71 | 114 | |
72 | 115 | .. option:: enabled |
73 | 116 | |
75 | 118 | ``false`` is assumed if not specified. |
76 | 119 | |
77 | 120 | Furthermore, each check might have custom options. |
121 | ||
122 | Wake up check configuration | |
123 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
124 | ||
125 | Wake up checks use the same configuration logic as the previously described activity checks. | |
126 | However, the configuration file sections start with ``wakeup.`` instead of ``check.``. |
2 | 2 | |
3 | 3 | .. include:: configuration_file.inc |
4 | 4 | |
5 | For options of individual checks, please refer to :ref:`available-checks`. | |
5 | For options of individual checks, please refer to :ref:`available-checks` and :ref:`available-wakeups`. |
0 | |project_program| is a daemon that periodically that suspends a system on inactivity. | |
1 | For this purpose, it periodically iterates a number of user-configurable checks, which shall indicate whether a certain activity on the host is currently present that should prevent the host from suspending. | |
0 | |project_program| is a daemon that periodically suspends a system on inactivity and wakes it up again automatically in case it is needed. | |
1 | For this purpose, |project_program| periodically iterates a number of user-configurable activity checks, which indicate whether an activity on the host is currently present that should prevent the host from suspending. | |
2 | 2 | In case one of the checks indicates such activity, no action is taken and periodic checking continues. |
3 | 3 | Otherwise, in case no activity can be detected, this state needs to be present for a specified amount of time before the host is suspended by |project_program|. |
4 | In addition to the activity checks, wake up checks are used to determine planned future activities of the system (for instance, a TV recording or a periodic backup). | |
5 | In case such activities are known before suspending, |project_program| triggers a command to wake up the system automatically before the soonest activity. |
0 | |project| - a daemon to suspend a system on inactivity | |
1 | ====================================================== | |
0 | |project| - a daemon to automatically suspend and wake up a system | |
1 | ================================================================== | |
2 | 2 | |
3 | 3 | .. include:: description.inc |
4 | 4 | |
9 | 9 | installation |
10 | 10 | configuration_file |
11 | 11 | available_checks |
12 | available_wakeups | |
12 | 13 | systemd_integration |
13 | 14 | wakeup |
14 | 15 | debugging |
15 | 16 | manpages |
17 | changelog | |
16 | 18 | support |
17 | 19 | |
18 | 20 | Indices and tables |
5 | 5 | Requirements |
6 | 6 | ------------ |
7 | 7 | |
8 | The minimal requirements are: | |
9 | ||
8 | 10 | * `Python 3`_ |
9 | 11 | * `psutil`_ |
10 | 12 | |
11 | Additionally, the some checks need the following dependencies to function properly: | |
13 | Additionally, some checks need further dependencies to function properly. | |
14 | Please refer to :ref:`available-checks` for individual requirements. | |
12 | 15 | |
13 | * `python-mpd2`_ | |
14 | * `requests`_ | |
15 | * `lxml`_ | |
16 | ||
17 | Please refer to :ref:`available-checks` for further details on these checks in which check requires which optional dependency. | |
16 | If checks using URLs to load data should support ``file://`` URLs, `requests-file`_ is needed. | |
18 | 17 | |
19 | 18 | Binary packages |
20 | 19 | --------------- |
12 | 12 | |
13 | 13 | .. include:: configuration_file.inc |
14 | 14 | |
15 | The options of individual checks are outlined below. | |
16 | ||
15 | 17 | .. include:: available_checks.inc |
16 | 18 | |
17 | The options of individual checks are outlined below. | |
19 | .. include:: available_wakeups.inc |
0 | System wakeup | |
1 | ============= | |
0 | On-demand wakeup | |
1 | ================ | |
2 | 2 | |
3 | |project_bold| itself does not handle waking up the system in case it is needed again. | |
4 | Several solutions exist for this case, which will be enumerated here. | |
5 | ||
6 | Wake on LAN | |
7 | ----------- | |
8 | ||
9 | A simple way to wake up the system is to enable `Wake on LAN <https://en.wikipedia.org/wiki/Wake-on-LAN>`_. | |
3 | |project_bold| itself only handles wake ups for events that were foreseeable at the time the system was put into sleep mode. | |
4 | In case the system also has to be used on-demand, a simple way to wake up the system is to enable `Wake on LAN <https://en.wikipedia.org/wiki/Wake-on-LAN>`_. | |
10 | 5 | Here, a special network packet can be used to wake up the system again. |
11 | Multiple front-ends exist to send these magick packets. | |
6 | Multiple front-ends exist to send these magic packets. | |
12 | 7 | The typical usage scenario with this approach is to manually send the magic packet when the system is needed, wait a few seconds, and then to perform the intended tasks with the system. |
13 | 8 | |
14 | 9 | Wake on LAN needs to be specifically enabled on the system. |
27 | 22 | * `Wake On Lan <https://f-droid.org/en/packages/net.mafro.android.wakeonlan/>`__: Android, open-source |
28 | 23 | * `Kore (Kodi remote control) <https://play.google.com/store/apps/details?id=org.xbmc.kore>`__: Android, for Kodi users |
29 | 24 | * `Mocha WOL <https://itunes.apple.com/de/app/mocha-wol/id422625778>`__: iOS |
30 | ||
31 | RTC wake up timers | |
32 | ------------------ | |
33 | ||
34 | Another option is to schedule a time at which the system shall wake up automatically using RTC timers. | |
35 | This can be handy in case the system shall perform a task in the future, but can sleep until this task starts. | |
36 | A common front-end to control these timers is `rtcwake <https://linux.die.net/man/8/rtcwake>`__, which can be used as follow: | |
37 | ||
38 | .. code-block:: bash | |
39 | ||
40 | # wake up again at a specified date | |
41 | rtcwake -m no --date '2017-12-24 17:02:23' | |
42 | # wake up again in a number of seconds | |
43 | rtcwake -m no -s 600 | |
44 | ||
45 | Please refer to :manpage:`rtcwake(8)` for further possibilities. | |
46 | ||
47 | Scheduled wake ups are a planned feature for |project|. |
0 | build: | |
1 | image: latest | |
2 | ||
3 | python: | |
4 | version: 3.6 | |
5 | setup_py_install: true | |
6 | ||
7 | requirements_file: requirements-doc.txt |
3 | 3 | [build_sphinx] |
4 | 4 | source-dir = doc/source |
5 | 5 | build-dir = doc/build |
6 | ||
7 | [flake8] | |
8 | exclude = | |
9 | doc, | |
10 | .tox, | |
11 | .git, | |
12 | __pycache__, | |
13 | build, | |
14 | dist, | |
15 | .eggs, | |
16 | env, | |
17 | .mypy_cache | |
18 | mypy_config=setup.cfg | |
19 | per-file-ignores = | |
20 | test_*: D1 | |
21 | tests/__init__.py: D1 | |
22 | ignore = D202,D10,D102,D413,P1 | |
23 | application-import-names = autosuspend | |
24 | import-order-style = google | |
25 | ||
26 | [mypy] | |
27 | ignore_missing_imports=True | |
28 | ||
29 | [tool:pytest] | |
30 | markers = | |
31 | integration: longer-running integration tests |
0 | 0 | import os |
1 | 1 | import os.path |
2 | 2 | |
3 | from setuptools import setup | |
3 | from setuptools import find_packages, setup | |
4 | 4 | |
5 | 5 | name = 'autosuspend' |
6 | 6 | |
31 | 31 | 'Mpd': ['python-mpd2'], |
32 | 32 | 'Kodi': ['requests'], |
33 | 33 | 'XPath': ['lxml', 'requests'], |
34 | 'Logind support': ['dbus-python'], | |
34 | 'Logind': ['dbus-python'], | |
35 | 'ical': ['requests', 'icalendar', 'python-dateutil', 'tzlocal'], | |
36 | 'localfiles': ['requests-file'], | |
37 | 'test': ['pytest', 'pytest-cov', 'pytest-mock', 'freezegun'], | |
35 | 38 | }, |
36 | tests_require=[ | |
37 | 'pytest', | |
38 | 'pytest-cov', | |
39 | 'pytest-mock', | |
40 | ], | |
41 | 39 | |
42 | scripts=[ | |
43 | 'autosuspend' | |
44 | ], | |
40 | package_dir={ | |
41 | '': 'src' | |
42 | }, | |
43 | packages=find_packages('src'), | |
44 | ||
45 | entry_points={ | |
46 | 'console_scripts': [ | |
47 | 'autosuspend = autosuspend:main', | |
48 | ] | |
49 | }, | |
50 | ||
45 | 51 | data_files=[ |
46 | ('etc', ['autosuspend.conf', | |
47 | 'autosuspend-logging.conf']), | |
48 | ('lib/systemd/system', ['autosuspend.service', | |
49 | 'autosuspend-detect-suspend.service']) | |
52 | ('etc', ['data/autosuspend.conf', | |
53 | 'data/autosuspend-logging.conf']), | |
54 | ('lib/systemd/system', ['data/autosuspend.service', | |
55 | 'data/autosuspend-detect-suspend.service']), | |
50 | 56 | ], |
51 | 57 | ) |
0 | #!/usr/bin/env python3 | |
1 | """A daemon to suspend a system on inactivity.""" | |
2 | ||
3 | import argparse | |
4 | import configparser | |
5 | import datetime | |
6 | import functools | |
7 | import logging | |
8 | import logging.config | |
9 | import os | |
10 | import os.path | |
11 | import subprocess | |
12 | import time | |
13 | from typing import (Callable, | |
14 | IO, | |
15 | Iterable, | |
16 | List, | |
17 | Optional, | |
18 | Sequence, | |
19 | Type, | |
20 | TypeVar) | |
21 | ||
22 | from .checks import (Activity, | |
23 | Check, | |
24 | ConfigurationError, | |
25 | TemporaryCheckError, | |
26 | Wakeup) | |
27 | from .util import logger_by_class_instance | |
28 | ||
29 | ||
30 | # pylint: disable=invalid-name | |
31 | _logger = logging.getLogger('autosuspend') | |
32 | # pylint: enable=invalid-name | |
33 | ||
34 | ||
def execute_suspend(command: str, wakeup_at: Optional[datetime.datetime]):
    """Put the system to sleep by running the configured shell command.

    Args:
        command:
            shell command line (may contain additional arguments) that is
            executed via the system shell
        wakeup_at:
            the next scheduled wake up time, if any; purely informational
            here, scheduling happens elsewhere
    """
    _logger.info('Suspending using command: %s', command)
    # a failing suspend command is logged but must not crash the daemon
    try:
        subprocess.check_call(command, shell=True)
    except subprocess.CalledProcessError:
        _logger.warning('Unable to execute suspend command: %s', command,
                        exc_info=True)
51 | ||
52 | ||
def notify_suspend(command_wakeup_template: Optional[str],
                   command_no_wakeup: Optional[str],
                   wakeup_at: Optional[datetime.datetime]):
    """Call a command to notify on suspending.

    At most one of the two commands is executed, depending on whether a
    wake up is scheduled.

    Args:
        command_wakeup_template:
            A template for the command to execute in case a wakeup is
            scheduled.
            It will be executed using shell execution.
            The template is processed with string formatting to include
            information on a potentially scheduled wakeup.
            Notifications can be disabled by providing ``None`` here.
        command_no_wakeup:
            Command to execute for notification in case no wake up is
            scheduled.
            Will be executed using shell execution.
        wakeup_at:
            if not ``None``, this is the time the system will wake up again
    """

    def safe_exec(command):
        # notification failures are only logged so that they never prevent
        # the subsequent suspend from happening
        _logger.info('Notifying using command: %s', command)
        try:
            subprocess.check_call(command, shell=True)
        except subprocess.CalledProcessError:
            _logger.warning('Unable to execute notification command: %s',
                            command, exc_info=True)

    if wakeup_at and command_wakeup_template:
        command = command_wakeup_template.format(
            timestamp=wakeup_at.timestamp(),
            iso=wakeup_at.isoformat())
        safe_exec(command)
    elif not wakeup_at and command_no_wakeup:
        safe_exec(command_no_wakeup)
    else:
        _logger.info('No suitable notification command configured.')
91 | ||
92 | ||
def notify_and_suspend(suspend_cmd: str,
                       notify_cmd_wakeup_template: Optional[str],
                       notify_cmd_no_wakeup: Optional[str],
                       wakeup_at: Optional[datetime.datetime]):
    """Notify interested clients and then suspend the system.

    Convenience composition of :func:`notify_suspend` followed by
    :func:`execute_suspend`, suitable as the ``sleep_fn`` of a processor.
    """
    notify_suspend(notify_cmd_wakeup_template, notify_cmd_no_wakeup, wakeup_at)
    execute_suspend(suspend_cmd, wakeup_at)
99 | ||
100 | ||
def schedule_wakeup(command_template: str, wakeup_at: datetime.datetime):
    """Schedule a system wake up by running the configured command.

    The template receives the target time both as a UNIX ``timestamp``
    (float) and as an ``iso`` 8601 formatted string.
    """
    cmd = command_template.format(timestamp=wakeup_at.timestamp(),
                                  iso=wakeup_at.isoformat())
    _logger.info('Scheduling wakeup using command: %s', cmd)
    # failures are logged only; the daemon keeps running without a wake up
    try:
        subprocess.check_call(cmd, shell=True)
    except subprocess.CalledProcessError:
        _logger.warning('Unable to execute wakeup scheduling command: %s',
                        cmd, exc_info=True)
110 | ||
111 | ||
def execute_checks(checks: Iterable[Activity],
                   all_checks: bool,
                   logger) -> bool:
    """Execute the provided checks sequentially.

    Args:
        checks:
            the checks to execute
        all_checks:
            if ``True``, execute all checks even if a previous one already
            matched.

    Return:
        ``True`` if a check matched
    """
    any_matched = False
    for check in checks:
        logger.debug('Executing check %s', check.name)
        try:
            reason = check.check()
        except TemporaryCheckError:
            # temporary failures are tolerated; the check may recover later
            logger.warning('Check %s failed. Ignoring...', check,
                           exc_info=True)
            continue
        if reason is None:
            continue
        logger.info('Check %s matched. Reason: %s', check.name, reason)
        any_matched = True
        if not all_checks:
            logger.debug('Skipping further checks')
            break
    return any_matched
142 | ||
143 | ||
def execute_wakeups(wakeups: Iterable[Wakeup],
                    timestamp: datetime.datetime,
                    logger) -> Optional[datetime.datetime]:
    """Query all wake up checks and return the earliest sane wake up time.

    Results that lie in the past (relative to ``timestamp``) and checks
    that fail temporarily are ignored. Returns ``None`` if no check
    produced a usable wake up time.
    """
    candidates = []
    for wakeup in wakeups:
        try:
            candidate = wakeup.check(timestamp)
        except TemporaryCheckError:
            logger.warning('Wakeup %s failed. Ignoring...', wakeup,
                           exc_info=True)
            continue

        # sanity checks
        if candidate is None:
            continue
        if candidate <= timestamp:
            logger.warning('Wakeup %s returned a scheduled wakeup at %s, '
                           'which is earlier than the current time %s. '
                           'Ignoring.',
                           wakeup, candidate, timestamp)
            continue

        candidates.append(candidate)

    return min(candidates) if candidates else None
172 | ||
173 | ||
class Processor:
    """Implements the logic for triggering suspension.

    Args:
        activities:
            the activity checks to execute
        wakeups:
            the wakeup checks to execute
        idle_time:
            the required amount of time the system has to be idle before
            suspension is triggered in seconds
        min_sleep_time:
            the minimum time the system has to sleep before it is woken up
            again in seconds.
        wakeup_delta:
            wake up this amount of seconds before the scheduled wake up time.
        sleep_fn:
            a callable that triggers suspension.
            It receives the scheduled wake up time as its single argument,
            or ``None`` in case no wake up is planned.
        wakeup_fn:
            a callable that schedules the wakeup at the specified time in UTC
            seconds
        all_activities:
            if ``True``, execute all activity checks even if a previous one
            already matched.
    """

    def __init__(self,
                 activities: List[Activity],
                 wakeups: List[Wakeup],
                 idle_time: float,
                 min_sleep_time: float,
                 wakeup_delta: float,
                 sleep_fn: Callable,
                 wakeup_fn: Callable[[datetime.datetime], None],
                 all_activities: bool) -> None:
        self._logger = logger_by_class_instance(self)
        self._activities = activities
        self._wakeups = wakeups
        self._idle_time = idle_time
        self._min_sleep_time = min_sleep_time
        self._wakeup_delta = wakeup_delta
        self._sleep_fn = sleep_fn
        self._wakeup_fn = wakeup_fn
        self._all_activities = all_activities
        # time at which the system was first observed to be idle, or None
        # while activity is (still) present
        self._idle_since = None  # type: Optional[datetime.datetime]

    def _reset_state(self, reason: str) -> None:
        # forget the idle start time so the idle period restarts from zero
        self._logger.info('%s. Resetting state', reason)
        self._idle_since = None

    def iteration(self, timestamp: datetime.datetime, just_woke_up: bool):
        """Execute one iteration of the suspension decision logic.

        Args:
            timestamp:
                the current time
            just_woke_up:
                whether the system just returned from a suspension, which
                resets the idle tracking
        """
        self._logger.info('Starting new check iteration')

        # determine system activity
        active = execute_checks(self._activities, self._all_activities,
                                self._logger)
        self._logger.debug('All activity checks have been executed. '
                           'Active: %s', active)
        # determine potential wake ups
        wakeup_at = execute_wakeups(self._wakeups, timestamp, self._logger)
        self._logger.debug('Checks report, system should wake up at %s',
                           wakeup_at)
        if wakeup_at is not None:
            # wake up earlier to compensate for the time the wake up itself
            # takes (see wakeup_delta option)
            wakeup_at -= datetime.timedelta(seconds=self._wakeup_delta)
            self._logger.debug('With delta, system should wake up at %s',
                               wakeup_at)

        # exit in case something prevents suspension
        if just_woke_up:
            self._reset_state('Just woke up from suspension')
            return
        if active:
            self._reset_state('System is active')
            return

        # set idle timestamp if required
        if self._idle_since is None:
            self._idle_since = timestamp

        self._logger.info('System is idle since %s', self._idle_since)

        # determine if the system has been idle long enough
        self._logger.debug('Idle seconds: %s',
                           (timestamp - self._idle_since).total_seconds())
        if (timestamp - self._idle_since).total_seconds() > self._idle_time:
            self._logger.info('System is idle long enough.')

            # idle time would be reached, handle wake up
            if wakeup_at is not None:
                wakeup_in = wakeup_at - timestamp
                # refuse to sleep for impractically short periods
                if wakeup_in.total_seconds() < self._min_sleep_time:
                    self._logger.info('Would wake up in %s seconds, which is '
                                      'below the minimum amount of %s s. '
                                      'Not suspending.',
                                      wakeup_in.total_seconds(),
                                      self._min_sleep_time)
                    return

                # schedule wakeup
                self._logger.info('Scheduling wakeup at %s', wakeup_at)
                self._wakeup_fn(wakeup_at)

            self._reset_state('Going to suspend')
            self._sleep_fn(wakeup_at)
        else:
            self._logger.info('Desired idle time of %s s not reached yet.',
                              self._idle_time)
284 | ||
285 | ||
def loop(processor: Processor,
         interval: int,
         run_for: Optional[int],
         woke_up_file: str) -> None:
    """Run the main loop of the daemon.

    Args:
        processor:
            the processor to use for handling the suspension computations
        interval:
            the length of one iteration of the main loop in seconds
        run_for:
            if specified, run the main loop for the specified amount of
            seconds before terminating (approximately)
        woke_up_file:
            path of a file whose existence indicates that the system just
            woke up from suspension; the file is deleted once detected
    """

    start_time = datetime.datetime.now(datetime.timezone.utc)
    while (run_for is None) or (datetime.datetime.now(datetime.timezone.utc) <
                                (start_time + datetime.timedelta(
                                    seconds=run_for))):

        # the file is created externally (e.g. by a systemd unit) after a
        # resume; consume it so the next iteration starts fresh
        just_woke_up = os.path.isfile(woke_up_file)
        if just_woke_up:
            os.remove(woke_up_file)

        processor.iteration(datetime.datetime.now(datetime.timezone.utc),
                            just_woke_up)

        time.sleep(interval)
320 | ||
321 | ||
322 | CheckType = TypeVar('CheckType', bound=Check) | |
323 | ||
324 | ||
def set_up_checks(config: configparser.ConfigParser,
                  prefix: str,
                  internal_module: str,
                  target_class: Type[CheckType],
                  error_none: bool = False) -> List[CheckType]:
    """Set up :py.class:`Check` instances from a given configuration.

    Args:
        config:
            the configuration to use
        prefix:
            The prefix of sections in the configuration file to use for
            creating instances.
        internal_module:
            Name of the submodule of ``autosuspend.checks`` to use for
            discovering internal check classes.
        target_class:
            the base class to check new instance against
        error_none:
            Raise an error if nothing was configured?

    Raises:
        ConfigurationError:
            a check class or its module cannot be found, a created check has
            the wrong type, or ``error_none`` is set and no check is enabled
    """
    configured_checks = []  # type: List[CheckType]

    section_prefix = '{}.'.format(prefix)
    check_section = [s for s in config.sections()
                     if s.startswith(section_prefix)]
    for section in check_section:
        name = section[len(section_prefix):]
        # legacy method to determine the check name from the section header
        class_name = name
        # if there is an explicit class, use that one with higher priority
        if 'class' in config[section]:
            class_name = config[section]['class']
        enabled = config.getboolean(section, 'enabled', fallback=False)

        if not enabled:
            _logger.debug('Skipping disabled check %s', name)
            continue

        # try to find the required class
        if '.' in class_name:
            # dot in class name means external class
            import_module, import_class = class_name.rsplit('.', maxsplit=1)
        else:
            # no dot means internal class
            import_module = 'autosuspend.checks.{}'.format(internal_module)
            import_class = class_name
        # use lazy % formatting consistently with the rest of the module
        _logger.info(
            'Configuring check %s with class %s from module %s '
            'using config section items %s',
            name, import_class, import_module, dict(config[section].items()))
        try:
            klass = getattr(__import__(import_module, fromlist=[import_class]),
                            import_class)
        except AttributeError as error:
            raise ConfigurationError(
                'Cannot create check named {}: '
                'Class does not exist'.format(class_name)) from error
        except ImportError as error:
            # previously, a typo in an external module name escaped as a raw
            # ModuleNotFoundError; surface it as a configuration problem
            raise ConfigurationError(
                'Cannot create check named {}: '
                'Module {} cannot be imported'.format(
                    class_name, import_module)) from error

        check = klass.create(name, config[section])
        if not isinstance(check, target_class):
            raise ConfigurationError(
                'Check {} is not a correct {} instance'.format(
                    check, target_class.__name__))
        _logger.debug('Created check instance %s with options %s',
                      check, check.options())
        configured_checks.append(check)

    if not configured_checks and error_none:
        raise ConfigurationError('No checks enabled')

    return configured_checks
397 | ||
398 | ||
def parse_config(config_file: Iterable[str]):
    """Parse the configuration file.

    Args:
        config_file:
            The file to parse

    Returns:
        the parsed configuration with extended interpolation enabled
    """
    _logger.debug('Reading config file %s', config_file)
    parser = configparser.ConfigParser(
        interpolation=configparser.ExtendedInterpolation())
    parser.read_file(config_file)
    _logger.debug('Parsed config file: %s', parser)
    return parser
412 | ||
413 | ||
def parse_arguments(args: Optional[Sequence[str]]) -> argparse.Namespace:
    """Parse command line arguments.

    Args:
        args:
            if specified, use the provided arguments instead of the default
            ones determined via the :module:`sys` module.
    """
    parser = argparse.ArgumentParser(
        description='Automatically suspends a server '
                    'based on several criteria',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    IO  # for making pyflakes happy
    # if the system-wide config exists and is readable, use it as the
    # default; otherwise --config becomes mandatory
    fallback_config = None  # type: Optional[IO[str]]
    try:
        fallback_config = open('/etc/autosuspend.conf', 'r')
    except (FileNotFoundError, IsADirectoryError, PermissionError):
        pass
    parser.add_argument(
        '-c', '--config',
        dest='config_file',
        type=argparse.FileType('r'),
        default=fallback_config,
        required=fallback_config is None,
        metavar='FILE',
        help='The config file to use')
    parser.add_argument(
        '-a', '--allchecks',
        dest='all_checks',
        default=False,
        action='store_true',
        help='Execute all checks even if one has already prevented '
             'the system from going to sleep. Useful to debug individual '
             'checks.')
    parser.add_argument(
        '-r', '--runfor',
        dest='run_for',
        type=float,
        default=None,
        metavar='SEC',
        help="If set, run for the specified amount of seconds before exiting "
             "instead of endless execution.")
    parser.add_argument(
        '-l', '--logging',
        type=argparse.FileType('r'),
        nargs='?',
        default=False,
        const=True,
        metavar='FILE',
        help='Configures the python logging system. If used '
             'without an argument, all logging is enabled to '
             'the console. If used with an argument, the '
             'configuration is read from the specified file.')

    parsed = parser.parse_args(args)

    _logger.debug('Parsed command line arguments %s', parsed)

    return parsed
474 | ||
475 | ||
def configure_logging(file_or_flag):
    """Configure the python :mod:`logging` system.

    If the provided argument is a `file` instance, try to use the
    pointed to file as a configuration for the logging system. Otherwise,
    if the given argument evaluates to :class:True:, use a default
    configuration with many logging messages. If everything fails, just log
    starting from the warning level.

    Args:
        file_or_flag (file or bool):
            either a configuration file pointed by a :ref:`file object
            <python:bltin-file-objects>` instance or something that evaluates
            to :class:`bool`.
    """
    if not isinstance(file_or_flag, bool):
        try:
            logging.config.fileConfig(file_or_flag)
            return
        except Exception:
            # at least configure warnings
            logging.basicConfig(level=logging.WARNING)
            _logger.warning('Unable to configure logging from file %s. '
                            'Falling back to warning level.',
                            file_or_flag,
                            exc_info=True)
            return

    # boolean flag: verbose console logging or warnings only
    level = logging.DEBUG if file_or_flag else logging.WARNING
    logging.basicConfig(level=level)
507 | ||
508 | ||
def main(args=None):
    """Run the daemon."""
    parsed_args = parse_arguments(args)

    configure_logging(parsed_args.logging)

    config = parse_config(parsed_args.config_file)

    # activity checks are mandatory, wake up checks are optional
    activities = set_up_checks(config, 'check', 'activity', Activity,
                               error_none=True)
    wakeups = set_up_checks(config, 'wakeup', 'wakeup', Wakeup)

    processor = Processor(
        activities, wakeups,
        config.getfloat('general', 'idle_time', fallback=300),
        config.getfloat('general', 'min_sleep_time', fallback=1200),
        config.getfloat('general', 'wakeup_delta', fallback=30),
        functools.partial(notify_and_suspend,
                          config.get('general', 'suspend_cmd'),
                          config.get('general', 'notify_cmd_wakeup',
                                     fallback=None),
                          config.get('general', 'notify_cmd_no_wakeup',
                                     fallback=None)),
        functools.partial(schedule_wakeup,
                          config.get('general', 'wakeup_cmd')),
        all_activities=parsed_args.all_checks)
    loop(processor,
         config.getfloat('general', 'interval', fallback=60),
         run_for=parsed_args.run_for,
         woke_up_file=config.get('general', 'woke_up_file',
                                 fallback='/var/run/autosuspend-just-woke-up'))
540 | ||
541 | ||
542 | if __name__ == "__main__": | |
543 | main() |
0 | """Provides the basic types used for checks.""" | |
1 | ||
2 | import abc | |
3 | import configparser | |
4 | import datetime | |
5 | from typing import Any, Mapping, Optional | |
6 | ||
7 | from autosuspend.util import logger_by_class_instance | |
8 | ||
9 | ||
class ConfigurationError(RuntimeError):
    """Raised when the configuration of a :class:`Check` is invalid."""
14 | ||
15 | ||
class TemporaryCheckError(RuntimeError):
    """Raised for temporary errors while performing a check.

    Such an error can be ignored for some time since it might recover
    automatically.
    """
24 | ||
25 | ||
class SevereCheckError(RuntimeError):
    """Indicates a severe check error that will probably not recover.

    There is no hope that this situation recovers.
    """
33 | ||
34 | ||
class Check(abc.ABC):
    """Base class for all kinds of checks.

    Subclasses must call this class' ``__init__`` method.

    Args:
        name (str):
            Configured name of the check
    """

    @classmethod
    @abc.abstractmethod
    def create(cls, name: str, config: configparser.SectionProxy) -> 'Check':
        """Create a new check instance from the provided configuration.

        Args:
            name (str):
                user-defined name for the check
            config (configparser.SectionProxy):
                config parser section with the configuration for this check

        Raises:
            ConfigurationError:
                Configuration for this check is inappropriate

        """
        pass

    def __init__(self, name: Optional[str] = None) -> None:
        # fall back to the class name in case no explicit name was configured
        if name:
            self.name = name
        else:
            self.name = self.__class__.__name__
        self.logger = logger_by_class_instance(self, name)

    def options(self) -> Mapping[str, Any]:
        """Return the configured options as a mapping.

        This is used for debugging purposes only.
        """
        return {k: v for k, v in self.__dict__.items()
                if not callable(v) and k != 'logger'}

    def __str__(self) -> str:
        return '{name}[class={clazz}]'.format(name=self.name,
                                              clazz=self.__class__.__name__)
81 | ||
82 | ||
class Activity(Check):
    """Base class for activity checks.

    Subclasses must call this class' __init__ method.

    ``__str__`` is intentionally not overridden here: the inherited
    ``Check.__str__`` already provides the exact same representation, so the
    former byte-identical duplicate was removed.
    """

    @abc.abstractmethod
    def check(self) -> Optional[str]:
        """Determine if system activity exists that prevents suspending.

        Returns:
            str:
                A string describing which condition currently prevents sleep,
                else ``None``.

        Raises:
            TemporaryCheckError:
                Check execution currently fails but might recover later
            SevereCheckError:
                Check executions fails severely
        """
109 | ||
110 | ||
class Wakeup(Check):
    """Represents a check for potential wake up points."""

    @abc.abstractmethod
    def check(self,
              timestamp: datetime.datetime) -> Optional[datetime.datetime]:
        """Indicate if a wakeup has to be scheduled for this check.

        Args:
            timestamp:
                the time at which the call to the wakeup check is made

        Returns:
            a datetime describing when the system needs to be running again or
            ``None`` if no wakeup is required. Use timezone aware datetimes.

        Raises:
            TemporaryCheckError:
                Check execution currently fails but might recover later
            SevereCheckError:
                Check executions fails severely
        """
0 | import configparser | |
1 | import copy | |
2 | from datetime import datetime, timedelta, timezone | |
3 | import glob | |
4 | from io import BytesIO | |
5 | import json | |
6 | import os | |
7 | import pwd | |
8 | import re | |
9 | import socket | |
10 | import subprocess | |
11 | import time | |
12 | from typing import Any, Dict | |
13 | ||
14 | import psutil | |
15 | ||
16 | from . import (Activity, | |
17 | Check, | |
18 | ConfigurationError, | |
19 | SevereCheckError, | |
20 | TemporaryCheckError) | |
21 | from .util import CommandMixin, NetworkMixin, XPathMixin | |
22 | from ..util.systemd import list_logind_sessions | |
23 | ||
24 | ||
class ActiveCalendarEvent(NetworkMixin, Activity):
    """Determines activity by checking against events in an icalendar file."""

    def __init__(self, name: str, **kwargs) -> None:
        NetworkMixin.__init__(self, **kwargs)
        Activity.__init__(self, name)

    def check(self):
        from ..util.ical import list_calendar_events

        reply = self.request()

        # Look for events that are active within the next minute.
        window_start = datetime.now(timezone.utc)
        window_end = window_start + timedelta(minutes=1)
        active = list_calendar_events(
            BytesIO(reply.content), window_start, window_end)

        self.logger.debug(
            'Listing active events between %s and %s returned %s events',
            window_start, window_end, len(active))

        if not active:
            return None
        return 'Calendar event {} is active'.format(active[0])
45 | ||
46 | ||
class ActiveConnection(Activity):
    """Checks if a client connection exists on specified ports."""

    @classmethod
    def create(cls, name, config):
        try:
            # Parse the comma-separated list into a set of integer ports.
            ports = {int(part.strip())
                     for part in config['ports'].split(',')}
            return cls(name, ports)
        except KeyError as error:
            raise ConfigurationError('Missing option ports') from error
        except ValueError as error:
            raise ConfigurationError('Ports must be integers') from error

    def __init__(self, name, ports):
        Activity.__init__(self, name)
        self._ports = ports

    def check(self):
        # Addresses assigned to any local network interface.
        local_addresses = [(addr.family, addr.address)
                           for addresses in psutil.net_if_addrs().values()
                           for addr in addresses]
        # Established connections bound to a local address on one of the
        # configured ports.
        connected = [conn.laddr[1]
                     for conn in psutil.net_connections()
                     if ((conn.family, conn.laddr[0]) in local_addresses and
                         conn.status == 'ESTABLISHED' and
                         conn.laddr[1] in self._ports)]
        if connected:
            return 'Ports {} are connected'.format(connected)
78 | ||
79 | ||
class ExternalCommand(CommandMixin, Activity):
    """Activity check that delegates the decision to an external command.

    The configured command is executed through a shell; a zero exit status
    is interpreted as detected activity.
    """

    def __init__(self, name, command):
        CommandMixin.__init__(self, command)
        # Use Activity.__init__ for consistency with the sibling checks in
        # this module (Activity defines no own __init__, so this resolves to
        # the same Check.__init__ as before).
        Activity.__init__(self, name)

    def check(self):
        try:
            subprocess.check_call(self._command, shell=True)
            return 'Command {} succeeded'.format(self._command)
        except subprocess.CalledProcessError:
            # A non-zero exit status simply means: no activity detected.
            return None
92 | ||
93 | ||
class Kodi(NetworkMixin, Activity):
    """Checks whether a Kodi instance reports an active player."""

    QUERY = '?request={"jsonrpc": "2.0", "id": 1, ' \
            '"method": "Player.GetActivePlayers"}'

    def __init__(self, name: str, url, **kwargs) -> None:
        NetworkMixin.__init__(self, url=url + self.QUERY, **kwargs)
        Activity.__init__(self, name)

    def check(self):
        try:
            reply = self.request().json()
        except json.JSONDecodeError as error:
            raise TemporaryCheckError(error) from error
        if 'result' not in reply:
            raise TemporaryCheckError('No result array in reply')
        # A non-empty result list means at least one player is active.
        return "Kodi currently playing" if reply['result'] else None
114 | ||
115 | ||
class KodiIdleTime(NetworkMixin, Activity):
    """Checks whether someone recently interacted with the Kodi GUI.

    Uses the Kodi JSON-RPC boolean ``System.IdleTime(n)``, which is true once
    the GUI has been idle for at least ``n`` seconds.
    """

    @classmethod
    def collect_init_args(cls, config) -> Dict[str, Any]:
        try:
            args = NetworkMixin.collect_init_args(config)
            args['idle_time'] = config.getint('idle_time', fallback=120)
            return args
        except ValueError as error:
            raise ConfigurationError(
                'Configuration error ' + str(error)) from error

    @classmethod
    def create(cls, name: str, config: configparser.SectionProxy):
        return cls(name, **cls.collect_init_args(config))

    def __init__(self, name: str, url: str, idle_time: int, **kwargs) -> None:
        # Bug fix: the previous query closed the JSON object before the
        # "params" entry and used the misspelled method name
        # "XMBC.GetInfoBool", yielding a malformed request. The correct
        # JSON-RPC method is "XBMC.GetInfoBooleans".
        request = url + \
            '?request={{"jsonrpc": "2.0", "id": 1, ' \
            '"method": "XBMC.GetInfoBooleans", ' \
            '"params": {{"booleans": ["System.IdleTime({})"]}}}}'.format(
                idle_time)
        NetworkMixin.__init__(self, url=request, **kwargs)
        Activity.__init__(self, name)
        self._idle_time = idle_time

    def check(self):
        try:
            reply = self.request().json()
            # System.IdleTime(n) is true when the GUI has been idle for at
            # least n seconds, i.e. nobody interacts with Kodi. The previous
            # condition reported activity in exactly that case (inverted).
            if reply['result']["System.IdleTime({})".format(self._idle_time)]:
                return None
            else:
                return 'Someone interacts with Kodi'
        except (KeyError, TypeError, json.JSONDecodeError) as error:
            raise TemporaryCheckError(error) from error
151 | ||
152 | ||
class Load(Activity):
    """Checks whether the system load average exceeds a threshold."""

    @classmethod
    def create(cls, name, config):
        try:
            threshold = config.getfloat('threshold', fallback=2.5)
            return cls(name, threshold)
        except ValueError as error:
            raise ConfigurationError(
                'Unable to parse threshold as float: {}'.format(
                    error)) from error

    def __init__(self, name, threshold):
        Check.__init__(self, name)
        self._threshold = threshold

    def check(self):
        # Use the 5 minute load average (index 1) as the indicator.
        current_load = os.getloadavg()[1]
        self.logger.debug("Load: %s", current_load)
        if current_load <= self._threshold:
            return None
        return 'Load {} > threshold {}'.format(current_load,
                                               self._threshold)
177 | ||
178 | ||
class Mpd(Activity):
    """Checks whether an MPD instance is currently playing music."""

    @classmethod
    def create(cls, name, config):
        try:
            return cls(name,
                       config.get('host', fallback='localhost'),
                       config.getint('port', fallback=6600),
                       config.getint('timeout', fallback=5))
        except ValueError as error:
            raise ConfigurationError(
                'Host port or timeout configuration wrong: {}'.format(
                    error)) from error

    def __init__(self, name, host, port, timeout):
        Check.__init__(self, name)
        self._host = host
        self._port = port
        self._timeout = timeout

    def _get_state(self):
        # Import lazily so the dependency is only needed when this check is
        # actually used.
        from mpd import MPDClient
        client = MPDClient()
        client.timeout = self._timeout
        client.connect(self._host, self._port)
        state = client.status()
        client.close()
        client.disconnect()
        return state

    def check(self):
        try:
            playing = self._get_state()['state'] == 'play'
        except (ConnectionError,
                ConnectionRefusedError,
                socket.timeout,
                socket.gaierror) as error:
            raise TemporaryCheckError(error) from error
        return 'MPD currently playing' if playing else None
221 | ||
222 | ||
class NetworkBandwidth(Activity):
    """Checks whether traffic on configured interfaces exceeds thresholds."""

    @classmethod
    def create(cls, name, config):
        try:
            interfaces = [entry.strip()
                          for entry in config['interfaces'].split(',')
                          if entry.strip()]
            if not interfaces:
                raise ConfigurationError('No interfaces configured')
            known_interfaces = psutil.net_if_addrs().keys()
            for interface in interfaces:
                if interface not in known_interfaces:
                    raise ConfigurationError(
                        'Network interface {} does not exist'.format(
                            interface))
            return cls(name,
                       interfaces,
                       config.getfloat('threshold_send', fallback=100),
                       config.getfloat('threshold_receive', fallback=100))
        except KeyError as error:
            raise ConfigurationError(
                'Missing configuration key: {}'.format(error)) from error
        except ValueError as error:
            raise ConfigurationError(
                'Threshold in wrong format: {}'.format(error)) from error

    def __init__(self, name, interfaces, threshold_send, threshold_receive):
        Check.__init__(self, name)
        self._interfaces = interfaces
        self._threshold_send = threshold_send
        self._threshold_receive = threshold_receive
        # Baseline for computing transfer rates on later checks.
        # NOTE(review): this baseline is never advanced afterwards, so rates
        # are always measured against construction time — confirm intended.
        self._previous_values = psutil.net_io_counters(pernic=True)
        self._previous_time = time.time()

    def check(self):
        new_values = psutil.net_io_counters(pernic=True)
        new_time = time.time()
        elapsed = new_time - self._previous_time

        for interface in self._interfaces:
            if interface not in new_values or \
                    interface not in self._previous_values:
                raise TemporaryCheckError(
                    'Interface {} is missing'.format(interface))

            # send direction
            delta_send = new_values[interface].bytes_sent - \
                self._previous_values[interface].bytes_sent
            rate_send = delta_send / elapsed
            if rate_send > self._threshold_send:
                return 'Interface {} sending rate {} byte/s '\
                    'higher than threshold {}'.format(
                        interface, rate_send, self._threshold_send)

            # receive direction
            delta_receive = new_values[interface].bytes_recv - \
                self._previous_values[interface].bytes_recv
            rate_receive = delta_receive / elapsed
            if rate_receive > self._threshold_receive:
                return 'Interface {} receive rate {} byte/s '\
                    'higher than threshold {}'.format(
                        interface, rate_receive, self._threshold_receive)
285 | ||
class Ping(Activity):
    """Check if one or several hosts are reachable via ping."""

    @classmethod
    def create(cls, name, config):
        try:
            hosts = [h.strip() for h in config['hosts'].split(',')]
            return cls(name, hosts)
        except KeyError as error:
            raise ConfigurationError(
                'Unable to determine hosts to ping: {}'.format(
                    error)) from error

    def __init__(self, name, hosts):
        Check.__init__(self, name)
        self._hosts = hosts

    def check(self):
        for host in self._hosts:
            cmd = ['ping', '-q', '-c', '1', host]
            if subprocess.call(cmd,
                               stdout=subprocess.DEVNULL,
                               stderr=subprocess.DEVNULL) == 0:
                # Lazy %-style args instead of eager string concatenation so
                # the message is only built when debug logging is enabled.
                self.logger.debug("host %s appears to be up", host)
                return 'Host {} is up'.format(host)
        return None
313 | ||
314 | ||
class Processes(Activity):
    """Checks whether one of a configured set of processes is running."""

    @classmethod
    def create(cls, name, config):
        try:
            processes = [p.strip() for p in config['processes'].split(',')]
            return cls(name, processes)
        except KeyError as error:
            raise ConfigurationError(
                'No processes to check specified') from error

    def __init__(self, name, processes):
        Check.__init__(self, name)
        self._processes = processes

    def check(self):
        for proc in psutil.process_iter():
            try:
                pinfo = proc.name()
                # Direct membership test instead of the former inner loop
                # over the configured names; the reported name is identical.
                if pinfo in self._processes:
                    return 'Process {} is running'.format(pinfo)
            except psutil.NoSuchProcess:
                # Process terminated while iterating; ignore it.
                pass
        return None
341 | ||
342 | ||
class Smb(Activity):
    """Checks for connected Samba clients using smbstatus."""

    @classmethod
    def create(cls, name, config):
        return cls(name)

    def check(self):
        try:
            status_output = subprocess.check_output(
                ['smbstatus', '-b']).decode('utf-8')
        except subprocess.CalledProcessError as error:
            raise SevereCheckError(error) from error

        self.logger.debug('Received status output:\n%s',
                          status_output)

        # Connection entries follow a dashed separator line in the brief
        # smbstatus output.
        connections = []
        separator_seen = False
        for line in status_output.splitlines():
            if separator_seen:
                connections.append(line)
            elif line.startswith('----'):
                separator_seen = True

        if not connections:
            return None
        return 'SMB clients are connected:\n{}'.format(
            '\n'.join(connections))
373 | ||
374 | ||
class Users(Activity):
    """Checks whether a logged-in user session matches configured patterns."""

    @classmethod
    def create(cls, name, config):
        try:
            return cls(name,
                       re.compile(config.get('name', fallback='.*')),
                       re.compile(config.get('terminal', fallback='.*')),
                       re.compile(config.get('host', fallback='.*')))
        except re.error as error:
            raise ConfigurationError(
                'Regular expression is invalid: {}'.format(error)) from error

    def __init__(self, name, user_regex, terminal_regex, host_regex):
        Activity.__init__(self, name)
        self._user_regex = user_regex
        self._terminal_regex = terminal_regex
        self._host_regex = host_regex

    def _matches(self, entry):
        # A session counts only if every configured pattern matches fully;
        # evaluation short-circuits in the same order as before.
        return (self._user_regex.fullmatch(entry.name) is not None and
                self._terminal_regex.fullmatch(entry.terminal) is not None and
                self._host_regex.fullmatch(entry.host) is not None)

    def check(self):
        for entry in psutil.users():
            if self._matches(entry):
                self.logger.debug('User %s on terminal %s from host %s '
                                  'matches criteria.', entry.name,
                                  entry.terminal, entry.host)
                return 'User {user} is logged in on terminal {terminal} ' \
                    'from {host} since {started}'.format(
                        user=entry.name, terminal=entry.terminal,
                        host=entry.host, started=entry.started)
        return None
411 | ||
412 | ||
class XIdleTime(Activity):
    """Check that local X display have been idle long enough."""

    @classmethod
    def create(cls, name, config):
        """Construct the check from its configuration section.

        Raises:
            ConfigurationError:
                invalid regular expression or non-integer timeout
        """
        try:
            return cls(name, config.getint('timeout', fallback=600),
                       config.get('method', fallback='sockets'),
                       re.compile(config.get('ignore_if_process',
                                             fallback=r'a^')),
                       re.compile(config.get('ignore_users',
                                             fallback=r'a^')))
        except re.error as error:
            raise ConfigurationError(
                'Regular expression is invalid: {}'.format(error)) from error
        except ValueError as error:
            raise ConfigurationError(
                'Unable to parse configuration: {}'.format(error)) from error

    def __init__(self, name, timeout, method,
                 ignore_process_re, ignore_users_re):
        Activity.__init__(self, name)
        self._timeout = timeout
        # Select the session discovery strategy once at construction time.
        if method == 'sockets':
            self._provide_sessions = self._list_sessions_sockets
        elif method == 'logind':
            self._provide_sessions = self._list_sessions_logind
        else:
            raise ValueError(
                "Unknown session discovery method {}".format(method))
        self._ignore_process_re = ignore_process_re
        self._ignore_users_re = ignore_users_re

    def _list_sessions_sockets(self):
        """List running X sessions by iterating the X sockets.

        This method assumes that X servers are run under the users using the
        server.
        """
        sockets = glob.glob('/tmp/.X11-unix/X*')
        self.logger.debug('Found sockets: %s', sockets)

        results = []
        for sock in sockets:
            # determine the number of the X display
            try:
                display = int(sock[len('/tmp/.X11-unix/X'):])
            except ValueError:
                # exc_info carries the details; no need to bind the exception
                self.logger.warning(
                    'Cannot parse display number from socket %s. Skipping.',
                    sock, exc_info=True)
                continue

            # determine the user of the display
            try:
                user = pwd.getpwuid(os.stat(sock).st_uid).pw_name
            except (FileNotFoundError, KeyError):
                self.logger.warning(
                    'Cannot get the owning user from socket %s. Skipping.',
                    sock, exc_info=True)
                continue

            results.append((display, user))

        return results

    def _list_sessions_logind(self):
        """List running X sessions using logind.

        This method assumes that a ``Display`` variable is set in the logind
        sessions.
        """
        results = []
        for session_id, properties in list_logind_sessions():
            if 'Name' in properties and 'Display' in properties:
                try:
                    results.append(
                        (int(properties['Display'].replace(':', '')),
                         str(properties['Name'])))
                except ValueError:
                    # use the non-deprecated warning() instead of warn()
                    self.logger.warning(
                        'Unable to parse display from session properties %s',
                        properties, exc_info=True)
            else:
                self.logger.debug(
                    'Skipping session %s because it does not contain '
                    'a user name and a display', session_id)
        return results

    def _is_skip_process_running(self, user):
        """Return True if the user runs a process matching the ignore regex."""
        user_processes = []
        for process in psutil.process_iter():
            try:
                if process.username() == user:
                    user_processes.append(process.name())
            except (psutil.NoSuchProcess,
                    psutil.ZombieProcess,
                    psutil.AccessDenied):
                # ignore processes which have disappeared etc.
                pass

        for process_name in user_processes:
            if self._ignore_process_re.match(process_name) is not None:
                # Bug fix: the entries are plain name strings; the previous
                # code called .name()/.pid on them, raising AttributeError as
                # soon as a match was logged.
                self.logger.debug(
                    "Process %s matches the ignore regex '%s'."
                    " Skipping idle time check for this user.",
                    process_name, self._ignore_process_re)
                return True

        return False

    def check(self):
        for display, user in self._provide_sessions():
            self.logger.info('Checking display %s of user %s', display, user)

            # check whether this users should be ignored completely
            if self._ignore_users_re.match(user) is not None:
                self.logger.debug("Skipping user '%s' due to request", user)
                continue

            # check whether any of the running processes of this user matches
            # the ignore regular expression. In that case we skip idletime
            # checking because we assume the user has a process running that
            # inevitably tampers with the idle time.
            if self._is_skip_process_running(user):
                continue

            # prepare the environment for the xprintidle call
            env = copy.deepcopy(os.environ)
            env['DISPLAY'] = ':{}'.format(display)
            env['XAUTHORITY'] = os.path.join(os.path.expanduser('~' + user),
                                             '.Xauthority')

            try:
                idle_time = subprocess.check_output(
                    ['sudo', '-u', user, 'xprintidle'], env=env)
                idle_time = float(idle_time.strip()) / 1000.0
            except (subprocess.CalledProcessError, ValueError) as error:
                self.logger.warning(
                    'Unable to determine the idle time for display %s.',
                    display, exc_info=True)
                raise TemporaryCheckError(error) from error

            self.logger.debug(
                'Idle time for display %s of user %s is %s seconds.',
                display, user, idle_time)

            if idle_time < self._timeout:
                return 'X session {} of user {} ' \
                    'has idle time {} < threshold {}'.format(
                        display, user, idle_time, self._timeout)

        return None
566 | ||
567 | ||
class LogindSessionsIdle(Activity):
    """Prevents suspending in case a logind session is marked not idle.

    The decision is based on the ``IdleHint`` property of logind sessions.
    """

    @classmethod
    def create(cls, name, config):
        types = config.get('types', fallback='tty,x11,wayland')
        types = [t.strip() for t in types.split(',')]
        states = config.get('states', fallback='active,online')
        states = [t.strip() for t in states.split(',')]
        return cls(name, types, states)

    def __init__(self, name, types, states):
        Activity.__init__(self, name)
        self._types = types
        self._states = states

    def check(self):
        for session_id, properties in list_logind_sessions():
            self.logger.debug('Session %s properties: %s',
                              session_id, properties)

            if properties['Type'] not in self._types:
                # Bug fix: the debug call used the lower-case key 'type',
                # which raised KeyError whenever this branch was reached.
                self.logger.debug('Ignoring session of wrong type %s',
                                  properties['Type'])
                continue
            if properties['State'] not in self._states:
                self.logger.debug('Ignoring session because its state is %s',
                                  properties['State'])
                continue

            if properties['IdleHint'] == 'no':
                return 'Login session {} is not idle'.format(
                    session_id)

        return None
606 | ||
607 | ||
class XPath(XPathMixin, Activity):
    """Signals activity whenever the configured XPath query has matches."""

    def __init__(self, name, **kwargs):
        Activity.__init__(self, name)
        XPathMixin.__init__(self, **kwargs)

    def check(self):
        matches = self.evaluate()
        if not matches:
            return None
        return "XPath matches for url " + self._url
0 | import configparser | |
1 | from typing import Any, Dict, Iterable, Optional | |
2 | ||
3 | from . import ConfigurationError, SevereCheckError, TemporaryCheckError | |
4 | ||
5 | ||
class CommandMixin:
    """Mixin for configuring checks based on external commands."""

    @classmethod
    def create(cls, name: str, config: configparser.SectionProxy):
        """Build an instance from the ``command`` option of the section."""
        try:
            # KeyError from a missing 'command' option is translated into a
            # configuration error for the caller.
            return cls(name, config['command'].strip())  # type: ignore
        except KeyError as error:
            raise ConfigurationError(
                'Missing command specification') from error

    def __init__(self, command: str) -> None:
        # Shell command line executed by the concrete check.
        self._command = command
19 | ||
20 | ||
class NetworkMixin:
    """Mixin for checks that fetch a document from a configurable URL."""

    @classmethod
    def collect_init_args(
            cls, config: configparser.SectionProxy) -> Dict[str, Any]:
        """Extract constructor arguments from a configuration section.

        Raises:
            ConfigurationError:
                missing url, invalid timeout, or only one of
                username/password provided
        """
        try:
            args = {}  # type: Dict[str, Any]
            args['timeout'] = config.getint('timeout', fallback=5)
            args['url'] = config['url']
            args['username'] = config.get('username')
            args['password'] = config.get('password')
            # Credentials only make sense in pairs.
            if (args['username'] is None) != (args['password'] is None):
                raise ConfigurationError('Username and password must be set')
            return args
        except ValueError as error:
            raise ConfigurationError(
                'Configuration error ' + str(error)) from error
        except KeyError as error:
            raise ConfigurationError(
                'Lacks ' + str(error) + ' config entry') from error

    @classmethod
    def create(cls, name: str, config: configparser.SectionProxy):
        return cls(name, **cls.collect_init_args(config))  # type: ignore

    def __init__(self, url: str, timeout: int,
                 username: Optional[str] = None,
                 password: Optional[str] = None) -> None:
        self._url = url
        self._timeout = timeout
        self._username = username
        self._password = password

    def request(self):
        """Fetch the configured URL and return the requests reply.

        Performs basic or digest authentication if the server requests it
        and credentials are configured.

        Raises:
            TemporaryCheckError: on network-level errors
            SevereCheckError: on an unsupported authentication scheme
        """
        import requests
        from requests.auth import HTTPBasicAuth, HTTPDigestAuth
        import requests.exceptions

        auth_map = {
            'basic': HTTPBasicAuth,
            'digest': HTTPDigestAuth,
        }

        # Close the session deterministically instead of leaking its
        # connection pool (the previous code never closed it).
        with requests.Session() as session:
            try:
                from requests_file import FileAdapter
                session.mount('file://', FileAdapter())
            except ImportError:
                pass

            try:
                reply = session.get(self._url, timeout=self._timeout)

                # replace reply with an authenticated version if credentials
                # are available and the server has requested authentication
                if self._username and self._password \
                        and reply.status_code == 401:
                    auth_scheme = reply.headers[
                        'WWW-Authenticate'].split(' ')[0].lower()
                    if auth_scheme not in auth_map:
                        raise SevereCheckError(
                            'Unsupported authentication scheme {}'.format(
                                auth_scheme))
                    auth = auth_map[auth_scheme](
                        self._username, self._password)
                    reply = session.get(
                        self._url, timeout=self._timeout, auth=auth)

                reply.raise_for_status()
                return reply
            except requests.exceptions.RequestException as error:
                raise TemporaryCheckError(error) from error
91 | ||
92 | ||
class XPathMixin(NetworkMixin):
    """Mixin evaluating an XPath expression against a fetched document."""

    @classmethod
    def collect_init_args(cls, config) -> Dict[str, Any]:
        from lxml import etree
        try:
            args = NetworkMixin.collect_init_args(config)
            args['xpath'] = config['xpath'].strip()
            # validate the expression up front so misconfiguration surfaces
            # at start-up rather than during the first check
            try:
                etree.fromstring('<a></a>').xpath(args['xpath'])
            except etree.XPathEvalError as error:
                raise ConfigurationError(
                    'Invalid xpath expression: ' + args['xpath']) from error
            return args
        except ValueError as error:
            raise ConfigurationError(
                'Configuration error ' + str(error)) from error
        except KeyError as error:
            raise ConfigurationError(
                'Lacks ' + str(error) + ' config entry') from error

    @classmethod
    def create(cls, name: str, config: configparser.SectionProxy):
        return cls(name, **cls.collect_init_args(config))

    def __init__(self, xpath: str, **kwargs) -> None:
        NetworkMixin.__init__(self, **kwargs)
        self._xpath = xpath

    def evaluate(self) -> Iterable[Any]:
        import requests
        import requests.exceptions
        from lxml import etree

        try:
            content = self.request().content
            return etree.fromstring(content).xpath(self._xpath)
        except (requests.exceptions.RequestException,
                etree.XMLSyntaxError) as error:
            raise TemporaryCheckError(error) from error
0 | import datetime | |
1 | from io import BytesIO | |
2 | import subprocess | |
3 | ||
4 | from .util import CommandMixin, NetworkMixin, XPathMixin | |
5 | from .. import ConfigurationError, TemporaryCheckError, Wakeup | |
6 | ||
7 | ||
class Calendar(NetworkMixin, Wakeup):
    """Uses an ical calendar to wake up on the next scheduled event."""

    def __init__(self, name: str, **kwargs) -> None:
        NetworkMixin.__init__(self, **kwargs)
        Wakeup.__init__(self, name)

    def check(self, timestamp):
        from ..util.ical import list_calendar_events

        response = self.request()

        # Look ahead 24 weeks for upcoming events.
        horizon = timestamp + datetime.timedelta(weeks=6 * 4)
        upcoming = [event
                    for event in list_calendar_events(
                        BytesIO(response.content), timestamp, horizon)
                    # Filter out currently active events. They are not our
                    # business.
                    if event.start >= timestamp]

        if upcoming:
            return upcoming[0].start
28 | ||
29 | ||
class File(Wakeup):
    """Determines scheduled wake ups from the contents of a file on disk.

    File contents are interpreted as a Unix timestamp in seconds UTC.
    """

    @classmethod
    def create(cls, name, config):
        try:
            return cls(name, config['path'])
        except KeyError as error:
            raise ConfigurationError('Missing option path') from error

    def __init__(self, name, path):
        Wakeup.__init__(self, name)
        self._path = path

    def check(self, timestamp):
        try:
            with open(self._path, 'r') as time_file:
                first_line = time_file.readlines()[0]
            return datetime.datetime.fromtimestamp(
                float(first_line.strip()),
                datetime.timezone.utc)
        except FileNotFoundError:
            # this is ok: no wake up has been scheduled
            pass
        except (ValueError, PermissionError, IOError) as error:
            raise TemporaryCheckError(error) from error
59 | ||
60 | ||
class Command(CommandMixin, Wakeup):
    """Determine wake up times based on an external command.

    The called command must return a timestamp in UTC or nothing in case no
    wake up is planned.
    """

    def __init__(self, name, command):
        CommandMixin.__init__(self, command)
        Wakeup.__init__(self, name)

    def check(self, timestamp):
        try:
            output = subprocess.check_output(self._command, shell=True)
            self.logger.debug('Command %s succeeded with output %s',
                              self._command, output)
            lines = output.splitlines()
            # Bug fix: empty output is documented as "no wake up planned",
            # but the previous code indexed splitlines()[0] unconditionally,
            # raising an unhandled IndexError in that case.
            if lines and lines[0].strip():
                return datetime.datetime.fromtimestamp(
                    float(lines[0].strip()),
                    datetime.timezone.utc)

        except (subprocess.CalledProcessError, ValueError) as error:
            raise TemporaryCheckError(error) from error
85 | ||
86 | ||
class Periodic(Wakeup):
    """Always indicates a wake up after a specified delta of time from now on.

    Use this to periodically wake up a system.
    """

    @classmethod
    def create(cls, name, config):
        """Create an instance from ``unit`` and ``value`` config options.

        Raises:
            ConfigurationError: if the options are missing or malformed
        """
        try:
            kwargs = {}
            kwargs[config['unit']] = float(config['value'])
            return cls(name, datetime.timedelta(**kwargs))
        except (ValueError, KeyError, TypeError) as error:
            # chain the original error to preserve the cause for debugging
            raise ConfigurationError(str(error)) from error

    def __init__(self, name: str, delta: datetime.timedelta) -> None:
        Wakeup.__init__(self, name)
        self._delta = delta

    def check(self, timestamp):
        # the next wake up is always a fixed delta from the current time
        return timestamp + self._delta
108 | ||
109 | ||
class XPath(XPathMixin, Wakeup):
    """Determine wake up times from a network resource using XPath expressions.

    The matched results are expected to represent timestamps in seconds UTC.
    """

    def __init__(self, name, **kwargs):
        Wakeup.__init__(self, name)
        XPathMixin.__init__(self, **kwargs)

    def convert_result(self, result, timestamp):
        """Convert a single XPath match into a UTC datetime."""
        return datetime.datetime.fromtimestamp(float(result),
                                               datetime.timezone.utc)

    def check(self, timestamp):
        matches = self.evaluate()
        try:
            if matches:
                # the earliest of all provided timestamps wins
                return min(self.convert_result(m, timestamp)
                           for m in matches)
        except TypeError as error:
            # chain the cause so the original traceback is not lost
            raise TemporaryCheckError(
                'XPath returned a result that is not a string: '
                + str(error)) from error
        except ValueError as error:
            raise TemporaryCheckError(
                'Result cannot be parsed: ' + str(error)) from error
135 | ||
136 | ||
class XPathDelta(XPath):
    """XPath check whose matches are offsets from now in a configured unit."""

    # valid keyword arguments for datetime.timedelta
    UNITS = ['days', 'seconds', 'microseconds', 'milliseconds',
             'minutes', 'hours', 'weeks']

    @classmethod
    def create(cls, name, config):
        """Create an instance, defaulting the ``unit`` option to minutes.

        Raises:
            ConfigurationError: on invalid options, including unknown units
        """
        try:
            args = XPath.collect_init_args(config)
            args['unit'] = config.get('unit', fallback='minutes')
            return cls(name, **args)
        except ValueError as error:
            # chain the original error to preserve the cause for debugging
            raise ConfigurationError(str(error)) from error

    def __init__(self, name, unit, **kwargs):
        if unit not in self.UNITS:
            raise ValueError('Unsupported unit')
        XPath.__init__(self, name, **kwargs)
        self._unit = unit

    def convert_result(self, result, timestamp):
        # interpret the match as a delta from now in the configured unit
        kwargs = {}
        kwargs[self._unit] = float(result)
        return timestamp + datetime.timedelta(**kwargs)
0 | import logging | |
1 | ||
2 | ||
def logger_by_class(klass, name=None):
    """Return a logger named after the class' module and name.

    An optional extra ``name`` is appended as a further dotted suffix.
    """
    suffix = '.{}'.format(name) if name else ''
    logger_name = '{}.{}{}'.format(klass.__module__, klass.__name__, suffix)
    return logging.getLogger(logger_name)
9 | ||
10 | ||
def logger_by_class_instance(instance, name=None):
    """Return a logger named after the class of the given instance."""
    return logger_by_class(type(instance), name=name)
0 | from datetime import date, datetime, timedelta | |
1 | from typing import Dict, IO, Iterable, List, Mapping | |
2 | ||
3 | from dateutil.rrule import rruleset, rrulestr | |
4 | import icalendar | |
5 | import icalendar.cal | |
6 | import pytz | |
7 | import tzlocal | |
8 | ||
9 | ||
class CalendarEvent:
    """Plain value object describing a single event from an ical calendar."""

    def __init__(self, summary: str, start: datetime, end: datetime) -> None:
        # attributes are stored as-is; no validation is performed
        self.summary = summary
        self.start = start
        self.end = end

    def __str__(self) -> str:
        details = 'summary={}, start={}, end={}'.format(
            self.summary, self.start, self.end)
        return 'CalendarEvent[' + details + ']'
20 | ||
21 | ||
def _expand_rrule_all_day(rrule: str,
                          start: date,
                          exclusions: Iterable,
                          start_at: datetime,
                          end_at: datetime) -> Iterable[date]:
    """Expand an rrule for all-day events.

    To my mind, these events cannot have changes, just exclusions, because
    changes only affect the time, which doesn't exist for all-day events.
    """

    rules = rruleset()
    rules.rrule(rrulestr(rrule, dtstart=start, ignoretz=True))

    # register exclusion dates, represented as midnight datetimes to match
    # the expansion below
    for xdate in (exclusions or []):
        rules.exdate(datetime.combine(xdate.dts[0].dt, datetime.min.time()))

    # reduce start and end to datetimes without timezone that just represent
    # a date at midnight
    window_start = datetime.combine(start_at.date(), datetime.min.time())
    window_end = datetime.combine(end_at.date(), datetime.min.time())

    return [candidate.date()
            for candidate in rules.between(window_start, window_end,
                                           inc=True)]
51 | ||
52 | ||
def _expand_rrule(rrule: str,
                  start: datetime,
                  instance_duration: timedelta,
                  exclusions: Iterable,
                  changes: Iterable[icalendar.cal.Event],
                  start_at: datetime,
                  end_at: datetime):
    """Expand an rrule of a timed event into concrete start times.

    Args:
        rrule: raw rrule string to expand
        start: timezone-aware start time of the first instance
        instance_duration: length of one instance; used to also include
            instances that started before ``start_at`` but may still overlap
            the window
        exclusions: EXDATE entries to drop from the expansion
        changes: modified instances (RECURRENCE-ID) whose original
            occurrences must be excluded here
        start_at: start of the requested window
        end_at: end of the requested window

    Returns:
        A list of timezone-aware start times in the original timezone.
    """

    # unify everything to a single timezone and then strip it to handle DST
    # changes correctly
    orig_tz = start.tzinfo
    start = start.replace(tzinfo=None)
    start_at = start_at.astimezone(orig_tz).replace(tzinfo=None)
    end_at = end_at.astimezone(orig_tz).replace(tzinfo=None)

    rules = rruleset()
    first_rule = rrulestr(rrule, dtstart=start, ignoretz=True)

    # apply the same timezone logic for the until part of the rule after
    # parsing it.
    # NOTE(review): this accesses dateutil's private _until attribute and may
    # break with dateutil updates.
    if first_rule._until:
        first_rule._until = pytz.utc.localize(
            first_rule._until).astimezone(orig_tz).replace(tzinfo=None)

    rules.rrule(first_rule)

    # add exclusions
    if exclusions:
        for xdate in exclusions:
            try:
                # also in this case, unify and strip the timezone
                rules.exdate(
                    xdate.dts[0].dt.astimezone(orig_tz).replace(tzinfo=None))
            except AttributeError:
                # exclusion entries without astimezone support (e.g. plain
                # dates) are skipped on a best-effort basis
                pass

    # add events that were changed
    for change in changes:
        # same timezone mangling applies here
        rules.exdate(change.get('recurrence-id').dt.astimezone(
            orig_tz).replace(tzinfo=None))

    # expand the rrule and re-attach the original timezone to each result
    dates = []
    for candidate in rules.between(start_at - instance_duration, end_at,
                                   inc=True):
        localized = orig_tz.localize(candidate)  # type: ignore
        dates.append(localized)
    return dates
102 | ||
103 | ||
ChangeMapping = Mapping[str, Iterable[icalendar.cal.Event]]


def _collect_recurrence_changes(calendar: icalendar.Calendar) -> ChangeMapping:
    """Group modified instances of recurring events by their event uid."""
    recurring_changes = {}  # type: Dict[str, List[icalendar.cal.Event]]
    for component in calendar.walk():
        if component.name != 'VEVENT':
            continue
        # components carrying a recurrence-id are modified instances of a
        # recurring event
        if component.get('recurrence-id'):
            recurring_changes.setdefault(
                component.get('uid'), []).append(component)
    return recurring_changes
118 | ||
119 | ||
def list_calendar_events(data: IO[bytes],
                         start_at: datetime,
                         end_at: datetime) -> Iterable[CalendarEvent]:
    """List all relevant calendar events in the provided interval.

    Args:
        data:
            A stream with icalendar data
        start_at:
            include events overlapping with this time (inclusive)
        end_at:
            do not include events that start after or exactly at this time

    Returns:
        All matching events sorted by their start time.
    """

    def is_aware(dt: datetime) -> bool:
        return dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None

    # some useful notes:
    # * end times and dates are non-inclusive for ical events
    # * start and end are dates for all-day events

    calendar = icalendar.Calendar.from_ical(data.read())

    # Do a first pass through the calendar to collect all exclusions to
    # recurring events so that they can be handled when expanding recurrences.
    recurring_changes = _collect_recurrence_changes(calendar)

    events = []
    for component in calendar.walk():
        if component.name != 'VEVENT':
            continue

        # Convert the summary to a plain string right away so that recurring
        # and single events consistently expose str summaries. Previously,
        # only the single-event branches applied str() while expanded
        # recurrences leaked the raw icalendar value.
        summary = str(component.get('summary'))
        start = component.get('dtstart').dt
        end = component.get('dtend').dt
        exclusions = component.get('exdate')
        if exclusions and not isinstance(exclusions, list):
            exclusions = [exclusions]

        # Check whether dates are floating and localize with local time if so.
        # Only works in case of non-all-day events, which are dates, not
        # datetimes.
        if isinstance(start, datetime) and not is_aware(start):
            assert not is_aware(end)
            local_time = tzlocal.get_localzone()
            start = local_time.localize(start)
            end = local_time.localize(end)

        length = end - start

        if component.get('rrule'):
            rrule = component.get('rrule').to_ical().decode('utf-8')
            changes = []  # type: Iterable[icalendar.cal.Event]
            if component.get('uid') in recurring_changes:
                changes = recurring_changes[component.get('uid')]

            if isinstance(start, datetime):
                # complex processing in case of normal events
                for local_start in _expand_rrule(
                        rrule,
                        start,
                        length,
                        exclusions,
                        changes,
                        start_at,
                        end_at):
                    local_end = local_start + length
                    events.append(CalendarEvent(
                        summary, local_start, local_end))
            else:
                # simplified processing for all-day events
                for local_start in _expand_rrule_all_day(
                        rrule,
                        start,
                        exclusions,
                        start_at,
                        end_at):
                    # NOTE(review): assumes all-day recurrences span a single
                    # day; multi-day all-day events would need `length` here
                    # — confirm against real calendars.
                    local_end = local_start + timedelta(days=1)
                    events.append(CalendarEvent(
                        summary, local_start, local_end))
        else:
            # same distinction here as above
            if isinstance(start, datetime):
                # single events overlapping the requested interval
                if end > start_at and start < end_at:
                    events.append(CalendarEvent(summary, start, end))
            else:
                # all-day events
                if end > start_at.date() and start <= end_at.date():
                    events.append(CalendarEvent(summary, start, end))

    return sorted(events, key=lambda e: e.start)
0 | from typing import Iterable, Tuple | |
1 | ||
2 | ||
def list_logind_sessions() -> Iterable[Tuple[str, dict]]:
    """List running logind sessions and their properties.

    Returns:
        list of (session_id, properties dict):
            A list with tuples of sessions ids and their associated properties
            represented as dicts.
    """
    # imported lazily so that the module can be used without dbus installed
    import dbus
    bus = dbus.SystemBus()
    login1 = bus.get_object("org.freedesktop.login1",
                            "/org/freedesktop/login1")

    sessions = login1.ListSessions(
        dbus_interface='org.freedesktop.login1.Manager')

    results = []
    # each ListSessions entry is a struct; index 0 carries the session id
    # and index 4 the D-Bus object path of the session
    for session in sessions:
        session_id = session[0]
        session_object = bus.get_object('org.freedesktop.login1', session[4])
        properties_interface = dbus.Interface(
            session_object, 'org.freedesktop.DBus.Properties')
        properties = properties_interface.GetAll(
            'org.freedesktop.login1.Session')
        results.append((session_id, properties))

    return results
0 | import configparser | |
1 | import logging | |
2 | import os.path | |
3 | import pwd | |
4 | import re | |
5 | import socket | |
6 | import subprocess | |
7 | import sys | |
8 | ||
9 | import psutil | |
10 | ||
11 | import pytest | |
12 | ||
13 | import requests | |
14 | import requests.exceptions | |
15 | ||
16 | import autosuspend | |
17 | ||
18 | ||
class TestCheck(object):
    """Tests for the abstract Check base class."""

    class DummyCheck(autosuspend.Check):
        """Minimal concrete subclass to make the abstract base instantiable."""

        @classmethod
        def create(cls, name, config):
            pass

        def check(self):
            pass

    def test_name(self):
        # a provided name must be reflected by the name property
        name = 'test'
        assert self.DummyCheck(name).name == name

    def test_name_default(self):
        # omitting the name must still yield some non-None default
        assert self.DummyCheck().name is not None

    def test_str(self):
        # string conversion must work without raising
        assert isinstance(str(self.DummyCheck('test')), str)
39 | ||
40 | ||
class TestSmb(object):
    """Tests for the Smb check, driven by recorded smbstatus output."""

    def test_no_connections(self, monkeypatch):
        # recorded smbstatus output without any client connection
        def return_data(*args, **kwargs):
            with open(os.path.join(os.path.dirname(__file__), 'test_data',
                                   'smbstatus_no_connections'), 'rb') as f:
                return f.read()
        monkeypatch.setattr(subprocess, 'check_output', return_data)

        assert autosuspend.Smb('foo').check() is None

    def test_with_connections(self, monkeypatch):
        # recorded output with active connections must yield a match
        def return_data(*args, **kwargs):
            with open(os.path.join(os.path.dirname(__file__), 'test_data',
                                   'smbstatus_with_connections'), 'rb') as f:
                return f.read()
        monkeypatch.setattr(subprocess, 'check_output', return_data)

        assert autosuspend.Smb('foo').check() is not None
        assert len(autosuspend.Smb('foo').check().splitlines()) == 3

    def test_call_error(self, mocker):
        # a failing smbstatus binary is not a recoverable situation
        mocker.patch('subprocess.check_output',
                     side_effect=subprocess.CalledProcessError(2, 'cmd'))

        with pytest.raises(autosuspend.SevereCheckError):
            autosuspend.Smb('foo').check()

    def test_create(self):
        # the check requires no configuration options at all
        assert isinstance(autosuspend.Smb.create('name', None),
                          autosuspend.Smb)
72 | ||
73 | ||
class TestUsers(object):
    """Tests for the Users check matching logged-in users by regex."""

    @staticmethod
    def create_suser(name, terminal, host, started, pid):
        # construct a psutil suser tuple across psutil versions
        try:
            return psutil._common.suser(name, terminal, host, started, pid)
        except TypeError:
            # psutil 5.0 did not have the pid field yet
            return psutil._common.suser(name, terminal, host, started)

    def test_no_users(self, monkeypatch):

        def data():
            return []
        monkeypatch.setattr(psutil, 'users', data)

        # without any session, even catch-all regexes cannot match
        assert autosuspend.Users('users', re.compile('.*'), re.compile('.*'),
                                 re.compile('.*')).check() is None

    def test_smoke(self):
        # runs against the real psutil data; only checks for absence of errors
        autosuspend.Users('users', re.compile('.*'), re.compile('.*'),
                          re.compile('.*')).check()

    def test_matching_users(self, monkeypatch):

        def data():
            return [self.create_suser('foo', 'pts1', 'host', 12345, 12345)]
        monkeypatch.setattr(psutil, 'users', data)

        # catch-all regexes match any logged-in user
        assert autosuspend.Users('users', re.compile('.*'), re.compile('.*'),
                                 re.compile('.*')).check() is not None

    def test_non_matching_user(self, monkeypatch):

        def data():
            return [self.create_suser('foo', 'pts1', 'host', 12345, 12345)]
        monkeypatch.setattr(psutil, 'users', data)

        # the user name regex does not match the session user 'foo'
        assert autosuspend.Users('users', re.compile('narf'), re.compile('.*'),
                                 re.compile('.*')).check() is None

    def test_create(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
name = name.*name
terminal = term.*term
host = host.*host''')

        check = autosuspend.Users.create('name', parser['section'])

        assert check._user_regex == re.compile('name.*name')
        assert check._terminal_regex == re.compile('term.*term')
        assert check._host_regex == re.compile('host.*host')

    def test_create_regex_error(self):
        # an invalid regex in the config must be reported as a config error
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
name = name.*name
terminal = term.[[a-9]term
host = host.*host''')

        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.Users.create('name', parser['section'])
137 | ||
138 | ||
class TestProcesses(object):
    """Tests for the Processes check matching running process names."""

    class StubProcess(object):
        """Stand-in for psutil.Process with a fixed name."""

        def __init__(self, name):
            self._name = name

        def name(self):
            return self._name

    class RaisingProcess(object):
        """Stand-in for a process that vanished between listing and query."""

        def name(self):
            raise psutil.NoSuchProcess(42)

    def test_matching_process(self, monkeypatch):

        def data():
            return [self.StubProcess('blubb'), self.StubProcess('nonmatching')]
        monkeypatch.setattr(psutil, 'process_iter', data)

        # 'blubb' is both running and configured
        assert autosuspend.Processes(
            'foo', ['dummy', 'blubb', 'other']).check() is not None

    def test_ignore_no_such_process(self, monkeypatch):
        # disappearing processes must not break the check

        def data():
            return [self.RaisingProcess()]
        monkeypatch.setattr(psutil, 'process_iter', data)

        autosuspend.Processes('foo', ['dummy']).check()

    def test_non_matching_process(self, monkeypatch):

        def data():
            return [self.StubProcess('asdfasdf'),
                    self.StubProcess('nonmatching')]
        monkeypatch.setattr(psutil, 'process_iter', data)

        # none of the running processes is configured
        assert autosuspend.Processes(
            'foo', ['dummy', 'blubb', 'other']).check() is None

    def test_create(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
processes = foo, bar, narf''')
        assert autosuspend.Processes.create(
            'name', parser['section'])._processes == ['foo', 'bar', 'narf']

    def test_create_no_entry(self):
        # the processes option is mandatory
        parser = configparser.ConfigParser()
        parser.read_string('''[section]''')
        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.Processes.create('name', parser['section'])
193 | ||
194 | ||
class TestActiveConnection(object):
    """Tests for the ActiveConnection check inspecting open sockets."""

    # fake local endpoint used by the mocked psutil data below
    MY_PORT = 22
    MY_ADDRESS = '123.456.123.456'

    def test_smoke(self):
        # runs against real system data; only checks for absence of errors
        autosuspend.ActiveConnection('foo', [22]).check()

    def test_connected(self, monkeypatch):

        def addresses():
            return {'dummy': [psutil._common.snic(
                socket.AF_INET, self.MY_ADDRESS, '255.255.255.0',
                None, None)]}

        def connections():
            return [psutil._common.sconn(
                -1, socket.AF_INET, socket.SOCK_STREAM,
                (self.MY_ADDRESS, self.MY_PORT),
                ('42.42.42.42', 42), 'ESTABLISHED', None)]

        monkeypatch.setattr(psutil, 'net_if_addrs', addresses)
        monkeypatch.setattr(psutil, 'net_connections', connections)

        # an established connection on a monitored port must match
        assert autosuspend.ActiveConnection(
            'foo', [10, self.MY_PORT, 30]).check() is not None

    @pytest.mark.parametrize("connection", [
        # not my port
        psutil._common.sconn(-1,
                             socket.AF_INET, socket.SOCK_STREAM,
                             (MY_ADDRESS, 32),
                             ('42.42.42.42', 42),
                             'ESTABLISHED', None),
        # not my local address
        psutil._common.sconn(-1,
                             socket.AF_INET, socket.SOCK_STREAM,
                             ('33.33.33.33', MY_PORT),
                             ('42.42.42.42', 42),
                             'ESTABLISHED', None),
        # not in established state
        psutil._common.sconn(-1,
                             socket.AF_INET, socket.SOCK_STREAM,
                             (MY_ADDRESS, MY_PORT),
                             ('42.42.42.42', 42),
                             'NARF', None),
        # I am the client
        psutil._common.sconn(-1,
                             socket.AF_INET, socket.SOCK_STREAM,
                             ('42.42.42.42', 42),
                             (MY_ADDRESS, MY_PORT),
                             'NARF', None),
    ])
    def test_not_connected(self, monkeypatch, connection):

        def addresses():
            return {'dummy': [psutil._common.snic(
                socket.AF_INET, self.MY_ADDRESS, '255.255.255.0',
                None, None)]}

        def connections():
            return [connection]

        monkeypatch.setattr(psutil, 'net_if_addrs', addresses)
        monkeypatch.setattr(psutil, 'net_connections', connections)

        assert autosuspend.ActiveConnection(
            'foo', [10, self.MY_PORT, 30]).check() is None

    def test_create(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
ports = 10,20,30''')
        assert autosuspend.ActiveConnection.create(
            'name', parser['section'])._ports == set([10, 20, 30])

    def test_create_no_entry(self):
        # the ports option is mandatory
        parser = configparser.ConfigParser()
        parser.read_string('''[section]''')
        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.ActiveConnection.create('name', parser['section'])

    def test_create_no_number(self):
        # non-numeric port entries must be reported as config errors
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
ports = 10,20xx,30''')
        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.ActiveConnection.create('name', parser['section'])
283 | ||
284 | ||
class TestLoad(object):
    """Tests for the Load check comparing the 5 minute load average."""

    def test_below(self, monkeypatch):
        threshold = 1.34

        def fake_loadavg():
            # second entry (5 minute average) stays below the threshold
            return [0, threshold - 0.2, 0]
        monkeypatch.setattr(os, 'getloadavg', fake_loadavg)

        assert autosuspend.Load('foo', threshold).check() is None

    def test_above(self, monkeypatch):
        threshold = 1.34

        def fake_loadavg():
            # second entry (5 minute average) exceeds the threshold
            return [0, threshold + 0.2, 0]
        monkeypatch.setattr(os, 'getloadavg', fake_loadavg)

        assert autosuspend.Load('foo', threshold).check() is not None

    def test_create(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
threshold = 3.2''')
        created = autosuspend.Load.create('name', parser['section'])
        assert created._threshold == 3.2

    def test_create_no_number(self):
        # a non-numeric threshold must be reported as a config error
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
threshold = narf''')
        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.Load.create('name', parser['section'])
320 | ||
321 | ||
class TestMpd(object):
    """Tests for the Mpd check querying the music player daemon."""

    def test_playing(self, monkeypatch):

        check = autosuspend.Mpd('test', None, None, None)

        def get_state():
            return {'state': 'play'}
        monkeypatch.setattr(check, '_get_state', get_state)

        assert check.check() is not None

    def test_not_playing(self, monkeypatch):

        check = autosuspend.Mpd('test', None, None, None)

        def get_state():
            # paused playback counts as inactivity
            return {'state': 'pause'}
        monkeypatch.setattr(check, '_get_state', get_state)

        assert check.check() is None

    def test_correct_mpd_interaction(self, mocker):
        # verify the exact call sequence against the mpd client library
        import mpd

        mock_instance = mocker.MagicMock(spec=mpd.MPDClient)
        mock_instance.status.return_value = {'state': 'play'}
        timeout_property = mocker.PropertyMock()
        type(mock_instance).timeout = timeout_property
        mock = mocker.patch('mpd.MPDClient')
        mock.return_value = mock_instance

        host = 'foo'
        port = 42
        timeout = 17

        assert autosuspend.Mpd('name', host, port, timeout).check() is not None

        # the connection must be configured, queried and closed again
        timeout_property.assert_called_once_with(timeout)
        mock_instance.connect.assert_called_once_with(host, port)
        mock_instance.status.assert_called_once_with()
        mock_instance.close.assert_called_once_with()
        mock_instance.disconnect.assert_called_once_with()

    def test_handle_connection_errors(self):

        check = autosuspend.Mpd('test', None, None, None)

        def _get_state():
            raise ConnectionError()

        check._get_state = _get_state

        # connection problems are temporary, not severe
        with pytest.raises(autosuspend.TemporaryCheckError):
            check.check()

    def test_create(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
host = host
port = 1234
timeout = 12''')

        check = autosuspend.Mpd.create('name', parser['section'])

        assert check._host == 'host'
        assert check._port == 1234
        assert check._timeout == 12

    def test_create_port_no_number(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
host = host
port = string
timeout = 12''')

        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.Mpd.create('name', parser['section'])

    def test_create_timeout_no_number(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
host = host
port = 10
timeout = string''')

        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.Mpd.create('name', parser['section'])
410 | ||
411 | ||
class TestNetworkBandwidth(object):
    """Tests for the NetworkBandwidth check.

    NOTE(review): several tests here perform real HTTP requests to
    https://www.google.de/ to generate traffic and therefore require network
    access — consider marking them as integration tests.
    """

    def test_smoke(self):
        check = autosuspend.NetworkBandwidth(
            'name', psutil.net_if_addrs().keys(), 0, 0)
        # make some traffic
        requests.get('https://www.google.de/')
        assert check.check() is not None

    @pytest.fixture
    def mock_interfaces(self, mocker):
        # pretend the host has exactly these network interfaces
        mock = mocker.patch('psutil.net_if_addrs')
        mock.return_value = {'foo': None, 'bar': None, 'baz': None}

    def test_create(self, mock_interfaces):
        parser = configparser.ConfigParser()
        parser.read_string('''
[section]
interfaces = foo, baz
threshold_send = 200
threshold_receive = 300
''')
        check = autosuspend.NetworkBandwidth.create('name', parser['section'])
        assert set(check._interfaces) == set(['foo', 'baz'])
        assert check._threshold_send == 200
        assert check._threshold_receive == 300

    def test_create_default(self, mock_interfaces):
        # omitted thresholds fall back to 100 each
        parser = configparser.ConfigParser()
        parser.read_string('''
[section]
interfaces = foo, baz
''')
        check = autosuspend.NetworkBandwidth.create('name', parser['section'])
        assert set(check._interfaces) == set(['foo', 'baz'])
        assert check._threshold_send == 100
        assert check._threshold_receive == 100

    @pytest.mark.parametrize("config,error_match", [
        ('''
[section]
interfaces = foo, NOTEXIST
threshold_send = 200
threshold_receive = 300
''', r'does not exist'),
        ('''
[section]
threshold_send = 200
threshold_receive = 300
''', r'configuration key: \'interfaces\''),
        ('''
[section]
interfaces =
threshold_send = 200
threshold_receive = 300
''', r'No interfaces configured'),
        ('''
[section]
interfaces = foo, bar
threshold_send = xxx
''', r'Threshold in wrong format'),
        ('''
[section]
interfaces = foo, bar
threshold_receive = xxx
''', r'Threshold in wrong format'),
    ])
    def test_create_error(self, mock_interfaces, config, error_match):
        # each malformed config must raise with a matching error message
        parser = configparser.ConfigParser()
        parser.read_string(config)
        with pytest.raises(autosuspend.ConfigurationError, match=error_match):
            autosuspend.NetworkBandwidth.create('name', parser['section'])

    @pytest.mark.parametrize('send_threshold,receive_threshold,match', [
        (sys.float_info.max, 0, 'receive'),
        (0, sys.float_info.max, 'sending'),
    ])
    def test_with_activity(self, send_threshold, receive_threshold, match):
        check = autosuspend.NetworkBandwidth(
            'name', psutil.net_if_addrs().keys(),
            send_threshold, receive_threshold)
        # make some traffic
        requests.get('https://www.google.de/')
        res = check.check()
        assert res is not None
        # the result message must name the direction that exceeded its limit
        assert match in res

    def test_no_activity(self):
        check = autosuspend.NetworkBandwidth(
            'name', psutil.net_if_addrs().keys(),
            sys.float_info.max, sys.float_info.max)
        # make some traffic
        requests.get('https://www.google.de/')
        assert check.check() is None
506 | ||
507 | ||
class TestKodi(object):
    """Tests for the Kodi check using the JSON-RPC player API."""

    def test_playing(self, mocker):
        # an active player in the JSON-RPC reply indicates activity
        mock_reply = mocker.MagicMock()
        mock_reply.json.return_value = {
            "id": 1, "jsonrpc": "2.0",
            "result": [{"playerid": 0, "type": "audio"}]}
        mocker.patch('requests.get', return_value=mock_reply)

        assert autosuspend.Kodi('foo', 'url', 10).check() is not None

        mock_reply.json.assert_called_once_with()

    def test_not_playing(self, mocker):
        # an empty result list means no active player
        mock_reply = mocker.MagicMock()
        mock_reply.json.return_value = {
            "id": 1, "jsonrpc": "2.0", "result": []}
        mocker.patch('requests.get', return_value=mock_reply)

        assert autosuspend.Kodi('foo', 'url', 10).check() is None

        mock_reply.json.assert_called_once_with()

    def test_assertion_no_result(self, mocker):
        # a reply without a result key is treated as a temporary failure
        mock_reply = mocker.MagicMock()
        mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0"}
        mocker.patch('requests.get', return_value=mock_reply)

        with pytest.raises(autosuspend.TemporaryCheckError):
            autosuspend.Kodi('foo', 'url', 10).check()

    def test_request_error(self, mocker):
        # network failures are temporary, not severe
        mocker.patch('requests.get',
                     side_effect=requests.exceptions.RequestException())

        with pytest.raises(autosuspend.TemporaryCheckError):
            autosuspend.Kodi('foo', 'url', 10).check()

    def test_create(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
url = anurl
timeout = 12''')

        check = autosuspend.Kodi.create('name', parser['section'])

        assert check._url == 'anurl'
        assert check._timeout == 12

    def test_create_timeout_no_number(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
url = anurl
timeout = string''')

        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.Kodi.create('name', parser['section'])
565 | ||
566 | ||
class TestPing(object):
    """Tests for the Ping activity check."""

    def test_smoke(self, mocker):
        # No host replies (non-zero exit code) -> no activity.
        call_mock = mocker.patch('subprocess.call', return_value=1)

        hosts = ['abc', '129.123.145.42']
        assert autosuspend.Ping('name', hosts).check() is None

        # One ping invocation per host with the host as last argument.
        assert call_mock.call_count == len(hosts)
        for host, (args, _) in zip(hosts, call_mock.call_args_list):
            assert args[0][-1] == host

    def test_matching(self, mocker):
        # A successful ping (exit code 0) counts as activity.
        mocker.patch('subprocess.call', return_value=0)
        assert autosuspend.Ping('name', ['foo']).check() is not None

    def test_create_missing_hosts(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]''')
        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.Ping.create('name', parser['section'])

    def test_create_host_splitting(self):
        # The hosts option is a comma-separated list.
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
hosts=a,b,c''')
        ping = autosuspend.Ping.create('name', parser['section'])
        assert ping._hosts == ['a', 'b', 'c']
599 | ||
class TestXIdleTime(object):
    """Tests for the XIdleTime activity check."""

    def test_create_default(self):
        # Defaults: 600 s timeout, never-matching ignore patterns
        # (r'a^' cannot match anything) and socket-based session listing.
        parser = configparser.ConfigParser()
        parser.read_string('''[section]''')
        check = autosuspend.XIdleTime.create('name', parser['section'])
        assert check._timeout == 600
        assert check._ignore_process_re == re.compile(r'a^')
        assert check._ignore_users_re == re.compile(r'a^')
        assert check._provide_sessions == check._list_sessions_sockets

    def test_create(self):
        # All options explicitly configured, including the logind method.
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
timeout = 42
ignore_if_process = .*test
ignore_users = test.*test
method = logind''')
        check = autosuspend.XIdleTime.create('name', parser['section'])
        assert check._timeout == 42
        assert check._ignore_process_re == re.compile(r'.*test')
        assert check._ignore_users_re == re.compile(r'test.*test')
        assert check._provide_sessions == check._list_sessions_logind

    def test_create_no_int(self):
        # Non-numeric timeout is a configuration error.
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
timeout = string''')
        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.XIdleTime.create('name', parser['section'])

    def test_create_broken_process_re(self):
        # '[[a-9]' is an invalid regular expression.
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
ignore_if_process = [[a-9]''')
        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.XIdleTime.create('name', parser['section'])

    def test_create_broken_users_re(self):
        # Same invalid regex for the users option.
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
ignore_users = [[a-9]''')
        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.XIdleTime.create('name', parser['section'])

    def test_create_unknown_method(self):
        # Only known session enumeration methods are accepted.
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
method = asdfasdf''')
        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.XIdleTime.create('name', parser['section'])

    def test_list_sessions_logind(self, mocker):
        # Only sessions providing both a user Name and a numeric Display
        # are returned; the others are filtered out.
        mock = mocker.patch('autosuspend._list_logind_sessions')
        mock.return_value = [('c1', {'Name': 'foo'}),
                             ('c2', {'Display': 'asdfasf'}),
                             ('c3', {'Name': 'hello', 'Display': 'nonumber'}),
                             ('c4', {'Name': 'hello', 'Display': '3'})]

        parser = configparser.ConfigParser()
        parser.read_string('''[section]''')
        check = autosuspend.XIdleTime.create('name', parser['section'])
        assert check._list_sessions_logind() == [(3, 'hello')]

    def test_list_sessions_socket(self, mocker):
        # X sockets encode the display number in their file name; entries
        # with a non-numeric suffix are skipped.
        mock_glob = mocker.patch('glob.glob')
        mock_glob.return_value = ['/tmp/.X11-unix/X0',
                                  '/tmp/.X11-unix/X42',
                                  '/tmp/.X11-unix/Xnum']

        # Use this file's stat result so the sockets appear to be owned by
        # the user running the tests.
        stat_return = os.stat(os.path.realpath(__file__))
        this_user = pwd.getpwuid(stat_return.st_uid)
        mock_stat = mocker.patch('os.stat')
        mock_stat.return_value = stat_return

        mock_pwd = mocker.patch('pwd.getpwuid')
        mock_pwd.return_value = this_user

        parser = configparser.ConfigParser()
        parser.read_string('''[section]''')
        check = autosuspend.XIdleTime.create('name', parser['section'])
        assert check._list_sessions_sockets() == [(0, this_user.pw_name),
                                                  (42, this_user.pw_name)]
683 | ||
684 | ||
class TestExternalCommand(object):
    """Tests for the ExternalCommand activity check."""

    @staticmethod
    def _create(config):
        # Build a check instance from an inline configuration snippet.
        parser = configparser.ConfigParser()
        parser.read_string(config)
        return autosuspend.ExternalCommand.create('name', parser['section'])

    def test_create(self):
        check = self._create('''[section]
command = narf bla ''')
        assert check._command == 'narf bla'

    def test_create_no_command(self):
        with pytest.raises(autosuspend.ConfigurationError):
            self._create('''[section]''')

    def test_check(self, mocker):
        # A succeeding command means activity.
        call_mock = mocker.patch('subprocess.check_call')
        check = self._create('''[section]
command = foo bar''')
        assert check.check() is not None
        call_mock.assert_called_once_with('foo bar', shell=True)

    def test_check_no_match(self, mocker):
        # A failing command means no activity.
        call_mock = mocker.patch('subprocess.check_call')
        call_mock.side_effect = subprocess.CalledProcessError(2, 'foo bar')
        check = self._create('''[section]
command = foo bar''')
        assert check.check() is None
        call_mock.assert_called_once_with('foo bar', shell=True)
718 | ||
719 | ||
class TestXPath(object):
    """Tests for the XPath activity check.

    Set-up code is deliberately kept outside of ``pytest.raises`` blocks so
    that a failure while preparing mocks or configuration cannot be
    mistaken for the expected exception.
    """

    def test_matching(self, mocker):
        mock_reply = mocker.MagicMock()
        text_property = mocker.PropertyMock()
        type(mock_reply).text = text_property
        text_property.return_value = "<a></a>"
        mock_method = mocker.patch('requests.get', return_value=mock_reply)

        url = 'nourl'
        assert autosuspend.XPath('foo', '/a', url, 5).check() is not None

        mock_method.assert_called_once_with(url, timeout=5)
        text_property.assert_called_once_with()

    def test_not_matching(self, mocker):
        mock_reply = mocker.MagicMock()
        text_property = mocker.PropertyMock()
        type(mock_reply).text = text_property
        text_property.return_value = "<a></a>"
        mocker.patch('requests.get', return_value=mock_reply)

        assert autosuspend.XPath('foo', '/b', 'nourl', 5).check() is None

    def test_broken_xml(self, mocker):
        # Unparsable body content is reported as a temporary error.
        mock_reply = mocker.MagicMock()
        text_property = mocker.PropertyMock()
        type(mock_reply).text = text_property
        text_property.return_value = "//broken"
        mocker.patch('requests.get', return_value=mock_reply)

        with pytest.raises(autosuspend.TemporaryCheckError):
            autosuspend.XPath('foo', '/b', 'nourl', 5).check()

    def test_xpath_prevalidation(self):
        # Invalid expressions are rejected at creation time already.
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
xpath=|34/ad
url=nourl''')
        with pytest.raises(autosuspend.ConfigurationError,
                           match=r'^Invalid xpath.*'):
            autosuspend.XPath.create('name', parser['section'])

    @pytest.mark.parametrize('entry,', ['xpath', 'url'])
    def test_missing_config_entry(self, entry):
        # Both mandatory options must be present.
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
xpath=/valid
url=nourl''')
        del parser['section'][entry]
        with pytest.raises(autosuspend.ConfigurationError,
                           match=r"^No '" + entry + "'.*"):
            autosuspend.XPath.create('name', parser['section'])

    def test_create_default_timeout(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
xpath=/valid
url=nourl''')
        check = autosuspend.XPath.create('name', parser['section'])
        assert check._timeout == 5

    def test_create_timeout(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
xpath=/valid
url=nourl
timeout=42''')
        check = autosuspend.XPath.create('name', parser['section'])
        assert check._timeout == 42

    def test_create_invalid_timeout(self):
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
xpath=/valid
url=nourl
timeout=xx''')
        with pytest.raises(autosuspend.ConfigurationError,
                           match=r"^Configuration error .*"):
            autosuspend.XPath.create('name', parser['section'])

    def test_requests_exception(self, mocker):
        # Network failures are reported as temporary errors.
        mock_method = mocker.patch('requests.get')
        mock_method.side_effect = requests.exceptions.ReadTimeout()

        with pytest.raises(autosuspend.TemporaryCheckError):
            autosuspend.XPath('foo', '/a', 'asdf', 5).check()
807 | ||
808 | ||
class TestLogindSessionsIdle(object):
    """Tests for the LogindSessionsIdle activity check."""

    @staticmethod
    def _create(config):
        # Build a check instance from an inline configuration snippet.
        parser = configparser.ConfigParser()
        parser.read_string(config)
        return autosuspend.LogindSessionsIdle.create(
            'name', parser['section'])

    def test_smoke(self):
        check = autosuspend.LogindSessionsIdle(
            'test', ['tty', 'x11', 'wayland'], ['active', 'online'])
        assert check._types == ['tty', 'x11', 'wayland']
        assert check._states == ['active', 'online']
        try:
            # only run the test if the dbus module is available (not on travis)
            import dbus  # noqa: F401
            check.check()
        except ImportError:
            pass

    def test_configure_defaults(self):
        check = self._create('[section]')
        assert check._types == ['tty', 'x11', 'wayland']
        assert check._states == ['active', 'online']

    def test_configure_types(self):
        # Comma-separated values with arbitrary spacing are accepted.
        check = self._create('''[section]
types=test, bla,foo''')
        assert check._types == ['test', 'bla', 'foo']

    def test_configure_states(self):
        check = self._create('''[section]
states=test, bla,foo''')
        assert check._states == ['test', 'bla', 'foo']
846 | ||
847 | ||
def test_execute_suspend(mocker):
    """The configured suspend command is executed through the shell."""
    check_call_mock = mocker.patch('subprocess.check_call')
    suspend_command = ['foo', 'bar']
    autosuspend.execute_suspend(suspend_command)
    check_call_mock.assert_called_once_with(suspend_command, shell=True)
853 | ||
854 | ||
def test_execute_suspend_call_exception(mocker):
    """A failing suspend command is logged but does not propagate."""
    suspend_command = ['foo', 'bar']
    check_call_mock = mocker.patch(
        'subprocess.check_call',
        side_effect=subprocess.CalledProcessError(2, suspend_command))
    warning_spy = mocker.spy(autosuspend._logger, 'warning')

    autosuspend.execute_suspend(suspend_command)

    check_call_mock.assert_called_once_with(suspend_command, shell=True)
    assert warning_spy.call_count == 1
866 | ||
867 | ||
def test_configure_logging_debug(mocker):
    """Passing True selects DEBUG level logging."""
    basic_config_mock = mocker.patch('logging.basicConfig')
    autosuspend.configure_logging(True)
    basic_config_mock.assert_called_once_with(level=logging.DEBUG)
874 | ||
875 | ||
def test_configure_logging_standard(mocker):
    """Passing False selects WARNING level logging."""
    basic_config_mock = mocker.patch('logging.basicConfig')
    autosuspend.configure_logging(False)
    basic_config_mock.assert_called_once_with(level=logging.WARNING)
882 | ||
883 | ||
def test_configure_logging_file(mocker):
    """Non-boolean arguments are interpreted as logging config files."""
    file_config_mock = mocker.patch('logging.config.fileConfig')

    # anything that is not a boolean is treated like a file
    autosuspend.configure_logging(42)

    file_config_mock.assert_called_once_with(42)
891 | ||
892 | ||
def test_configure_logging_file_fallback(mocker):
    """An unreadable config file falls back to WARNING level basic config."""
    file_config_mock = mocker.patch('logging.config.fileConfig',
                                    side_effect=RuntimeError())
    basic_config_mock = mocker.patch('logging.basicConfig')

    # anything that is not a boolean is treated like a file
    autosuspend.configure_logging(42)

    file_config_mock.assert_called_once_with(42)
    basic_config_mock.assert_called_once_with(level=logging.WARNING)
903 | ||
904 | ||
def test_set_up_checks(mocker):
    """An enabled check section instantiates the named class."""
    mpd_mock = mocker.patch('autosuspend.Mpd')
    mpd_mock.create.return_value = mocker.MagicMock(spec=autosuspend.Check)

    config = configparser.ConfigParser()
    config.read_string('''[check.Foo]
class = Mpd
enabled = True''')

    autosuspend.set_up_checks(config)

    mpd_mock.create.assert_called_once_with('Foo', config['check.Foo'])
917 | ||
918 | ||
def test_set_up_checks_not_enabled(mocker):
    """With the only check disabled, configuration fails."""
    mpd_mock = mocker.patch('autosuspend.Mpd')
    mpd_mock.create.return_value = mocker.MagicMock(spec=autosuspend.Check)

    config = configparser.ConfigParser()
    config.read_string('''[check.Foo]
class = Mpd
enabled = False''')

    with pytest.raises(autosuspend.ConfigurationError):
        autosuspend.set_up_checks(config)
930 | ||
931 | ||
def test_set_up_checks_no_such_class(mocker):
    """Referencing an unknown check class is a configuration error."""
    config = configparser.ConfigParser()
    config.read_string('''[check.Foo]
class = FooBarr
enabled = True''')
    with pytest.raises(autosuspend.ConfigurationError):
        autosuspend.set_up_checks(config)
939 | ||
940 | ||
def test_set_up_checks_not_a_check(mocker):
    """Instances that are no Check subclasses are rejected."""
    mpd_mock = mocker.patch('autosuspend.Mpd')
    # No spec here: the created object is deliberately not a Check.
    mpd_mock.create.return_value = mocker.MagicMock()

    config = configparser.ConfigParser()
    config.read_string('''[check.Foo]
class = Mpd
enabled = True''')

    with pytest.raises(autosuspend.ConfigurationError):
        autosuspend.set_up_checks(config)

    mpd_mock.create.assert_called_once_with('Foo', config['check.Foo'])
954 | ||
955 | ||
class TestExecuteChecks(object):
    """Tests for execute_checks."""

    def test_no_checks(self, mocker):
        # Without any checks there can be no matching one.
        assert autosuspend.execute_checks(
            [], False, mocker.MagicMock()) is False

    def test_matches(self, mocker):
        # A single matching check yields True and is queried exactly once.
        matching_check = mocker.MagicMock(spec=autosuspend.Check)
        matching_check.name = 'foo'
        matching_check.check.return_value = "matches"
        assert autosuspend.execute_checks(
            [matching_check], False, mocker.MagicMock()) is True
        matching_check.check.assert_called_once_with()

    def test_only_first_called(self, mocker):
        # With all_checks=False, evaluation stops at the first match.
        matching_check = mocker.MagicMock(spec=autosuspend.Check)
        matching_check.name = 'foo'
        matching_check.check.return_value = "matches"
        second_check = mocker.MagicMock()
        second_check.name = 'bar'
        second_check.check.return_value = "matches"

        assert autosuspend.execute_checks(
            [matching_check, second_check],
            False,
            mocker.MagicMock()) is True
        matching_check.check.assert_called_once_with()
        second_check.check.assert_not_called()

    def test_all_called(self, mocker):
        # With all_checks=True, every check runs even after a match.
        matching_check = mocker.MagicMock(spec=autosuspend.Check)
        matching_check.name = 'foo'
        matching_check.check.return_value = "matches"
        second_check = mocker.MagicMock()
        second_check.name = 'bar'
        second_check.check.return_value = "matches"

        assert autosuspend.execute_checks(
            [matching_check, second_check],
            True,
            mocker.MagicMock()) is True
        matching_check.check.assert_called_once_with()
        second_check.check.assert_called_once_with()

    def test_ignore_temporary_errors(self, mocker):
        # A TemporaryCheckError from one check must not abort the loop.
        matching_check = mocker.MagicMock(spec=autosuspend.Check)
        matching_check.name = 'foo'
        matching_check.check.side_effect = autosuspend.TemporaryCheckError()
        second_check = mocker.MagicMock()
        second_check.name = 'bar'
        second_check.check.return_value = "matches"

        assert autosuspend.execute_checks(
            [matching_check, second_check],
            False,
            mocker.MagicMock()) is True
        matching_check.check.assert_called_once_with()
        second_check.check.assert_called_once_with()
0 | [general] | |
1 | interval = 2 | |
2 | idle_time = 5 | |
3 | suspend_cmd = touch would_suspend | |
4 | woke_up_file = /tmp/test-woke-up-1 | |
5 | ||
6 | [check.ExternalCommand] | |
7 | enabled = True | |
8 | command = true |
0 | ||
1 | Samba version 4.7.0 | |
2 | PID Username Group Machine Protocol Version Encryption Signing | |
3 | ---------------------------------------------------------------------------------------------------------------------------------------- |
0 | ||
1 | Samba version 3.5.1 | |
2 | PID Username Group Machine | |
3 | ------------------------------------------------------------------- | |
4 | 14944 <uid> it 131.169.214.117 (131.169.214.117) | |
5 | 14944 <uid> it 131.169.214.117 (131.169.214.117) |
0 | [general] | |
1 | interval = 2 | |
2 | idle_time = 5 | |
3 | suspend_cmd = touch would_suspend | |
4 | woke_up_file = /tmp/test-woke-up-2 | |
5 | ||
6 | [check.ExternalCommand] | |
7 | enabled = True | |
8 | command = false |
0 | import os | |
1 | import os.path | |
2 | ||
3 | import pytest | |
4 | ||
5 | import autosuspend | |
6 | ||
7 | ||
8 | ROOT = os.path.dirname(os.path.realpath(__file__)) | |
9 | ||
10 | SUSPENSION_FILE = 'would_suspend' | |
11 | ||
12 | ||
@pytest.fixture
def suspension_file():
    """Ensure the suspension marker file is absent before and after a test.

    Yields an object with an ``exists()`` method telling whether the
    watched marker file has been created by the code under test.
    """

    def remove_if_present():
        # The file legitimately may not exist; only OSError from the
        # removal itself is ignored (was: unused `as error` binding).
        try:
            os.remove(SUSPENSION_FILE)
        except OSError:
            pass

    remove_if_present()

    class SuspensionFileFixture(object):

        def exists(self):
            return os.path.exists(SUSPENSION_FILE)

    yield SuspensionFileFixture()

    remove_if_present()
31 | ||
32 | ||
def test_no_suspend_if_matching(suspension_file):
    """A matching activity check must prevent the suspend command."""
    config_path = os.path.join(ROOT, 'test_data', 'dont_suspend.conf')
    autosuspend.main(
        ['-c', config_path, '-r', '10', '-l'])

    assert not suspension_file.exists()
42 | ||
43 | ||
def test_suspend(suspension_file):
    """Without any matching check, the suspend command must run."""
    config_path = os.path.join(ROOT, 'test_data', 'would_suspend.conf')
    autosuspend.main(
        ['-c', config_path, '-r', '10', '-l'])

    assert suspension_file.exists()
0 | import abc | |
1 | ||
2 | ||
class CheckTest(abc.ABC):
    """Base class providing tests common to all Check implementations."""

    @abc.abstractmethod
    def create_instance(self, name):
        """Return a check instance carrying the given name."""

    def test_name_passing(self):
        expected = 'checktestname'
        instance = self.create_instance(expected)
        assert instance.name == expected
0 | import base64 | |
1 | import http.server | |
2 | import os | |
3 | import threading | |
4 | ||
5 | import pytest | |
6 | ||
7 | ||
class AuthHandler(http.server.SimpleHTTPRequestHandler):
    """Request handler protecting GET requests with basic authentication.

    Valid credentials are the hard-coded pair user/pass.
    """

    def do_HEAD(self):  # noqa: required name
        # HEAD requests are always answered without authentication.
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()

    def do_AUTHHEAD(self):  # noqa: required name
        # Send a 401 challenge asking the client for basic auth.
        self.send_response(401)
        self.send_header('WWW-Authenticate', 'Basic realm=\"Test\"')
        self.send_header('Content-type', 'text/html')
        self.end_headers()

    def do_GET(self):  # noqa: required name
        # Expected header value: b'Basic ' + base64('user:pass').
        key = '{}:{}'.format('user', 'pass').encode('ascii')
        key = base64.b64encode(key)
        valid_header = b'Basic ' + key

        auth_header = self.headers.get(
            'Authorization', '').encode('ascii')

        if self.headers['Authorization'] is None:
            self.do_AUTHHEAD()
            self.wfile.write(b'no auth header received')
        elif auth_header == valid_header:
            # Correct credentials: serve the requested file normally.
            http.server.SimpleHTTPRequestHandler.do_GET(self)
        else:
            # Wrong credentials: challenge again and echo what was sent.
            self.do_AUTHHEAD()
            self.wfile.write(auth_header)
            self.wfile.write(b'not authenticated')
37 | ||
38 | ||
def _serve(request, handler):
    """Serve a directory over HTTP on an ephemeral port and yield the server.

    The directory defaults to ``test_data`` next to this file; a fixture
    ``request`` carrying a ``param`` attribute overrides it.  The yielded
    server is augmented with a ``resource_address`` helper mapping a
    resource name to a full URL on the chosen port.
    """
    previous_cwd = os.getcwd()

    target_dir = os.path.join(os.path.dirname(__file__), 'test_data')
    if request and hasattr(request, 'param'):
        target_dir = request.param
    os.chdir(target_dir)

    server = http.server.HTTPServer(('localhost', 0), handler)

    def resource_address(resource: str) -> str:
        return 'http://localhost:{}/{}'.format(
            server.server_address[1], resource)

    server.resource_address = resource_address

    # Daemon thread so that an aborted consumer cannot keep the test
    # process alive with a still-running server.
    threading.Thread(target=server.serve_forever, daemon=True).start()

    try:
        yield server
    finally:
        # Always stop the server and restore the working directory, even
        # when the consuming test raised.
        server.shutdown()
        os.chdir(previous_cwd)
61 | ||
62 | ||
@pytest.fixture(scope='session')
def stub_server(request):
    # Unauthenticated HTTP server serving test_data (or request.param).
    yield from _serve(request, http.server.SimpleHTTPRequestHandler)
66 | ||
67 | ||
@pytest.fixture(scope='session')
def stub_auth_server(request):
    # Same server but requiring basic authentication (user/pass).
    yield from _serve(request, AuthHandler)
0 | import configparser | |
1 | from datetime import datetime, timedelta, timezone | |
2 | import logging | |
3 | import subprocess | |
4 | ||
5 | import pytest | |
6 | ||
7 | import autosuspend | |
8 | ||
9 | ||
class TestExecuteSuspend:
    """Tests for execute_suspend."""

    def test_smoke(self, mocker):
        # The command is passed to the shell unchanged.
        check_call_mock = mocker.patch('subprocess.check_call')
        suspend_command = ['foo', 'bar']
        autosuspend.execute_suspend(suspend_command, None)
        check_call_mock.assert_called_once_with(suspend_command, shell=True)

    def test_call_exception(self, mocker):
        # A failing command is logged as a warning but not re-raised.
        suspend_command = ['foo', 'bar']
        check_call_mock = mocker.patch(
            'subprocess.check_call',
            side_effect=subprocess.CalledProcessError(2, suspend_command))
        warning_spy = mocker.spy(autosuspend._logger, 'warning')

        autosuspend.execute_suspend(suspend_command, None)

        check_call_mock.assert_called_once_with(suspend_command, shell=True)
        assert warning_spy.call_count == 1
29 | ||
30 | ||
class TestScheduleWakeup:
    """Tests for schedule_wakeup."""

    def test_smoke(self, mocker):
        # Both the unix timestamp and the ISO representation are expanded
        # into the command template.
        check_call_mock = mocker.patch('subprocess.check_call')
        wakeup_at = datetime.fromtimestamp(
            1525270801, timezone(timedelta(hours=4)))
        autosuspend.schedule_wakeup('echo {timestamp:.0f} {iso}', wakeup_at)
        check_call_mock.assert_called_once_with(
            'echo 1525270801 2018-05-02T18:20:01+04:00', shell=True)

    def test_call_exception(self, mocker):
        # A failing command is logged as a warning but not re-raised.
        check_call_mock = mocker.patch(
            'subprocess.check_call',
            side_effect=subprocess.CalledProcessError(2, "foo"))
        warning_spy = mocker.spy(autosuspend._logger, 'warning')

        autosuspend.schedule_wakeup("foo", datetime.now(timezone.utc))

        check_call_mock.assert_called_once_with("foo", shell=True)
        assert warning_spy.call_count == 1
50 | ||
51 | ||
class TestConfigureLogging:
    """Tests for configure_logging."""

    def test_debug(self, mocker):
        # True selects DEBUG level logging.
        basic_config_mock = mocker.patch('logging.basicConfig')
        autosuspend.configure_logging(True)
        basic_config_mock.assert_called_once_with(level=logging.DEBUG)

    def test_standard(self, mocker):
        # False selects WARNING level logging.
        basic_config_mock = mocker.patch('logging.basicConfig')
        autosuspend.configure_logging(False)
        basic_config_mock.assert_called_once_with(level=logging.WARNING)

    def test_file(self, mocker):
        file_config_mock = mocker.patch('logging.config.fileConfig')

        # anything that is not a boolean is treated like a file
        autosuspend.configure_logging(42)

        file_config_mock.assert_called_once_with(42)

    def test_file_fallback(self, mocker):
        # An unreadable config file falls back to WARNING basic config.
        file_config_mock = mocker.patch('logging.config.fileConfig',
                                        side_effect=RuntimeError())
        basic_config_mock = mocker.patch('logging.basicConfig')

        # anything that is not a boolean is treated like a file
        autosuspend.configure_logging(42)

        file_config_mock.assert_called_once_with(42)
        basic_config_mock.assert_called_once_with(level=logging.WARNING)
86 | ||
87 | ||
class TestSetUpChecks:
    """Tests for set_up_checks."""

    def test_smoke(self, mocker):
        # A short class name is resolved inside the given internal module.
        mock_class = mocker.patch('autosuspend.checks.activity.Mpd')
        mock_class.create.return_value = mocker.MagicMock(
            spec=autosuspend.checks.Activity)

        parser = configparser.ConfigParser()
        parser.read_string('''[check.Foo]
class = Mpd
enabled = True''')

        autosuspend.set_up_checks(parser, 'check', 'activity',
                                  autosuspend.Activity)

        mock_class.create.assert_called_once_with('Foo', parser['check.Foo'])

    def test_external_class(self, mocker):
        # A dotted class name is imported from the named external module.
        mock_class = mocker.patch('os.path.TestCheck', create=True)
        mock_class.create.return_value = mocker.MagicMock(
            spec=autosuspend.checks.Activity)
        parser = configparser.ConfigParser()
        parser.read_string('''[check.Foo]
class = os.path.TestCheck
enabled = True''')

        autosuspend.set_up_checks(parser, 'check', 'activity',
                                  autosuspend.Activity)

        mock_class.create.assert_called_once_with('Foo', parser['check.Foo'])

    def test_not_enabled(self, mocker):
        # A disabled check is skipped silently unless error_none is set.
        mock_class = mocker.patch('autosuspend.checks.activity.Mpd')
        mock_class.create.return_value = mocker.MagicMock(
            spec=autosuspend.Activity)

        parser = configparser.ConfigParser()
        parser.read_string('''[check.Foo]
class = Mpd
enabled = False''')

        autosuspend.set_up_checks(parser, 'check', 'activity',
                                  autosuspend.Activity)

        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.set_up_checks(parser, 'check', 'activity',
                                      autosuspend.Activity,
                                      error_none=True)

    def test_no_such_class(self, mocker):
        # An unknown class name is a configuration error.
        parser = configparser.ConfigParser()
        parser.read_string('''[check.Foo]
class = FooBarr
enabled = True''')
        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.set_up_checks(parser, 'check', 'activity',
                                      autosuspend.Activity)

    def test_not_a_check(self, mocker):
        # Created objects that do not subclass the target type are rejected
        # (no spec on the mock, so it is not an Activity).
        mock_class = mocker.patch('autosuspend.checks.activity.Mpd')
        mock_class.create.return_value = mocker.MagicMock()

        parser = configparser.ConfigParser()
        parser.read_string('''[check.Foo]
class = Mpd
enabled = True''')

        with pytest.raises(autosuspend.ConfigurationError):
            autosuspend.set_up_checks(parser, 'check', 'activity',
                                      autosuspend.Activity)

        mock_class.create.assert_called_once_with('Foo', parser['check.Foo'])
160 | ||
161 | ||
class TestExecuteChecks:
    """Tests for execute_checks."""

    def test_no_checks(self, mocker):
        # Without any checks there can be no matching one.
        assert autosuspend.execute_checks(
            [], False, mocker.MagicMock()) is False

    def test_matches(self, mocker):
        # A single matching check yields True and is queried exactly once.
        matching_check = mocker.MagicMock(
            spec=autosuspend.Activity)
        matching_check.name = 'foo'
        matching_check.check.return_value = "matches"
        assert autosuspend.execute_checks(
            [matching_check], False, mocker.MagicMock()) is True
        matching_check.check.assert_called_once_with()

    def test_only_first_called(self, mocker):
        # With all_checks=False, evaluation stops at the first match.
        matching_check = mocker.MagicMock(
            spec=autosuspend.Activity)
        matching_check.name = 'foo'
        matching_check.check.return_value = "matches"
        second_check = mocker.MagicMock()
        second_check.name = 'bar'
        second_check.check.return_value = "matches"

        assert autosuspend.execute_checks(
            [matching_check, second_check],
            False,
            mocker.MagicMock()) is True
        matching_check.check.assert_called_once_with()
        second_check.check.assert_not_called()

    def test_all_called(self, mocker):
        # With all_checks=True, every check runs even after a match.
        matching_check = mocker.MagicMock(
            spec=autosuspend.Activity)
        matching_check.name = 'foo'
        matching_check.check.return_value = "matches"
        second_check = mocker.MagicMock()
        second_check.name = 'bar'
        second_check.check.return_value = "matches"

        assert autosuspend.execute_checks(
            [matching_check, second_check],
            True,
            mocker.MagicMock()) is True
        matching_check.check.assert_called_once_with()
        second_check.check.assert_called_once_with()

    def test_ignore_temporary_errors(self, mocker):
        # A TemporaryCheckError from one check must not abort the loop.
        matching_check = mocker.MagicMock(
            spec=autosuspend.Activity)
        matching_check.name = 'foo'
        matching_check.check.side_effect = autosuspend.TemporaryCheckError()
        second_check = mocker.MagicMock()
        second_check.name = 'bar'
        second_check.check.return_value = "matches"

        assert autosuspend.execute_checks(
            [matching_check, second_check],
            False,
            mocker.MagicMock()) is True
        matching_check.check.assert_called_once_with()
        second_check.check.assert_called_once_with()
224 | ||
225 | ||
class TestExecuteWakeups:
    """Tests for autosuspend.execute_wakeups."""

    @staticmethod
    def _wakeup_returning(mocker, result):
        # Helper: a Wakeup mock whose check() yields the given value.
        mock = mocker.MagicMock(spec=autosuspend.Wakeup)
        mock.check.return_value = result
        return mock

    def test_no_wakeups(self, mocker):
        assert autosuspend.execute_wakeups(
            [], 0, mocker.MagicMock()) is None

    def test_all_none(self, mocker):
        wakeup = self._wakeup_returning(mocker, None)
        assert autosuspend.execute_wakeups(
            [wakeup], 0, mocker.MagicMock()) is None

    def test_basic_return(self, mocker):
        now = datetime.now(timezone.utc)
        expected = now + timedelta(seconds=10)
        wakeup = self._wakeup_returning(mocker, expected)
        assert autosuspend.execute_wakeups(
            [wakeup], now, mocker.MagicMock()) == expected

    def test_soonest_taken(self, mocker):
        """The earliest of all reported wake up times wins."""
        reference = datetime.now(timezone.utc)
        earliest = reference + timedelta(seconds=10)
        candidates = [
            self._wakeup_returning(mocker, reference + timedelta(seconds=20)),
            self._wakeup_returning(mocker, earliest),
            self._wakeup_returning(mocker, reference + timedelta(seconds=15)),
        ]
        assert autosuspend.execute_wakeups(
            candidates, reference, mocker.MagicMock()) == earliest

    def test_ignore_temporary_errors(self, mocker):
        """Failing wake up checks are skipped, the rest still count."""
        now = datetime.now(timezone.utc)
        failing = mocker.MagicMock(spec=autosuspend.Wakeup)
        failing.check.side_effect = autosuspend.TemporaryCheckError()
        candidates = [
            self._wakeup_returning(mocker, now + timedelta(seconds=20)),
            failing,
            self._wakeup_returning(mocker, now + timedelta(seconds=10)),
        ]
        assert autosuspend.execute_wakeups(
            candidates, now,
            mocker.MagicMock()) == now + timedelta(seconds=10)

    def test_ignore_too_early(self, mocker):
        """Wake up times at or before the reference time are discarded."""
        now = datetime.now(timezone.utc)
        wakeup = self._wakeup_returning(mocker, now)
        assert autosuspend.execute_wakeups(
            [wakeup], now, mocker.MagicMock()) is None
        assert autosuspend.execute_wakeups(
            [wakeup], now + timedelta(seconds=1), mocker.MagicMock()) is None
290 | ||
291 | ||
class TestNotifySuspend:
    """Tests for autosuspend.notify_suspend."""

    @staticmethod
    def _wakeup_time():
        # Fixed instant in a UTC+4 zone so the formatted output is stable.
        return datetime.fromtimestamp(1525270801,
                                      timezone(timedelta(hours=4)))

    def test_date(self, mocker):
        """With a wake up time, the templated command is used."""
        mock = mocker.patch('subprocess.check_call')
        autosuspend.notify_suspend(
            'echo {timestamp:.0f} {iso}', 'not this', self._wakeup_time())
        mock.assert_called_once_with(
            'echo 1525270801 2018-05-02T18:20:01+04:00', shell=True)

    def test_date_no_command(self, mocker):
        mock = mocker.patch('subprocess.check_call')
        autosuspend.notify_suspend(None, 'not this', self._wakeup_time())
        mock.assert_not_called()

    def test_no_date(self, mocker):
        """Without a wake up time, the plain command is used."""
        mock = mocker.patch('subprocess.check_call')
        autosuspend.notify_suspend(
            'echo {timestamp:.0f} {iso}', 'echo nothing', None)
        mock.assert_called_once_with('echo nothing', shell=True)

    def test_no_date_no_command(self, mocker):
        mock = mocker.patch('subprocess.check_call')
        autosuspend.notify_suspend(
            'echo {timestamp:.0f} {iso}', None, None)
        mock.assert_not_called()

    def test_ignore_execution_errors(self, mocker):
        """Failures of the notification command must not propagate."""
        mock = mocker.patch('subprocess.check_call')
        mock.side_effect = subprocess.CalledProcessError(2, 'cmd')
        autosuspend.notify_suspend(None, 'not this', self._wakeup_time())
325 | ||
326 | ||
def test_notify_and_suspend(mocker):
    """The notification command must run before the suspend command."""
    mock = mocker.patch('subprocess.check_call')
    wakeup = datetime.fromtimestamp(1525270801, timezone(timedelta(hours=4)))

    autosuspend.notify_and_suspend(
        'echo suspend',
        'echo notify {timestamp:.0f} {iso}',
        'not this',
        wakeup)

    expected = [
        mocker.call('echo notify 1525270801 2018-05-02T18:20:01+04:00',
                    shell=True),
        mocker.call('echo suspend', shell=True),
    ]
    mock.assert_has_calls(expected)
338 | ||
339 | ||
class _StubCheck(autosuspend.Activity):
    """Activity stub that always reports a fixed match result."""

    @classmethod
    def create(cls, name, config):
        pass

    def __init__(self, name, match):
        super().__init__(name)
        # Value reported by check(): a string-like match or None.
        self.match = match

    def check(self):
        return self.match
352 | ||
353 | ||
@pytest.fixture
def sleep_fn():
    """Callable recording whether and with which argument it was invoked."""

    class Recorder:

        def __init__(self):
            self.reset()

        def reset(self):
            self.called = False
            self.call_arg = None

        def __call__(self, arg):
            self.called = True
            self.call_arg = arg

    return Recorder()
372 | ||
373 | ||
@pytest.fixture
def wakeup_fn():
    """Callable remembering the argument of its most recent invocation."""

    class Recorder:

        def __init__(self):
            self.reset()

        def reset(self):
            self.call_arg = None

        def __call__(self, arg):
            self.call_arg = arg

    return Recorder()
389 | ||
390 | ||
class TestProcessor:
    """Tests for the iteration logic of autosuspend.Processor.

    NOTE(review): Processor is constructed positionally; from the
    assertions below the arguments appear to be (activities, wakeups,
    idle_time, min_sleep_time, wakeup_delta, sleep_fn, wakeup_fn,
    all_activities) -- confirm against the Processor definition.
    """

    def test_smoke(self, sleep_fn, wakeup_fn):
        """Sleep triggers only once the idle time is strictly exceeded."""
        processor = autosuspend.Processor([_StubCheck('stub', None)],
                                          [],
                                          2,
                                          0,
                                          0,
                                          sleep_fn,
                                          wakeup_fn,
                                          False)
        # should init the timestamp initially
        start = datetime.now(timezone.utc)
        processor.iteration(start, False)
        assert not sleep_fn.called
        # not yet reached
        processor.iteration(start + timedelta(seconds=1), False)
        assert not sleep_fn.called
        # time must be greater, not equal
        processor.iteration(start + timedelta(seconds=2), False)
        assert not sleep_fn.called
        # go to sleep
        processor.iteration(start + timedelta(seconds=3), False)
        assert sleep_fn.called
        # no wakeup scheduled, so sleep_fn receives no wake up time
        assert sleep_fn.call_arg is None

        sleep_fn.reset()

        # second iteration to check that the idle time got reset
        processor.iteration(start + timedelta(seconds=4), False)
        assert not sleep_fn.called
        # go to sleep again
        processor.iteration(start + timedelta(seconds=6, milliseconds=2),
                            False)
        assert sleep_fn.called

        assert wakeup_fn.call_arg is None

    def test_just_woke_up_handling(self, sleep_fn, wakeup_fn):
        """The just_woke_up flag restarts idle tracking instead of sleeping."""
        processor = autosuspend.Processor([_StubCheck('stub', None)],
                                          [],
                                          2,
                                          0,
                                          0,
                                          sleep_fn,
                                          wakeup_fn,
                                          False)

        # should init the timestamp initially
        start = datetime.now(timezone.utc)
        processor.iteration(start, False)
        assert not sleep_fn.called
        # should go to sleep but we just woke up
        processor.iteration(start + timedelta(seconds=3), True)
        assert not sleep_fn.called
        # start over again
        processor.iteration(start + timedelta(seconds=4), False)
        assert not sleep_fn.called
        # not yet sleeping
        processor.iteration(start + timedelta(seconds=6), False)
        assert not sleep_fn.called
        # now go to sleep
        processor.iteration(start + timedelta(seconds=7), False)
        assert sleep_fn.called

        assert wakeup_fn.call_arg is None

    def test_wakeup_blocks_sleep(self, mocker, sleep_fn, wakeup_fn):
        """A wake up closer than the minimum sleep time prevents suspending."""
        start = datetime.now(timezone.utc)
        wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
        # wake up in 6 s, but the minimum sleep time below is 10 s
        wakeup.check.return_value = start + timedelta(seconds=6)
        processor = autosuspend.Processor([_StubCheck('stub', None)],
                                          [wakeup],
                                          2,
                                          10,
                                          0,
                                          sleep_fn,
                                          wakeup_fn,
                                          False)

        # init iteration
        processor.iteration(start, False)
        # no activity and enough time passed to start sleeping
        processor.iteration(start + timedelta(seconds=3), False)
        assert not sleep_fn.called
        assert wakeup_fn.call_arg is None

    def test_wakeup_scheduled(self, mocker, sleep_fn, wakeup_fn):
        """A wake up far enough away is scheduled and passed to sleep_fn."""
        start = datetime.now(timezone.utc)
        wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
        wakeup.check.return_value = start + timedelta(seconds=25)
        processor = autosuspend.Processor([_StubCheck('stub', None)],
                                          [wakeup],
                                          2,
                                          10,
                                          0,
                                          sleep_fn,
                                          wakeup_fn,
                                          False)

        # init iteration
        processor.iteration(start, False)
        # no activity and enough time passed to start sleeping
        processor.iteration(start + timedelta(seconds=3), False)
        assert sleep_fn.called
        assert sleep_fn.call_arg == start + timedelta(seconds=25)
        assert wakeup_fn.call_arg == start + timedelta(seconds=25)

        sleep_fn.reset()
        wakeup_fn.reset()

        # ensure that wake up is not scheduled again
        processor.iteration(start + timedelta(seconds=25), False)
        assert wakeup_fn.call_arg is None

    def test_wakeup_delta_blocks(self, mocker, sleep_fn, wakeup_fn):
        """The delta-adjusted wake up can fall below the minimum sleep time."""
        start = datetime.now(timezone.utc)
        wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
        wakeup.check.return_value = start + timedelta(seconds=25)
        # delta of 22 s moves the effective wake up to +3 s, which is
        # presumably too close to the minimum sleep time of 10 s to suspend
        processor = autosuspend.Processor([_StubCheck('stub', None)],
                                          [wakeup],
                                          2,
                                          10,
                                          22,
                                          sleep_fn,
                                          wakeup_fn,
                                          False)

        # init iteration
        processor.iteration(start, False)
        # no activity and enough time passed to start sleeping
        processor.iteration(start + timedelta(seconds=3), False)
        assert not sleep_fn.called

    def test_wakeup_delta_applied(self, mocker, sleep_fn, wakeup_fn):
        """The scheduled wake up time is reduced by the configured delta."""
        start = datetime.now(timezone.utc)
        wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
        wakeup.check.return_value = start + timedelta(seconds=25)
        processor = autosuspend.Processor([_StubCheck('stub', None)],
                                          [wakeup],
                                          2,
                                          10,
                                          4,
                                          sleep_fn,
                                          wakeup_fn,
                                          False)

        # init iteration
        processor.iteration(start, False)
        # no activity and enough time passed to start sleeping
        processor.iteration(start + timedelta(seconds=3), False)
        assert sleep_fn.called
        # 25 s wake up minus the 4 s delta
        assert wakeup_fn.call_arg == start + timedelta(seconds=21)
0 | from autosuspend.checks import Check | |
1 | ||
2 | ||
class TestCheck:
    """Tests for the abstract Check base class."""

    class DummyCheck(Check):
        # Minimal concrete subclass so the base class can be instantiated.

        @classmethod
        def create(cls, name, config):
            pass

        def check(self):
            pass

    def test_name(self):
        assert self.DummyCheck('test').name == 'test'

    def test_name_default(self):
        # Without an explicit name, a default must still be provided.
        assert self.DummyCheck().name is not None

    def test_str(self):
        rendered = str(self.DummyCheck('test'))
        assert isinstance(rendered, str)
0 | from collections import namedtuple | |
1 | import configparser | |
2 | import json | |
3 | import os | |
4 | import os.path | |
5 | import pwd | |
6 | import re | |
7 | import socket | |
8 | import subprocess | |
9 | import sys | |
10 | ||
11 | import psutil | |
12 | import pytest | |
13 | import requests | |
14 | ||
15 | from autosuspend.checks import (ConfigurationError, | |
16 | SevereCheckError, | |
17 | TemporaryCheckError) | |
18 | from autosuspend.checks.activity import (ActiveCalendarEvent, | |
19 | ActiveConnection, | |
20 | ExternalCommand, | |
21 | Kodi, | |
22 | KodiIdleTime, | |
23 | Load, | |
24 | LogindSessionsIdle, | |
25 | Mpd, | |
26 | NetworkBandwidth, | |
27 | Ping, | |
28 | Processes, | |
29 | Smb, | |
30 | Users, | |
31 | XIdleTime, | |
32 | XPath) | |
33 | from . import CheckTest | |
34 | ||
35 | ||
# Replica of psutil's snic namedtuple (one network interface address entry),
# used to build fake psutil.net_if_addrs() results in the tests below.
snic = namedtuple('snic', ['family', 'address', 'netmask', 'broadcast', 'ptp'])
37 | ||
38 | ||
class TestSmb(CheckTest):
    """Tests for the Smb activity check based on smbstatus output."""

    def create_instance(self, name):
        return Smb(name)

    @staticmethod
    def _patch_smbstatus_output(monkeypatch, resource):
        # Replace subprocess.check_output with canned smbstatus output read
        # from the given test_data resource.
        def return_data(*args, **kwargs):
            with open(os.path.join(os.path.dirname(__file__), 'test_data',
                                   resource), 'rb') as f:
                return f.read()
        monkeypatch.setattr(subprocess, 'check_output', return_data)

    def test_no_connections(self, monkeypatch):
        """No open SMB connections means no activity."""
        self._patch_smbstatus_output(monkeypatch, 'smbstatus_no_connections')

        assert Smb('foo').check() is None

    def test_with_connections(self, monkeypatch):
        """Open SMB connections are reported as activity."""
        self._patch_smbstatus_output(monkeypatch, 'smbstatus_with_connections')

        # Fix: call check() only once and assert on the captured result
        # instead of executing the check a second, independent time.
        result = Smb('foo').check()
        assert result is not None
        assert len(result.splitlines()) == 3

    def test_call_error(self, mocker):
        """A failing smbstatus call raises SevereCheckError."""
        mocker.patch('subprocess.check_output',
                     side_effect=subprocess.CalledProcessError(2, 'cmd'))

        with pytest.raises(SevereCheckError):
            Smb('foo').check()

    def test_create(self):
        assert isinstance(Smb.create('name', None), Smb)
72 | ||
73 | ||
class TestUsers(CheckTest):
    """Tests for the Users activity check."""

    def create_instance(self, name):
        return Users(name, re.compile('.*'), re.compile('.*'),
                     re.compile('.*'))

    @staticmethod
    def create_suser(name, terminal, host, started, pid):
        """Build a psutil suser tuple across psutil versions."""
        try:
            return psutil._common.suser(name, terminal, host, started, pid)
        except TypeError:
            # psutil 5.0: the suser tuple has no pid field yet
            return psutil._common.suser(name, terminal, host, started)

    def test_no_users(self, monkeypatch):
        """No logged-in users means no activity."""

        def data():
            return []
        monkeypatch.setattr(psutil, 'users', data)

        assert Users('users', re.compile('.*'), re.compile('.*'),
                     re.compile('.*')).check() is None

    def test_smoke(self):
        Users('users', re.compile('.*'), re.compile('.*'),
              re.compile('.*')).check()

    def test_matching_users(self, monkeypatch):
        """A session matching all three regexes reports activity."""

        def data():
            return [self.create_suser('foo', 'pts1', 'host', 12345, 12345)]
        monkeypatch.setattr(psutil, 'users', data)

        assert Users('users', re.compile('.*'), re.compile('.*'),
                     re.compile('.*')).check() is not None

    def test_non_matching_user(self, monkeypatch):
        """A session whose user name does not match is ignored."""

        def data():
            return [self.create_suser('foo', 'pts1', 'host', 12345, 12345)]
        monkeypatch.setattr(psutil, 'users', data)

        assert Users('users', re.compile('narf'), re.compile('.*'),
                     re.compile('.*')).check() is None

    def test_create(self):
        """create() compiles the configured regular expressions."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           name = name.*name
                           terminal = term.*term
                           host = host.*host''')

        check = Users.create('name', parser['section'])

        assert check._user_regex == re.compile('name.*name')
        assert check._terminal_regex == re.compile('term.*term')
        assert check._host_regex == re.compile('host.*host')

    def test_create_regex_error(self):
        """An invalid regular expression raises ConfigurationError."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           name = name.*name
                           terminal = term.[[a-9]term
                           host = host.*host''')

        with pytest.raises(ConfigurationError):
            Users.create('name', parser['section'])
141 | ||
142 | ||
class TestProcesses(CheckTest):
    """Tests for the Processes activity check."""

    def create_instance(self, name):
        return Processes(name, ['foo'])

    class StubProcess:
        # Minimal stand-in for a psutil process exposing only name().

        def __init__(self, name):
            self._name = name

        def name(self):
            return self._name

    class RaisingProcess:
        # Stub for a process that vanished between listing and inspection.

        def name(self):
            raise psutil.NoSuchProcess(42)

    def test_matching_process(self, monkeypatch):
        """Activity is reported if any watched process name is running."""

        def data():
            return [self.StubProcess('blubb'), self.StubProcess('nonmatching')]
        monkeypatch.setattr(psutil, 'process_iter', data)

        assert Processes(
            'foo', ['dummy', 'blubb', 'other']).check() is not None

    def test_ignore_no_such_process(self, monkeypatch):
        """Processes disappearing during iteration must not break the check."""

        def data():
            return [self.RaisingProcess()]
        monkeypatch.setattr(psutil, 'process_iter', data)

        Processes('foo', ['dummy']).check()

    def test_non_matching_process(self, monkeypatch):
        """No activity when none of the watched names is running."""

        def data():
            return [self.StubProcess('asdfasdf'),
                    self.StubProcess('nonmatching')]
        monkeypatch.setattr(psutil, 'process_iter', data)

        assert Processes(
            'foo', ['dummy', 'blubb', 'other']).check() is None

    def test_create(self):
        """create() parses the comma-separated process list."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           processes = foo, bar, narf''')
        assert Processes.create(
            'name', parser['section'])._processes == ['foo', 'bar', 'narf']

    def test_create_no_entry(self):
        """A missing processes option raises ConfigurationError."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]''')
        with pytest.raises(ConfigurationError):
            Processes.create('name', parser['section'])
200 | ||
201 | ||
class TestActiveCalendarEvent(CheckTest):
    """Tests for the ActiveCalendarEvent activity check."""

    def create_instance(self, name):
        return ActiveCalendarEvent(name, url='asdfasdf', timeout=5)

    def test_smoke(self, stub_server):
        """A calendar with a currently running event reports activity."""
        url = stub_server.resource_address('long-event.ics')
        result = ActiveCalendarEvent('test', url=url, timeout=3).check()
        assert result is not None
        assert 'long-event' in result

    def test_no_event(self, stub_server):
        """A calendar whose events all lie in the past reports nothing."""
        url = stub_server.resource_address('old-event.ics')
        check = ActiveCalendarEvent('test', url=url, timeout=3)
        assert check.check() is None

    def test_create(self):
        """create() forwards url, credentials and timeout from the config."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           url = foobar
                           username = user
                           password = pass
                           timeout = 3''')

        instance = ActiveCalendarEvent.create('name', parser['section'])

        assert instance._url == 'foobar'
        assert instance._username == 'user'
        assert instance._password == 'pass'
        assert instance._timeout == 3
230 | ||
231 | ||
class TestActiveConnection(CheckTest):
    """Tests for the ActiveConnection activity check."""

    # Port and (deliberately fake) address used as the local endpoint.
    MY_PORT = 22
    MY_ADDRESS = '123.456.123.456'

    def create_instance(self, name):
        return ActiveConnection(name, [10])

    def test_smoke(self):
        ActiveConnection('foo', [22]).check()

    def test_connected(self, monkeypatch):
        """An established connection on a watched local port is activity."""

        def addresses():
            return {'dummy': [snic(
                socket.AF_INET, self.MY_ADDRESS, '255.255.255.0',
                None, None)]}

        def connections():
            return [psutil._common.sconn(
                -1, socket.AF_INET, socket.SOCK_STREAM,
                (self.MY_ADDRESS, self.MY_PORT),
                ('42.42.42.42', 42), 'ESTABLISHED', None)]

        monkeypatch.setattr(psutil, 'net_if_addrs', addresses)
        monkeypatch.setattr(psutil, 'net_connections', connections)

        assert ActiveConnection(
            'foo', [10, self.MY_PORT, 30]).check() is not None

    @pytest.mark.parametrize("connection", [
        # not my port
        psutil._common.sconn(-1,
                             socket.AF_INET, socket.SOCK_STREAM,
                             (MY_ADDRESS, 32),
                             ('42.42.42.42', 42),
                             'ESTABLISHED', None),
        # not my local address
        psutil._common.sconn(-1,
                             socket.AF_INET, socket.SOCK_STREAM,
                             ('33.33.33.33', MY_PORT),
                             ('42.42.42.42', 42),
                             'ESTABLISHED', None),
        # not my established
        psutil._common.sconn(-1,
                             socket.AF_INET, socket.SOCK_STREAM,
                             (MY_ADDRESS, MY_PORT),
                             ('42.42.42.42', 42),
                             'NARF', None),
        # I am the client
        psutil._common.sconn(-1,
                             socket.AF_INET, socket.SOCK_STREAM,
                             ('42.42.42.42', 42),
                             (MY_ADDRESS, MY_PORT),
                             'NARF', None),
    ])
    def test_not_connected(self, monkeypatch, connection):
        """Connections not matching port, address, or state are ignored."""

        def addresses():
            return {'dummy': [snic(
                socket.AF_INET, self.MY_ADDRESS, '255.255.255.0',
                None, None)]}

        def connections():
            return [connection]

        monkeypatch.setattr(psutil, 'net_if_addrs', addresses)
        monkeypatch.setattr(psutil, 'net_connections', connections)

        assert ActiveConnection(
            'foo', [10, self.MY_PORT, 30]).check() is None

    def test_create(self):
        """create() parses the configured port list into a set of ints."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           ports = 10,20,30''')
        assert ActiveConnection.create(
            'name', parser['section'])._ports == {10, 20, 30}

    def test_create_no_entry(self):
        """A missing ports option raises ConfigurationError."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]''')
        with pytest.raises(ConfigurationError):
            ActiveConnection.create('name', parser['section'])

    def test_create_no_number(self):
        """Non-numeric port entries raise ConfigurationError."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           ports = 10,20xx,30''')
        with pytest.raises(ConfigurationError):
            ActiveConnection.create('name', parser['section'])
323 | ||
324 | ||
class TestLoad(CheckTest):
    """Tests for the Load activity check."""

    def create_instance(self, name):
        return Load(name, 0.4)

    @staticmethod
    def _patch_loadavg(monkeypatch, five_minute_value):
        # Fake os.getloadavg with a fixed five-minute average.
        monkeypatch.setattr(
            os, 'getloadavg', lambda: [0, five_minute_value, 0])

    def test_below(self, monkeypatch):
        """A five-minute load below the threshold is no activity."""
        limit = 1.34
        self._patch_loadavg(monkeypatch, limit - 0.2)

        assert Load('foo', limit).check() is None

    def test_above(self, monkeypatch):
        """A five-minute load above the threshold counts as activity."""
        limit = 1.34
        self._patch_loadavg(monkeypatch, limit + 0.2)

        assert Load('foo', limit).check() is not None

    def test_create(self):
        """create() parses the threshold as a float."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           threshold = 3.2''')

        assert Load.create('name', parser['section'])._threshold == 3.2

    def test_create_no_number(self):
        """A non-numeric threshold raises ConfigurationError."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           threshold = narf''')

        with pytest.raises(ConfigurationError):
            Load.create('name', parser['section'])
363 | ||
364 | ||
class TestMpd(CheckTest):
    """Tests for the Mpd activity check."""

    def create_instance(self, name):
        return Mpd(name, None, None, None)

    def test_playing(self, monkeypatch):
        """MPD in the play state counts as activity."""

        check = Mpd('test', None, None, None)

        def get_state():
            return {'state': 'play'}
        monkeypatch.setattr(check, '_get_state', get_state)

        assert check.check() is not None

    def test_not_playing(self, monkeypatch):
        """A paused MPD does not count as activity."""

        check = Mpd('test', None, None, None)

        def get_state():
            return {'state': 'pause'}
        monkeypatch.setattr(check, '_get_state', get_state)

        assert check.check() is None

    def test_correct_mpd_interaction(self, mocker):
        """The check configures, connects, queries, and tears down MPD."""
        import mpd

        mock_instance = mocker.MagicMock(spec=mpd.MPDClient)
        mock_instance.status.return_value = {'state': 'play'}
        # the timeout is set via a property on the client instance
        timeout_property = mocker.PropertyMock()
        type(mock_instance).timeout = timeout_property
        mock = mocker.patch('mpd.MPDClient')
        mock.return_value = mock_instance

        host = 'foo'
        port = 42
        timeout = 17

        assert Mpd('name', host, port, timeout).check() is not None

        timeout_property.assert_called_once_with(timeout)
        mock_instance.connect.assert_called_once_with(host, port)
        mock_instance.status.assert_called_once_with()
        mock_instance.close.assert_called_once_with()
        mock_instance.disconnect.assert_called_once_with()

    def test_handle_connection_errors(self):
        """Connection problems surface as TemporaryCheckError."""

        check = Mpd('test', None, None, None)

        def _get_state():
            raise ConnectionError()

        check._get_state = _get_state

        with pytest.raises(TemporaryCheckError):
            check.check()

    def test_create(self):
        """create() forwards host, port and timeout from the config."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           host = host
                           port = 1234
                           timeout = 12''')

        check = Mpd.create('name', parser['section'])

        assert check._host == 'host'
        assert check._port == 1234
        assert check._timeout == 12

    def test_create_port_no_number(self):
        """A non-numeric port raises ConfigurationError."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           host = host
                           port = string
                           timeout = 12''')

        with pytest.raises(ConfigurationError):
            Mpd.create('name', parser['section'])

    def test_create_timeout_no_number(self):
        """A non-numeric timeout raises ConfigurationError."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           host = host
                           port = 10
                           timeout = string''')

        with pytest.raises(ConfigurationError):
            Mpd.create('name', parser['section'])
456 | ||
457 | ||
class TestNetworkBandwidth(CheckTest):
    """Tests for the NetworkBandwidth activity check."""

    def create_instance(self, name):
        return NetworkBandwidth(name, psutil.net_if_addrs().keys(), 0, 0)

    def test_smoke(self, stub_server):
        """With zero thresholds, any traffic at all registers as activity."""
        check = NetworkBandwidth(
            'name', psutil.net_if_addrs().keys(), 0, 0)
        # make some traffic
        requests.get(stub_server.resource_address(''))
        assert check.check() is not None

    @pytest.fixture
    def mock_interfaces(self, mocker):
        # Pretend exactly these interfaces exist on the host.
        mock = mocker.patch('psutil.net_if_addrs')
        mock.return_value = {'foo': None, 'bar': None, 'baz': None}

    def test_create(self, mock_interfaces):
        """create() reads interfaces and explicit thresholds."""
        parser = configparser.ConfigParser()
        parser.read_string('''
[section]
interfaces = foo, baz
threshold_send = 200
threshold_receive = 300
''')
        check = NetworkBandwidth.create('name', parser['section'])
        assert set(check._interfaces) == {'foo', 'baz'}
        assert check._threshold_send == 200
        assert check._threshold_receive == 300

    def test_create_default(self, mock_interfaces):
        """Thresholds default to 100 when not configured."""
        parser = configparser.ConfigParser()
        parser.read_string('''
[section]
interfaces = foo, baz
''')
        check = NetworkBandwidth.create('name', parser['section'])
        assert set(check._interfaces) == {'foo', 'baz'}
        assert check._threshold_send == 100
        assert check._threshold_receive == 100

    @pytest.mark.parametrize("config,error_match", [
        ('''
[section]
interfaces = foo, NOTEXIST
threshold_send = 200
threshold_receive = 300
''', r'does not exist'),
        ('''
[section]
threshold_send = 200
threshold_receive = 300
''', r'configuration key: \'interfaces\''),
        ('''
[section]
interfaces =
threshold_send = 200
threshold_receive = 300
''', r'No interfaces configured'),
        ('''
[section]
interfaces = foo, bar
threshold_send = xxx
''', r'Threshold in wrong format'),
        ('''
[section]
interfaces = foo, bar
threshold_receive = xxx
''', r'Threshold in wrong format'),
    ])
    def test_create_error(self, mock_interfaces, config, error_match):
        """Invalid configurations raise ConfigurationError with a hint."""
        parser = configparser.ConfigParser()
        parser.read_string(config)
        with pytest.raises(ConfigurationError, match=error_match):
            NetworkBandwidth.create('name', parser['section'])

    @pytest.mark.parametrize('send_threshold,receive_threshold,match', [
        (sys.float_info.max, 0, 'receive'),
        (0, sys.float_info.max, 'sending'),
    ])
    def test_with_activity(self, send_threshold, receive_threshold, match,
                           stub_server):
        """Traffic exceeding one threshold reports the matching direction."""
        check = NetworkBandwidth(
            'name', psutil.net_if_addrs().keys(),
            send_threshold, receive_threshold)
        # make some traffic
        requests.get(stub_server.resource_address(''))
        res = check.check()
        assert res is not None
        assert match in res

    def test_no_activity(self, stub_server):
        """No activity when both thresholds are impossibly high."""
        check = NetworkBandwidth(
            'name', psutil.net_if_addrs().keys(),
            sys.float_info.max, sys.float_info.max)
        # make some traffic
        requests.get(stub_server.resource_address(''))
        assert check.check() is None
557 | ||
class TestKodi(CheckTest):
    """Tests for the Kodi activity check querying the JSON-RPC API."""

    def create_instance(self, name):
        return Kodi(name, url='url', timeout=10)

    def test_playing(self, mocker):
        """An active player in the RPC result counts as activity."""
        mock_reply = mocker.MagicMock()
        mock_reply.json.return_value = {
            "id": 1, "jsonrpc": "2.0",
            "result": [{"playerid": 0, "type": "audio"}]}
        mocker.patch('requests.Session.get', return_value=mock_reply)

        assert Kodi('foo', url='url', timeout=10).check() is not None

        mock_reply.json.assert_called_once_with()

    def test_not_playing(self, mocker):
        """An empty player list means no activity."""
        mock_reply = mocker.MagicMock()
        mock_reply.json.return_value = {
            "id": 1, "jsonrpc": "2.0", "result": []}
        mocker.patch('requests.Session.get', return_value=mock_reply)

        assert Kodi('foo', url='url', timeout=10).check() is None

        mock_reply.json.assert_called_once_with()

    def test_assertion_no_result(self, mocker):
        """A reply without a result key raises TemporaryCheckError."""
        mock_reply = mocker.MagicMock()
        mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0"}
        mocker.patch('requests.Session.get', return_value=mock_reply)

        with pytest.raises(TemporaryCheckError):
            Kodi('foo', url='url', timeout=10).check()

    def test_request_error(self, mocker):
        """Network failures raise TemporaryCheckError."""
        mocker.patch('requests.Session.get',
                     side_effect=requests.exceptions.RequestException())

        with pytest.raises(TemporaryCheckError):
            Kodi('foo', url='url', timeout=10).check()

    def test_json_error(self, mocker):
        """Malformed JSON replies raise TemporaryCheckError."""
        mock_reply = mocker.MagicMock()
        mock_reply.json.side_effect = json.JSONDecodeError('test', 'test', 42)
        mocker.patch('requests.Session.get', return_value=mock_reply)

        with pytest.raises(TemporaryCheckError):
            Kodi('foo', url='url', timeout=10).check()

    def test_create(self):
        """create() forwards url and timeout from the config."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           url = anurl
                           timeout = 12''')

        check = Kodi.create('name', parser['section'])

        assert check._url.startswith('anurl')
        assert check._timeout == 12

    def test_create_timeout_no_number(self):
        """A non-numeric timeout raises ConfigurationError."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           url = anurl
                           timeout = string''')

        with pytest.raises(ConfigurationError):
            Kodi.create('name', parser['section'])
627 | ||
class TestKodiIdleTime(CheckTest):
    """Tests for the Kodi system idle time activity check."""

    def create_instance(self, name):
        # Minimal instance for the generic CheckTest contract tests.
        return KodiIdleTime(name, url='url', timeout=10, idle_time=10)

    def test_create(self):
        """All configuration values are passed through to the instance."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           url = anurl
                           timeout = 12
                           idle_time = 42''')

        check = KodiIdleTime.create('name', parser['section'])

        assert check._url.startswith('anurl')
        assert check._timeout == 12
        assert check._idle_time == 42

    def test_create_timeout_no_number(self):
        """A non-numeric timeout is rejected as a configuration error."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           url = anurl
                           timeout = string''')

        with pytest.raises(ConfigurationError):
            KodiIdleTime.create('name', parser['section'])

    def test_create_idle_time_no_number(self):
        """A non-numeric idle_time is rejected as a configuration error."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           url = anurl
                           idle_time = string''')

        with pytest.raises(ConfigurationError):
            KodiIdleTime.create('name', parser['section'])

    def test_no_result(self, mocker):
        """A reply without a 'result' key raises a temporary error."""
        mock_reply = mocker.MagicMock()
        mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0"}
        mocker.patch('requests.Session.get', return_value=mock_reply)

        with pytest.raises(TemporaryCheckError):
            KodiIdleTime('foo', url='url', timeout=10, idle_time=42).check()

    def test_result_is_list(self, mocker):
        """An unexpected list result raises a temporary error."""
        mock_reply = mocker.MagicMock()
        mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0",
                                        "result": []}
        mocker.patch('requests.Session.get', return_value=mock_reply)

        with pytest.raises(TemporaryCheckError):
            KodiIdleTime('foo', url='url', timeout=10, idle_time=42).check()

    def test_result_no_entry(self, mocker):
        """An empty result mapping raises a temporary error."""
        mock_reply = mocker.MagicMock()
        mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0",
                                        "result": {}}
        mocker.patch('requests.Session.get', return_value=mock_reply)

        with pytest.raises(TemporaryCheckError):
            KodiIdleTime('foo', url='url', timeout=10, idle_time=42).check()

    def test_result_wrong_entry(self, mocker):
        """A result lacking the expected idle-time key raises an error."""
        mock_reply = mocker.MagicMock()
        mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0",
                                        "result": {"narf": True}}
        mocker.patch('requests.Session.get', return_value=mock_reply)

        with pytest.raises(TemporaryCheckError):
            KodiIdleTime('foo', url='url', timeout=10, idle_time=42).check()

    def test_active(self, mocker):
        """System.IdleTime(42) == True means Kodi reports activity."""
        mock_reply = mocker.MagicMock()
        mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0",
                                        "result": {
                                            "System.IdleTime(42)": True}}
        mocker.patch('requests.Session.get', return_value=mock_reply)

        assert KodiIdleTime('foo', url='url',
                            timeout=10, idle_time=42).check() is not None

    def test_inactive(self, mocker):
        """System.IdleTime(42) == False means the system is idle."""
        mock_reply = mocker.MagicMock()
        mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0",
                                        "result": {
                                            "System.IdleTime(42)": False}}
        mocker.patch('requests.Session.get', return_value=mock_reply)

        assert KodiIdleTime('foo', url='url',
                            timeout=10, idle_time=42).check() is None

    def test_request_error(self, mocker):
        """Network failures are wrapped into TemporaryCheckError."""
        mocker.patch('requests.Session.get',
                     side_effect=requests.exceptions.RequestException())

        with pytest.raises(TemporaryCheckError):
            KodiIdleTime('foo', url='url', timeout=10, idle_time=42).check()
725 | ||
726 | ||
class TestPing(CheckTest):
    """Tests for the Ping activity check."""

    def create_instance(self, name):
        return Ping(name, '8.8.8.8')

    def test_smoke(self, mocker):
        """No host answers: inactivity is reported and each host was pinged."""
        call_mock = mocker.patch('subprocess.call')
        call_mock.return_value = 1  # non-zero exit status: host unreachable

        target_hosts = ['abc', '129.123.145.42']

        assert Ping('name', target_hosts).check() is None

        # one ping invocation per configured host, host as the last argument
        assert call_mock.call_count == len(target_hosts)
        for index, expected_host in enumerate(target_hosts):
            positional = call_mock.call_args_list[index][0]
            assert positional[0][-1] == expected_host

    def test_matching(self, mocker):
        """A single replying host is sufficient to report activity."""
        call_mock = mocker.patch('subprocess.call')
        call_mock.return_value = 0  # exit status 0: ping succeeded
        assert Ping('name', ['foo']).check() is not None

    def test_create_missing_hosts(self):
        """A configuration without a hosts entry is rejected."""
        section_parser = configparser.ConfigParser()
        section_parser.read_string('''[section]''')
        with pytest.raises(ConfigurationError):
            Ping.create('name', section_parser['section'])

    def test_create_host_splitting(self):
        """The comma-separated hosts option is split into a list."""
        section_parser = configparser.ConfigParser()
        section_parser.read_string('''[section]
                           hosts=a,b,c''')
        created = Ping.create('name', section_parser['section'])
        assert created._hosts == ['a', 'b', 'c']
761 | ||
762 | ||
class TestXIdleTime(CheckTest):
    """Tests for the XIdleTime activity check and its session discovery."""

    def create_instance(self, name):
        return XIdleTime(name, 10, 'sockets', None, None)

    def test_create_default(self):
        """An empty section yields the documented defaults."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]''')
        check = XIdleTime.create('name', parser['section'])
        assert check._timeout == 600
        # r'a^' can never match anything, i.e. nothing is ignored by default
        assert check._ignore_process_re == re.compile(r'a^')
        assert check._ignore_users_re == re.compile(r'a^')
        assert check._provide_sessions == check._list_sessions_sockets

    def test_create(self):
        """Explicit options override every default, including the method."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           timeout = 42
                           ignore_if_process = .*test
                           ignore_users = test.*test
                           method = logind''')
        check = XIdleTime.create('name', parser['section'])
        assert check._timeout == 42
        assert check._ignore_process_re == re.compile(r'.*test')
        assert check._ignore_users_re == re.compile(r'test.*test')
        assert check._provide_sessions == check._list_sessions_logind

    def test_create_no_int(self):
        """A non-numeric timeout is a configuration error."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           timeout = string''')
        with pytest.raises(ConfigurationError):
            XIdleTime.create('name', parser['section'])

    def test_create_broken_process_re(self):
        """An invalid process regular expression is a configuration error."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           ignore_if_process = [[a-9]''')
        with pytest.raises(ConfigurationError):
            XIdleTime.create('name', parser['section'])

    def test_create_broken_users_re(self):
        """An invalid users regular expression is a configuration error."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           ignore_users = [[a-9]''')
        with pytest.raises(ConfigurationError):
            XIdleTime.create('name', parser['section'])

    def test_create_unknown_method(self):
        """An unsupported session discovery method is a configuration error."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           method = asdfasdf''')
        with pytest.raises(ConfigurationError):
            XIdleTime.create('name', parser['section'])

    def test_list_sessions_logind(self, mocker):
        """Only logind sessions with a name and a numeric display survive."""
        mock = mocker.patch('autosuspend.checks.activity.list_logind_sessions')
        # c1: no display; c2: no name; c3: non-numeric display; c4: valid
        mock.return_value = [('c1', {'Name': 'foo'}),
                             ('c2', {'Display': 'asdfasf'}),
                             ('c3', {'Name': 'hello', 'Display': 'nonumber'}),
                             ('c4', {'Name': 'hello', 'Display': '3'})]

        parser = configparser.ConfigParser()
        parser.read_string('''[section]''')
        check = XIdleTime.create('name', parser['section'])
        assert check._list_sessions_logind() == [(3, 'hello')]

    def test_list_sessions_socket(self, mocker):
        """X11 socket names map to display numbers; non-numeric are dropped."""
        mock_glob = mocker.patch('glob.glob')
        mock_glob.return_value = ['/tmp/.X11-unix/X0',
                                  '/tmp/.X11-unix/X42',
                                  '/tmp/.X11-unix/Xnum']

        # use this test file's owner as a stable, existing user for the stubs
        stat_return = os.stat(os.path.realpath(__file__))
        this_user = pwd.getpwuid(stat_return.st_uid)
        mock_stat = mocker.patch('os.stat')
        mock_stat.return_value = stat_return

        mock_pwd = mocker.patch('pwd.getpwuid')
        mock_pwd.return_value = this_user

        parser = configparser.ConfigParser()
        parser.read_string('''[section]''')
        check = XIdleTime.create('name', parser['section'])
        assert check._list_sessions_sockets() == [(0, this_user.pw_name),
                                                  (42, this_user.pw_name)]
849 | ||
850 | ||
class TestExternalCommand(CheckTest):
    """Tests for the ExternalCommand activity check."""

    def create_instance(self, name):
        return ExternalCommand(name, 'asdfasdf')

    def test_check(self, mocker):
        """A successfully exiting command indicates activity."""
        call_mock = mocker.patch('subprocess.check_call')
        config = configparser.ConfigParser()
        config.read_string('''[section]
                           command = foo bar''')
        instance = ExternalCommand.create('name', config['section'])
        assert instance.check() is not None
        # the configured command line is executed through the shell
        call_mock.assert_called_once_with('foo bar', shell=True)

    def test_check_no_match(self, mocker):
        """A failing command indicates no activity."""
        call_mock = mocker.patch('subprocess.check_call')
        call_mock.side_effect = subprocess.CalledProcessError(2, 'foo bar')
        config = configparser.ConfigParser()
        config.read_string('''[section]
                           command = foo bar''')
        instance = ExternalCommand.create('name', config['section'])
        assert instance.check() is None
        call_mock.assert_called_once_with('foo bar', shell=True)
874 | ||
875 | ||
class TestXPath(CheckTest):
    """Tests for the XPath activity check."""

    def create_instance(self, name):
        return XPath(name=name, url='url', timeout=5,
                     username='userx', password='pass',
                     xpath='/b')

    def test_matching(self, mocker):
        """A matching XPath expression counts as activity."""
        mock_reply = mocker.MagicMock()
        content_property = mocker.PropertyMock()
        type(mock_reply).content = content_property
        content_property.return_value = "<a></a>"
        mock_method = mocker.patch('requests.Session.get',
                                   return_value=mock_reply)

        url = 'nourl'
        assert XPath('foo', xpath='/a', url=url, timeout=5).check() is not None

        # the document must be fetched exactly once with the given timeout
        mock_method.assert_called_once_with(url, timeout=5)
        content_property.assert_called_once_with()

    def test_not_matching(self, mocker):
        """A non-matching XPath expression means no activity."""
        mock_reply = mocker.MagicMock()
        content_property = mocker.PropertyMock()
        type(mock_reply).content = content_property
        content_property.return_value = "<a></a>"
        mocker.patch('requests.Session.get', return_value=mock_reply)

        assert XPath('foo', xpath='/b', url='nourl', timeout=5).check() is None

    def test_create(self):
        """All configuration values are passed through to the instance."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           url = url
                           xpath = /xpath
                           username = user
                           password = pass
                           timeout = 42''')
        check = XPath.create('name', parser['section'])
        assert check._xpath == '/xpath'
        assert check._url == 'url'
        assert check._username == 'user'
        assert check._password == 'pass'
        assert check._timeout == 42

    def test_network_errors_are_passed(self, stub_server):
        """HTTP failures (here: bad auth) become TemporaryCheckError."""
        with pytest.raises(TemporaryCheckError):
            XPath(name='name',
                  url=stub_auth_server.resource_address('data.txt'),
                  timeout=5, username='userx', password='pass',
                  xpath='/b').request()
927 | ||
928 | ||
class TestLogindSessionsIdle(CheckTest):
    """Tests for the LogindSessionsIdle activity check."""

    def create_instance(self, name):
        return LogindSessionsIdle(
            name, ['tty', 'x11', 'wayland'], ['active', 'online'])

    def test_smoke(self):
        """Constructor arguments are stored; check() runs if dbus exists."""
        check = LogindSessionsIdle(
            'test', ['tty', 'x11', 'wayland'], ['active', 'online'])
        assert check._types == ['tty', 'x11', 'wayland']
        assert check._states == ['active', 'online']
        try:
            # only run the test if the dbus module is available (not on travis)
            import dbus  # noqa: F401
            check.check()
        except ImportError:
            pass

    def test_configure_defaults(self):
        """An empty section yields the default session types and states."""
        parser = configparser.ConfigParser()
        parser.read_string('[section]')
        check = LogindSessionsIdle.create('name', parser['section'])
        assert check._types == ['tty', 'x11', 'wayland']
        assert check._states == ['active', 'online']

    def test_configure_types(self):
        """The types option is split on commas and whitespace-stripped."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           types=test, bla,foo''')
        check = LogindSessionsIdle.create('name', parser['section'])
        assert check._types == ['test', 'bla', 'foo']

    def test_configure_states(self):
        """The states option is split on commas and whitespace-stripped."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           states=test, bla,foo''')
        check = LogindSessionsIdle.create('name', parser['section'])
        assert check._states == ['test', 'bla', 'foo']
0 | import configparser | |
1 | ||
2 | import pytest | |
3 | import requests | |
4 | ||
5 | from autosuspend.checks import (Activity, | |
6 | ConfigurationError, | |
7 | TemporaryCheckError) | |
8 | from autosuspend.checks.util import CommandMixin, NetworkMixin, XPathMixin | |
9 | ||
10 | ||
class _CommandMixinSub(CommandMixin, Activity):
    """Concrete Activity subclass used only to exercise CommandMixin."""

    def __init__(self, name, command):
        # explicit cooperative init of both bases, mirroring production usage
        Activity.__init__(self, name)
        CommandMixin.__init__(self, command)

    def check(self):
        # irrelevant here: only configuration parsing is under test
        pass
19 | ||
20 | ||
class TestCommandMixin:
    """Tests for CommandMixin configuration parsing."""

    def test_create(self):
        """The command option is parsed and trailing whitespace stripped."""
        config = configparser.ConfigParser()
        config.read_string('''[section]
                           command = narf bla ''')
        instance = _CommandMixinSub.create('name', config['section'])
        assert instance._command == 'narf bla'

    def test_create_no_command(self):
        """A section without a command entry is rejected."""
        config = configparser.ConfigParser()
        config.read_string('''[section]''')
        with pytest.raises(ConfigurationError):
            _CommandMixinSub.create('name', config['section'])
35 | ||
36 | ||
class TestNetworkMixin:
    """Tests for NetworkMixin configuration parsing and HTTP access.

    Fixture/setup code is kept OUTSIDE the ``pytest.raises`` blocks so that
    an unrelated setup failure cannot be mistaken for the expected
    exception — only the call under test may raise it.
    """

    def test_collect_missing_url(self):
        """A section without a url entry is rejected."""
        parser = configparser.ConfigParser()
        parser.read_string('[section]')
        with pytest.raises(ConfigurationError,
                           match=r"^Lacks 'url'.*"):
            NetworkMixin.collect_init_args(parser['section'])

    def test_collect_default_timeout(self):
        """Without an explicit timeout the default of 5 seconds is used."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           url=nourl''')
        args = NetworkMixin.collect_init_args(parser['section'])
        assert args['timeout'] == 5

    def test_collect_timeout(self):
        """An explicit timeout option is parsed as an integer."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           url=nourl
                           timeout=42''')
        args = NetworkMixin.collect_init_args(parser['section'])
        assert args['timeout'] == 42

    def test_collect_invalid_timeout(self):
        """A non-numeric timeout is rejected as a configuration error."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           url=nourl
                           timeout=xx''')
        with pytest.raises(ConfigurationError,
                           match=r"^Configuration error .*"):
            NetworkMixin.collect_init_args(parser['section'])

    def test_request(self, stub_server):
        """A plain GET against the stub server succeeds with status 200."""
        address = stub_server.resource_address('xml_with_encoding.xml')
        reply = NetworkMixin(address, 5).request()
        assert reply is not None
        assert reply.status_code == 200

    def test_requests_exception(self, mocker):
        """Errors raised by requests become TemporaryCheckError."""
        mock_method = mocker.patch('requests.Session.get')
        mock_method.side_effect = requests.exceptions.ReadTimeout()

        with pytest.raises(TemporaryCheckError):
            NetworkMixin('url', timeout=5).request()

    def test_smoke(self, stub_server):
        """The body of the requested resource is available in the reply."""
        response = NetworkMixin(stub_server.resource_address('data.txt'),
                                timeout=5).request()
        assert response is not None
        assert response.text == 'iamhere\n'

    def test_exception_404(self, stub_server):
        """HTTP error status codes are converted into TemporaryCheckError."""
        with pytest.raises(TemporaryCheckError):
            NetworkMixin(stub_server.resource_address('doesnotexist'),
                         timeout=5).request()

    def test_authentication(self, stub_auth_server):
        """Valid basic-auth credentials let the request succeed."""
        NetworkMixin(stub_auth_server.resource_address('data.txt'),
                     5, username='user', password='pass').request()

    def test_invalid_authentication(self, stub_auth_server):
        """Wrong credentials lead to a TemporaryCheckError."""
        with pytest.raises(TemporaryCheckError):
            NetworkMixin(stub_auth_server.resource_address('data.txt'),
                         5, username='userx', password='pass').request()

    def test_file_url(self):
        """file:// URLs are supported in addition to HTTP."""
        NetworkMixin('file://' + __file__, 5).request()
105 | ||
106 | ||
class _XPathMixinSub(XPathMixin, Activity):
    """Concrete Activity subclass used only to exercise XPathMixin."""

    def __init__(self, name, **kwargs):
        # explicit cooperative init of both bases, mirroring production usage
        Activity.__init__(self, name)
        XPathMixin.__init__(self, **kwargs)

    def check(self):
        # irrelevant here: only XML fetching/evaluation is under test
        pass
115 | ||
116 | ||
class TestXPathMixin:
    """Tests for XPathMixin document retrieval and XPath evaluation.

    Fixture/setup code is kept OUTSIDE the ``pytest.raises`` blocks so that
    an unrelated setup failure cannot be mistaken for the expected
    exception — only the call under test may raise it.
    """

    def test_smoke(self, stub_server):
        """A valid document is fetched and the XPath evaluated against it."""
        address = stub_server.resource_address('xml_with_encoding.xml')
        result = _XPathMixinSub(
            'foo', xpath='/b', url=address, timeout=5).evaluate()
        assert result is not None
        # '/b' does not exist in the document: evaluation yields no nodes
        assert len(result) == 0

    def test_broken_xml(self, mocker):
        """Unparseable XML content raises a TemporaryCheckError."""
        mock_reply = mocker.MagicMock()
        content_property = mocker.PropertyMock()
        type(mock_reply).content = content_property
        content_property.return_value = b"//broken"
        mocker.patch('requests.Session.get', return_value=mock_reply)

        with pytest.raises(TemporaryCheckError):
            _XPathMixinSub(
                'foo', xpath='/b', url='nourl', timeout=5).evaluate()

    def test_xml_with_encoding(self, mocker):
        """Documents with an explicit encoding declaration are handled."""
        mock_reply = mocker.MagicMock()
        content_property = mocker.PropertyMock()
        type(mock_reply).content = content_property
        content_property.return_value = \
            b"""<?xml version="1.0" encoding="ISO-8859-1" ?>
<root></root>"""
        mocker.patch('requests.Session.get', return_value=mock_reply)

        _XPathMixinSub('foo', xpath='/b', url='nourl', timeout=5).evaluate()

    def test_xpath_prevalidation(self):
        """Syntactically invalid XPath expressions are rejected at create."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           xpath=|34/ad
                           url=nourl''')
        with pytest.raises(ConfigurationError,
                           match=r'^Invalid xpath.*'):
            _XPathMixinSub.create('name', parser['section'])

    @pytest.mark.parametrize('entry,', ['xpath', 'url'])
    def test_missing_config_entry(self, entry):
        """Leaving out any required option is a configuration error."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           xpath=/valid
                           url=nourl''')
        del parser['section'][entry]
        with pytest.raises(ConfigurationError,
                           match=r"^Lacks '" + entry + "'.*"):
            _XPathMixinSub.create('name', parser['section'])
0 | import configparser | |
1 | from datetime import datetime, timedelta, timezone | |
2 | import subprocess | |
3 | ||
4 | import dateutil.parser | |
5 | import pytest | |
6 | ||
7 | from autosuspend.checks import ConfigurationError, TemporaryCheckError | |
8 | from autosuspend.checks.wakeup import (Calendar, | |
9 | Command, | |
10 | File, | |
11 | Periodic, | |
12 | XPath, | |
13 | XPathDelta) | |
14 | from . import CheckTest | |
15 | ||
16 | ||
class TestCalendar(CheckTest):
    """Tests for the Calendar wakeup check based on iCalendar files."""

    def create_instance(self, name):
        return Calendar(name, url='file:///asdf', timeout=3)

    def test_create(self):
        """All configuration values are passed through to the instance."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           url = url
                           username = user
                           password = pass
                           timeout = 42''')
        check = Calendar.create('name', parser['section'])
        assert check._url == 'url'
        assert check._username == 'user'
        assert check._password == 'pass'
        assert check._timeout == 42

    def test_empty(self, stub_server):
        """No wakeup is scheduled when all events lie in the past."""
        address = stub_server.resource_address('old-event.ics')
        timestamp = dateutil.parser.parse('20050605T130000Z')
        assert Calendar(
            'test', url=address, timeout=3).check(timestamp) is None

    def test_smoke(self, stub_server):
        """The start of the next future event is returned as wakeup time."""
        address = stub_server.resource_address('old-event.ics')
        timestamp = dateutil.parser.parse('20040605T090000Z')
        desired_start = dateutil.parser.parse('20040605T110000Z')
        assert Calendar(
            'test', url=address, timeout=3).check(timestamp) == desired_start

    def test_ignore_running(self, stub_server):
        """An event that is currently in progress does not trigger a wakeup."""
        address = stub_server.resource_address('old-event.ics')
        timestamp = dateutil.parser.parse('20040605T120000Z')
        assert Calendar(
            'test', url=address, timeout=3).check(timestamp) is None
53 | ||
54 | ||
class TestFile(CheckTest):
    """Tests for the File wakeup check reading timestamps from a file."""

    def create_instance(self, name):
        return File(name, 'asdf')

    def test_create(self):
        """The path option is passed through to the instance."""
        config = configparser.ConfigParser()
        config.read_string('''[section]
                           path = /tmp/test''')
        created = File.create('name', config['section'])
        assert created._path == '/tmp/test'

    def test_create_no_path(self):
        """A section without a path entry is rejected."""
        config = configparser.ConfigParser()
        config.read_string('''[section]''')
        with pytest.raises(ConfigurationError):
            File.create('name', config['section'])

    def test_smoke(self, tmpdir):
        """The UNIX timestamp in the file is returned as UTC wakeup time."""
        wakeup_file = tmpdir.join('file')
        wakeup_file.write('42\n\n')
        expected = datetime.fromtimestamp(42, timezone.utc)
        result = File('name', str(wakeup_file)).check(
            datetime.now(timezone.utc))
        assert result == expected

    def test_no_file(self, tmpdir):
        """A missing file means no wakeup is scheduled."""
        missing_path = str(tmpdir.join('narf'))
        assert File('name', missing_path).check(
            datetime.now(timezone.utc)) is None

    def test_invalid_number(self, tmpdir):
        """Unparseable file contents raise a TemporaryCheckError."""
        wakeup_file = tmpdir.join('filexxx')
        wakeup_file.write('nonumber\n\n')
        with pytest.raises(TemporaryCheckError):
            File('name', str(wakeup_file)).check(datetime.now(timezone.utc))
89 | ||
90 | ||
class TestCommand(CheckTest):
    """Tests for the Command wakeup check parsing command output."""

    def create_instance(self, name):
        return Command(name, 'asdf')

    def test_smoke(self):
        """The first output line is parsed as a UNIX timestamp."""
        check = Command('test', 'echo 1234')
        assert check.check(
            datetime.now(timezone.utc)) == datetime.fromtimestamp(
                1234, timezone.utc)

    def test_no_output(self):
        """Empty command output means no wakeup is scheduled."""
        check = Command('test', 'echo')
        assert check.check(datetime.now(timezone.utc)) is None

    def test_not_parseable(self):
        """Non-numeric output raises a TemporaryCheckError."""
        check = Command('test', 'echo asdfasdf')
        with pytest.raises(TemporaryCheckError):
            check.check(datetime.now(timezone.utc))

    def test_multiple_lines(self, mocker):
        """Only the first output line is used; further lines are ignored."""
        mock = mocker.patch('subprocess.check_output')
        mock.return_value = '1234\nignore\n'
        check = Command('test', 'echo bla')
        assert check.check(
            datetime.now(timezone.utc)) == datetime.fromtimestamp(
                1234, timezone.utc)

    def test_multiple_lines_but_empty(self, mocker):
        """A blank first line means no wakeup, even with later content."""
        mock = mocker.patch('subprocess.check_output')
        mock.return_value = '   \nignore\n'
        check = Command('test', 'echo bla')
        assert check.check(datetime.now(timezone.utc)) is None

    def test_process_error(self, mocker):
        """A failing command raises a TemporaryCheckError."""
        mock = mocker.patch('subprocess.check_output')
        mock.side_effect = subprocess.CalledProcessError(2, 'foo bar')
        check = Command('test', 'echo bla')
        with pytest.raises(TemporaryCheckError):
            check.check(datetime.now(timezone.utc))
131 | ||
132 | ||
class TestPeriodic(CheckTest):
    """Tests for the Periodic wakeup check."""

    def create_instance(self, name):
        interval = timedelta(seconds=10, minutes=42)
        return Periodic(name, interval)

    def test_create(self):
        """Unit and value options combine into the expected timedelta."""
        config = configparser.ConfigParser()
        config.read_string('''[section]
                           unit=seconds
                           value=13''')
        created = Periodic.create('name', config['section'])
        assert created._delta == timedelta(seconds=13)

    def test_create_wrong_unit(self):
        """An unknown time unit is a configuration error."""
        config = configparser.ConfigParser()
        config.read_string('''[section]
                           unit=asdfasdf
                           value=13''')
        with pytest.raises(ConfigurationError):
            Periodic.create('name', config['section'])

    def test_create_not_numeric(self):
        """A non-numeric value is a configuration error."""
        config = configparser.ConfigParser()
        config.read_string('''[section]
                           unit=seconds
                           value=asdfasd''')
        with pytest.raises(ConfigurationError):
            Periodic.create('name', config['section'])

    def test_create_float(self):
        """Fractional values are accepted without error."""
        config = configparser.ConfigParser()
        config.read_string('''[section]
                           unit=seconds
                           value=21312.12''')
        Periodic.create('name', config['section'])

    def test_check(self):
        """The next wakeup lies exactly one configured delta in the future."""
        interval = timedelta(seconds=10, minutes=42)
        instance = Periodic('test', interval)
        reference = datetime.now(timezone.utc)
        assert instance.check(reference) == reference + interval
175 | ||
176 | ||
class TestXPath(CheckTest):
    """Tests for the XPath wakeup check extracting timestamps from XML."""

    def create_instance(self, name):
        return XPath(name, xpath='/a', url='nourl', timeout=5)

    def test_matching(self, mocker):
        """A matching attribute value is parsed as a float UNIX timestamp."""
        mock_reply = mocker.MagicMock()
        content_property = mocker.PropertyMock()
        type(mock_reply).content = content_property
        content_property.return_value = '<a value="42.3"></a>'
        mock_method = mocker.patch('requests.Session.get',
                                   return_value=mock_reply)

        url = 'nourl'
        assert XPath(
            'foo', xpath='/a/@value', url=url, timeout=5).check(
                datetime.now(timezone.utc)) == datetime.fromtimestamp(
                    42.3, timezone.utc)

        # the document must be fetched exactly once with the given timeout
        mock_method.assert_called_once_with(url, timeout=5)
        content_property.assert_called_once_with()

    def test_not_matching(self, mocker):
        """No XPath match means no wakeup is scheduled."""
        mock_reply = mocker.MagicMock()
        content_property = mocker.PropertyMock()
        type(mock_reply).content = content_property
        content_property.return_value = "<a></a>"
        mocker.patch('requests.Session.get', return_value=mock_reply)

        assert XPath('foo', xpath='/b', url='nourl', timeout=5).check(
            datetime.now(timezone.utc)) is None

    def test_not_a_string(self, mocker):
        """Matching an element instead of a string raises an error."""
        mock_reply = mocker.MagicMock()
        content_property = mocker.PropertyMock()
        type(mock_reply).content = content_property
        content_property.return_value = "<a></a>"
        mocker.patch('requests.Session.get', return_value=mock_reply)

        with pytest.raises(TemporaryCheckError):
            XPath('foo', xpath='/a', url='nourl', timeout=5).check(
                datetime.now(timezone.utc))

    def test_not_a_number(self, mocker):
        """A non-numeric matched value raises a TemporaryCheckError."""
        mock_reply = mocker.MagicMock()
        content_property = mocker.PropertyMock()
        type(mock_reply).content = content_property
        content_property.return_value = '<a value="narf"></a>'
        mocker.patch('requests.Session.get', return_value=mock_reply)

        with pytest.raises(TemporaryCheckError):
            XPath('foo', xpath='/a/@value', url='nourl', timeout=5).check(
                datetime.now(timezone.utc))

    def test_multiple_min(self, mocker):
        """With multiple matches the smallest timestamp wins."""
        mock_reply = mocker.MagicMock()
        content_property = mocker.PropertyMock()
        type(mock_reply).content = content_property
        content_property.return_value = '''<root>
            <a value="40"></a>
            <a value="10"></a>
            <a value="20"></a>
        </root>
        '''
        mocker.patch('requests.Session.get', return_value=mock_reply)

        assert XPath(
            'foo', xpath='//a/@value', url='nourl', timeout=5).check(
                datetime.now(timezone.utc)) == datetime.fromtimestamp(
                    10, timezone.utc)

    def test_create(self):
        """The xpath option is passed through to the instance."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           xpath=/valid
                           url=nourl
                           timeout=20''')
        check = XPath.create('name', parser['section'])
        assert check._xpath == '/valid'
256 | ||
257 | ||
class TestXPathDelta(CheckTest):
    """Tests for the XPathDelta wakeup check interpreting relative offsets."""

    def create_instance(self, name):
        return XPathDelta(name, xpath='/a', url='nourl', timeout=5,
                          unit='days')

    @pytest.mark.parametrize("unit,factor", [
        ('microseconds', 0.000001),
        ('milliseconds', 0.001),
        ('seconds', 1),
        ('minutes', 60),
        ('hours', 60 * 60),
        ('days', 60 * 60 * 24),
        ('weeks', 60 * 60 * 24 * 7),
    ])
    def test_smoke(self, mocker, unit, factor):
        """The matched value is scaled by the unit and added to now."""
        mock_reply = mocker.MagicMock()
        content_property = mocker.PropertyMock()
        type(mock_reply).content = content_property
        content_property.return_value = '<a value="42"></a>'
        mocker.patch('requests.Session.get', return_value=mock_reply)

        url = 'nourl'
        now = datetime.now(timezone.utc)
        result = XPathDelta(
            'foo', xpath='/a/@value', url=url, timeout=5, unit=unit).check(now)
        assert result == now + timedelta(seconds=42) * factor

    def test_create(self):
        """The unit option is passed through to the instance."""
        parser = configparser.ConfigParser()
        parser.read_string('''[section]
                           xpath=/valid
                           url=nourl
                           timeout=20
                           unit=weeks''')
        check = XPathDelta.create('name', parser['section'])
        assert check._unit == 'weeks'

    def test_init_wrong_unit(self):
        """An unsupported unit is rejected directly in the constructor."""
        with pytest.raises(ValueError):
            XPathDelta('name', url='url', xpath='/a', timeout=5,
                       unit='unknownunit')
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VEVENT | |
4 | CREATED:20180601T194043Z | |
5 | LAST-MODIFIED:20180601T194050Z | |
6 | DTSTAMP:20180601T194050Z | |
7 | UID:0f82aa78-1478-4093-85c5-16d754f362f6 | |
8 | SUMMARY:between | |
9 | DTSTART;VALUE=DATE:20180613 | |
10 | DTEND;VALUE=DATE:20180615 | |
11 | TRANSP:TRANSPARENT | |
12 | END:VEVENT | |
13 | BEGIN:VEVENT | |
14 | CREATED:20180601T194002Z | |
15 | LAST-MODIFIED:20180601T194303Z | |
16 | DTSTAMP:20180601T194303Z | |
17 | UID:630f3b71-865e-4125-977d-a2fd0009ce7d | |
18 | SUMMARY:start | |
19 | DTSTART;VALUE=DATE:20180609 | |
20 | DTEND;VALUE=DATE:20180612 | |
21 | TRANSP:TRANSPARENT | |
22 | X-MOZ-GENERATION:1 | |
23 | END:VEVENT | |
24 | BEGIN:VEVENT | |
25 | CREATED:20180601T194054Z | |
26 | LAST-MODIFIED:20180601T194307Z | |
27 | DTSTAMP:20180601T194307Z | |
28 | UID:dc1c0bfc-633c-4d34-8de4-f6e9bcdb5fc6 | |
29 | SUMMARY:end | |
30 | DTSTART;VALUE=DATE:20180617 | |
31 | DTEND;VALUE=DATE:20180620 | |
32 | TRANSP:TRANSPARENT | |
33 | X-MOZ-GENERATION:1 | |
34 | END:VEVENT | |
35 | BEGIN:VEVENT | |
36 | CREATED:20180601T194313Z | |
37 | LAST-MODIFIED:20180601T194317Z | |
38 | DTSTAMP:20180601T194317Z | |
39 | UID:5095407e-5e63-4609-93a0-5dcd45ed5bf5 | |
40 | SUMMARY:after | |
41 | DTSTART;VALUE=DATE:20180619 | |
42 | DTEND;VALUE=DATE:20180620 | |
43 | TRANSP:TRANSPARENT | |
44 | END:VEVENT | |
45 | BEGIN:VEVENT | |
46 | CREATED:20180601T195811Z | |
47 | LAST-MODIFIED:20180601T195814Z | |
48 | DTSTAMP:20180601T195814Z | |
49 | UID:550119de-eef7-4820-9843-d260515807d2 | |
50 | SUMMARY:before | |
51 | DTSTART;VALUE=DATE:20180605 | |
52 | DTEND;VALUE=DATE:20180606 | |
53 | TRANSP:TRANSPARENT | |
54 | END:VEVENT | |
55 | END:VCALENDAR |
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VEVENT | |
4 | CREATED:20180627T111330Z | |
5 | LAST-MODIFIED:20180627T111340Z | |
6 | DTSTAMP:20180627T111340Z | |
7 | UID:ccf1c6b9-44c4-4fdb-8a98-0165e6f2e369 | |
8 | SUMMARY:single all day | |
9 | DTSTART;VALUE=DATE:20180625 | |
10 | DTEND;VALUE=DATE:20180626 | |
11 | EXDATE:20180630 | |
12 | RRULE:FREQ=DAILY | |
13 | TRANSP:TRANSPARENT | |
14 | END:VEVENT | |
15 | END:VCALENDAR |
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VEVENT | |
4 | CREATED:20180627T111330Z | |
5 | LAST-MODIFIED:20180627T111340Z | |
6 | DTSTAMP:20180627T111340Z | |
7 | UID:ccf1c6b9-44c4-4fdb-8a98-0165e6f2e369 | |
8 | SUMMARY:single all day | |
9 | DTSTART;VALUE=DATE:20180625 | |
10 | DTEND;VALUE=DATE:20180626 | |
11 | RRULE:FREQ=DAILY | |
12 | TRANSP:TRANSPARENT | |
13 | END:VEVENT | |
14 | END:VCALENDAR |
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VEVENT | |
4 | CREATED:20180627T111330Z | |
5 | LAST-MODIFIED:20180627T111340Z | |
6 | DTSTAMP:20180627T111340Z | |
7 | UID:ccf1c6b9-44c4-4fdb-8a98-0165e6f2e369 | |
8 | SUMMARY:single all day | |
9 | DTSTART;VALUE=DATE:20180625 | |
10 | DTEND;VALUE=DATE:20180626 | |
11 | TRANSP:TRANSPARENT | |
12 | END:VEVENT | |
13 | BEGIN:VEVENT | |
14 | CREATED:20180627T111347Z | |
15 | LAST-MODIFIED:20180627T111357Z | |
16 | DTSTAMP:20180627T111357Z | |
17 | UID:a2dab4dd-1ede-4733-af8e-90cff0e26f79 | |
18 | SUMMARY:two all days | |
19 | DTSTART;VALUE=DATE:20180628 | |
20 | DTEND;VALUE=DATE:20180630 | |
21 | TRANSP:TRANSPARENT | |
22 | BEGIN:VALARM | |
23 | ACTION:DISPLAY | |
24 | TRIGGER;VALUE=DURATION:-PT15M | |
25 | DESCRIPTION:Default Mozilla Description | |
26 | END:VALARM | |
27 | END:VEVENT | |
28 | END:VCALENDAR |
0 | iamhere |
0 | [general] | |
1 | interval = 2 | |
2 | idle_time = 5 | |
3 | suspend_cmd = touch @TMPDIR@/would_suspend | |
4 | wakeup_cmd = echo {timestamp:d} > @TMPDIR@/wakeup_at | |
5 | woke_up_file = @TMPDIR@/test-woke-up | |
6 | ||
7 | [check.ExternalCommand] | |
8 | enabled = True | |
9 | command = true |
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VTIMEZONE | |
4 | TZID:Europe/Berlin | |
5 | BEGIN:DAYLIGHT | |
6 | TZOFFSETFROM:+0100 | |
7 | TZOFFSETTO:+0200 | |
8 | TZNAME:CEST | |
9 | DTSTART:19700329T020000 | |
10 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3 | |
11 | END:DAYLIGHT | |
12 | BEGIN:STANDARD | |
13 | TZOFFSETFROM:+0200 | |
14 | TZOFFSETTO:+0100 | |
15 | TZNAME:CET | |
16 | DTSTART:19701025T030000 | |
17 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 | |
18 | END:STANDARD | |
19 | END:VTIMEZONE | |
20 | BEGIN:VEVENT | |
21 | CREATED:20180602T160606Z | |
22 | LAST-MODIFIED:20180602T160632Z | |
23 | DTSTAMP:20180602T160632Z | |
24 | UID:a40c5b76-e3f5-4259-92f5-26692f99f131 | |
25 | SUMMARY:recurring | |
26 | RRULE:FREQ=DAILY;UNTIL=20180617T120000Z | |
27 | EXDATE:20180614T120000Z | |
28 | DTSTART;TZID=Europe/Berlin:20180611T140000 | |
29 | DTEND;TZID=Europe/Berlin:20180611T160000 | |
30 | TRANSP:OPAQUE | |
31 | X-MOZ-GENERATION:4 | |
32 | SEQUENCE:2 | |
33 | END:VEVENT | |
34 | END:VCALENDAR |
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VEVENT | |
4 | CREATED:20180602T151629Z | |
5 | LAST-MODIFIED:20180602T152512Z | |
6 | DTSTAMP:20180602T152512Z | |
7 | UID:f0028400-24e2-4f10-81a0-032372781443 | |
8 | SUMMARY:floating | |
9 | DTSTART:20180610T150000 | |
10 | DTEND:20180610T170000 | |
11 | TRANSP:OPAQUE | |
12 | SEQUENCE:5 | |
13 | X-MOZ-GENERATION:3 | |
14 | END:VEVENT | |
15 | BEGIN:VEVENT | |
16 | CREATED:20180602T151701Z | |
17 | LAST-MODIFIED:20180602T152732Z | |
18 | DTSTAMP:20180602T152732Z | |
19 | UID:0ef23894-702e-40ac-ab09-94fa8c9c51fd | |
20 | SUMMARY:floating recurring | |
21 | RRULE:FREQ=DAILY | |
22 | DTSTART:20180612T180000 | |
23 | DTEND:20180612T200000 | |
24 | TRANSP:OPAQUE | |
25 | X-MOZ-GENERATION:5 | |
26 | SEQUENCE:3 | |
27 | END:VEVENT | |
28 | END:VCALENDAR |
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Inverse inc./SOGo 4.0.0//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VTIMEZONE | |
4 | TZID:Europe/Berlin | |
5 | X-LIC-LOCATION:Europe/Berlin | |
6 | BEGIN:DAYLIGHT | |
7 | TZOFFSETFROM:+0100 | |
8 | TZOFFSETTO:+0200 | |
9 | TZNAME:CEST | |
10 | DTSTART:19700329T020000 | |
11 | RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=-1SU | |
12 | END:DAYLIGHT | |
13 | BEGIN:STANDARD | |
14 | TZOFFSETFROM:+0200 | |
15 | TZOFFSETTO:+0100 | |
16 | TZNAME:CET | |
17 | DTSTART:19701025T030000 | |
18 | RRULE:FREQ=YEARLY;BYMONTH=10;BYDAY=-1SU | |
19 | END:STANDARD | |
20 | END:VTIMEZONE | |
21 | BEGIN:VEVENT | |
22 | UID:2C-5B315480-3-4D014C80 | |
23 | SUMMARY:StayAlive | |
24 | LOCATION:Home | |
25 | CLASS:PUBLIC | |
26 | X-SOGO-SEND-APPOINTMENT-NOTIFICATIONS:NO | |
27 | RRULE:FREQ=DAILY | |
28 | TRANSP:OPAQUE | |
29 | DTSTART;TZID=Europe/Berlin:20180626T170000 | |
30 | DTEND;TZID=Europe/Berlin:20180626T210000 | |
31 | CREATED:20180625T204700Z | |
32 | DTSTAMP:20180625T204700Z | |
33 | LAST-MODIFIED:20180625T204700Z | |
34 | END:VEVENT | |
35 | END:VCALENDAR |
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VTIMEZONE | |
4 | TZID:Europe/Berlin | |
5 | BEGIN:DAYLIGHT | |
6 | TZOFFSETFROM:+0100 | |
7 | TZOFFSETTO:+0200 | |
8 | TZNAME:CEST | |
9 | DTSTART:19700329T020000 | |
10 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3 | |
11 | END:DAYLIGHT | |
12 | BEGIN:STANDARD | |
13 | TZOFFSETFROM:+0200 | |
14 | TZOFFSETTO:+0100 | |
15 | TZNAME:CET | |
16 | DTSTART:19701025T030000 | |
17 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 | |
18 | END:STANDARD | |
19 | END:VTIMEZONE | |
20 | BEGIN:VEVENT | |
21 | CREATED:20180603T194125Z | |
22 | LAST-MODIFIED:20180603T194144Z | |
23 | DTSTAMP:20180603T194144Z | |
24 | UID:6ff13ee1-e548-41b1-8e08-d7725423743a | |
25 | SUMMARY:long-event | |
26 | DTSTART;TZID=Europe/Berlin:20160605T130000 | |
27 | DTEND;TZID=Europe/Berlin:20260605T150000 | |
28 | TRANSP:OPAQUE | |
29 | SEQUENCE:1 | |
30 | END:VEVENT | |
31 | END:VCALENDAR |
0 | [general] | |
1 | interval = 5 | |
2 | idle_time = 900 | |
3 | suspend_cmd = /usr/bin/systemctl suspend | |
4 | wakeup_cmd = echo {timestamp:.0f} > /sys/class/rtc/rtc0/wakealarm | |
5 | woke_up_file = /var/run/autosuspend-just-woke-up | |
6 | ||
7 | [check.Ping] | |
8 | enabled = true | |
9 | hosts = localhost |
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VTIMEZONE | |
4 | TZID:Europe/Berlin | |
5 | BEGIN:DAYLIGHT | |
6 | TZOFFSETFROM:+0100 | |
7 | TZOFFSETTO:+0200 | |
8 | TZNAME:CEST | |
9 | DTSTART:19700329T020000 | |
10 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3 | |
11 | END:DAYLIGHT | |
12 | BEGIN:STANDARD | |
13 | TZOFFSETFROM:+0200 | |
14 | TZOFFSETTO:+0100 | |
15 | TZNAME:CET | |
16 | DTSTART:19701025T030000 | |
17 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 | |
18 | END:STANDARD | |
19 | END:VTIMEZONE | |
20 | BEGIN:VEVENT | |
21 | CREATED:20180601T200433Z | |
22 | LAST-MODIFIED:20180601T200455Z | |
23 | DTSTAMP:20180601T200455Z | |
24 | UID:1c056498-9c83-4e0f-bb77-777c967c9a54 | |
25 | SUMMARY:before include | |
26 | DTSTART;TZID=Europe/Berlin:20180603T210000 | |
27 | DTEND;TZID=Europe/Berlin:20180604T020000 | |
28 | TRANSP:OPAQUE | |
29 | X-MOZ-GENERATION:2 | |
30 | SEQUENCE:1 | |
31 | END:VEVENT | |
32 | BEGIN:VEVENT | |
33 | CREATED:20180601T200328Z | |
34 | LAST-MODIFIED:20180601T200511Z | |
35 | DTSTAMP:20180601T200511Z | |
36 | UID:db4b1c02-6ac2-4def-bfb0-9a96b510387e | |
37 | SUMMARY:direct start | |
38 | DTSTART;TZID=Europe/Berlin:20180604T000000 | |
39 | DTEND;TZID=Europe/Berlin:20180604T030000 | |
40 | TRANSP:OPAQUE | |
41 | X-MOZ-GENERATION:2 | |
42 | SEQUENCE:1 | |
43 | END:VEVENT | |
44 | BEGIN:VEVENT | |
45 | CREATED:20180601T200518Z | |
46 | LAST-MODIFIED:20180601T200531Z | |
47 | DTSTAMP:20180601T200531Z | |
48 | UID:06622f56-d945-490b-9fd7-0fe5015f3188 | |
49 | SUMMARY:in between | |
50 | DTSTART;TZID=Europe/Berlin:20180607T040000 | |
51 | DTEND;TZID=Europe/Berlin:20180607T090000 | |
52 | TRANSP:OPAQUE | |
53 | X-MOZ-GENERATION:1 | |
54 | END:VEVENT | |
55 | BEGIN:VEVENT | |
56 | CREATED:20180601T200351Z | |
57 | LAST-MODIFIED:20180601T200555Z | |
58 | DTSTAMP:20180601T200555Z | |
59 | UID:48d1debe-e457-4bde-9bea-ab18be136d4a | |
60 | SUMMARY:before do not include | |
61 | DTSTART;TZID=Europe/Berlin:20180603T220000 | |
62 | DTEND;TZID=Europe/Berlin:20180604T000000 | |
63 | TRANSP:OPAQUE | |
64 | X-MOZ-GENERATION:4 | |
65 | SEQUENCE:2 | |
66 | END:VEVENT | |
67 | BEGIN:VEVENT | |
68 | CREATED:20180601T200531Z | |
69 | LAST-MODIFIED:20180601T200615Z | |
70 | DTSTAMP:20180601T200615Z | |
71 | UID:0a36a2e8-fac3-4337-8464-f52e5cf17bd5 | |
72 | SUMMARY:direct end | |
73 | DTSTART;TZID=Europe/Berlin:20180610T220000 | |
74 | DTEND;TZID=Europe/Berlin:20180611T000000 | |
75 | TRANSP:OPAQUE | |
76 | X-MOZ-GENERATION:4 | |
77 | SEQUENCE:1 | |
78 | END:VEVENT | |
79 | BEGIN:VEVENT | |
80 | CREATED:20180601T200619Z | |
81 | LAST-MODIFIED:20180601T200633Z | |
82 | DTSTAMP:20180601T200633Z | |
83 | UID:19bf0d84-3286-44d8-8376-67549a419001 | |
84 | SUMMARY:end overlap | |
85 | DTSTART;TZID=Europe/Berlin:20180610T210000 | |
86 | DTEND;TZID=Europe/Berlin:20180611T020000 | |
87 | TRANSP:OPAQUE | |
88 | X-MOZ-GENERATION:2 | |
89 | SEQUENCE:1 | |
90 | END:VEVENT | |
91 | BEGIN:VEVENT | |
92 | CREATED:20180601T200643Z | |
93 | LAST-MODIFIED:20180601T200651Z | |
94 | DTSTAMP:20180601T200651Z | |
95 | UID:ae376911-eab5-45fe-bb5b-14e9fd904b44 | |
96 | SUMMARY:end after | |
97 | DTSTART;TZID=Europe/Berlin:20180611T000000 | |
98 | DTEND;TZID=Europe/Berlin:20180611T030000 | |
99 | TRANSP:OPAQUE | |
100 | X-MOZ-GENERATION:1 | |
101 | END:VEVENT | |
102 | BEGIN:VEVENT | |
103 | CREATED:20180602T144323Z | |
104 | LAST-MODIFIED:20180602T144338Z | |
105 | DTSTAMP:20180602T144338Z | |
106 | UID:f52ee7b1-810f-4b08-bf28-80e8ae226ac3 | |
107 | SUMMARY:overlapping | |
108 | DTSTART;TZID=Europe/Berlin:20180602T200000 | |
109 | DTEND;TZID=Europe/Berlin:20180612T230000 | |
110 | TRANSP:OPAQUE | |
111 | X-MOZ-GENERATION:2 | |
112 | SEQUENCE:1 | |
113 | END:VEVENT | |
114 | END:VCALENDAR |
0 | [general] | |
1 | interval = 2 | |
2 | idle_time = 5 | |
3 | suspend_cmd = touch @TMPDIR@/would_suspend | |
4 | wakeup_cmd = echo {timestamp:.0f} > @TMPDIR@/wakeup_at | |
5 | notify_cmd_wakeup = echo {timestamp:.0f} > @TMPDIR@/notify | |
6 | notify_cmd_no_wakeup = touch @TMPDIR@/notify | |
7 | woke_up_file = @TMPDIR@/test-woke-up | |
8 | ||
9 | [check.ExternalCommand] | |
10 | enabled = True | |
11 | command = false |
0 | [general] | |
1 | interval = 2 | |
2 | idle_time = 5 | |
3 | suspend_cmd = touch @TMPDIR@/would_suspend | |
4 | wakeup_cmd = echo {timestamp:.0f} > @TMPDIR@/wakeup_at | |
5 | notify_cmd_wakeup = echo {timestamp:.0f} > @TMPDIR@/notify | |
6 | notify_cmd_no_wakeup = touch @TMPDIR@/notify | |
7 | woke_up_file = @TMPDIR@/test-woke-up | |
8 | wakeup_delta = 10 | |
9 | ||
10 | [check.ExternalCommand] | |
11 | enabled = True | |
12 | command = false | |
13 | ||
14 | [wakeup.File] | |
15 | enabled = True | |
16 | path = @TMPDIR@/wakeup_time |
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VTIMEZONE | |
4 | TZID:Europe/Berlin | |
5 | BEGIN:DAYLIGHT | |
6 | TZOFFSETFROM:+0100 | |
7 | TZOFFSETTO:+0200 | |
8 | TZNAME:CEST | |
9 | DTSTART:19700329T020000 | |
10 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3 | |
11 | END:DAYLIGHT | |
12 | BEGIN:STANDARD | |
13 | TZOFFSETFROM:+0200 | |
14 | TZOFFSETTO:+0100 | |
15 | TZNAME:CET | |
16 | DTSTART:19701025T030000 | |
17 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 | |
18 | END:STANDARD | |
19 | END:VTIMEZONE | |
20 | BEGIN:VEVENT | |
21 | CREATED:20180603T194125Z | |
22 | LAST-MODIFIED:20180603T194144Z | |
23 | DTSTAMP:20180603T194144Z | |
24 | UID:6ff13ee1-e548-41b1-8e08-d7725423743a | |
25 | SUMMARY:long-event | |
26 | DTSTART;TZID=Europe/Berlin:20040605T130000 | |
27 | DTEND;TZID=Europe/Berlin:20040605T150000 | |
28 | TRANSP:OPAQUE | |
29 | SEQUENCE:1 | |
30 | END:VEVENT | |
31 | END:VCALENDAR |
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VTIMEZONE | |
4 | TZID:Europe/Berlin | |
5 | BEGIN:DAYLIGHT | |
6 | TZOFFSETFROM:+0100 | |
7 | TZOFFSETTO:+0200 | |
8 | TZNAME:CEST | |
9 | DTSTART:19700329T020000 | |
10 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3 | |
11 | END:DAYLIGHT | |
12 | BEGIN:STANDARD | |
13 | TZOFFSETFROM:+0200 | |
14 | TZOFFSETTO:+0100 | |
15 | TZNAME:CET | |
16 | DTSTART:19701025T030000 | |
17 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 | |
18 | END:STANDARD | |
19 | END:VTIMEZONE | |
20 | BEGIN:VEVENT | |
21 | CREATED:20180603T200159Z | |
22 | LAST-MODIFIED:20180603T200414Z | |
23 | DTSTAMP:20180603T200414Z | |
24 | UID:d083699e-6f37-4a85-b20d-f03750aa6691 | |
25 | SUMMARY:recurring | |
26 | RRULE:FREQ=DAILY | |
27 | EXDATE:20181214T130000Z | |
28 | DTSTART;TZID=Europe/Berlin:20180606T140000 | |
29 | DTEND;TZID=Europe/Berlin:20180606T160000 | |
30 | TRANSP:OPAQUE | |
31 | X-MOZ-GENERATION:4 | |
32 | SEQUENCE:2 | |
33 | END:VEVENT | |
34 | BEGIN:VEVENT | |
35 | CREATED:20180603T200213Z | |
36 | LAST-MODIFIED:20180603T200243Z | |
37 | DTSTAMP:20180603T200243Z | |
38 | UID:d083699e-6f37-4a85-b20d-f03750aa6691 | |
39 | SUMMARY:recurring | |
40 | RECURRENCE-ID;TZID=Europe/Berlin:20180612T140000 | |
41 | DTSTART;TZID=Europe/Berlin:20180612T140000 | |
42 | DTEND;TZID=Europe/Berlin:20180612T160000 | |
43 | SEQUENCE:5 | |
44 | TRANSP:OPAQUE | |
45 | X-MOZ-GENERATION:4 | |
46 | END:VEVENT | |
47 | BEGIN:VEVENT | |
48 | CREATED:20180603T200401Z | |
49 | LAST-MODIFIED:20180603T200407Z | |
50 | DTSTAMP:20180603T200407Z | |
51 | UID:d083699e-6f37-4a85-b20d-f03750aa6691 | |
52 | SUMMARY:recurring | |
53 | RECURRENCE-ID;TZID=Europe/Berlin:20181212T140000 | |
54 | DTSTART;TZID=Europe/Berlin:20181212T110000 | |
55 | DTEND;TZID=Europe/Berlin:20181212T130000 | |
56 | SEQUENCE:2 | |
57 | TRANSP:OPAQUE | |
58 | X-MOZ-GENERATION:4 | |
59 | END:VEVENT | |
60 | END:VCALENDAR |
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VTIMEZONE | |
4 | TZID:Europe/Berlin | |
5 | BEGIN:DAYLIGHT | |
6 | TZOFFSETFROM:+0100 | |
7 | TZOFFSETTO:+0200 | |
8 | TZNAME:CEST | |
9 | DTSTART:19700329T020000 | |
10 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3 | |
11 | END:DAYLIGHT | |
12 | BEGIN:STANDARD | |
13 | TZOFFSETFROM:+0200 | |
14 | TZOFFSETTO:+0100 | |
15 | TZNAME:CET | |
16 | DTSTART:19701025T030000 | |
17 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 | |
18 | END:STANDARD | |
19 | END:VTIMEZONE | |
20 | BEGIN:VEVENT | |
21 | CREATED:20180601T182719Z | |
22 | LAST-MODIFIED:20180601T182803Z | |
23 | DTSTAMP:20180601T182803Z | |
24 | UID:74c93379-f763-439b-9d11-eca4d431bfc7 | |
25 | SUMMARY:Stay awake | |
26 | RRULE:FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR | |
27 | DTSTART;TZID=Europe/Berlin:20180327T090000 | |
28 | DTEND;TZID=Europe/Berlin:20180327T180000 | |
29 | TRANSP:OPAQUE | |
30 | X-MOZ-GENERATION:2 | |
31 | SEQUENCE:1 | |
32 | END:VEVENT | |
33 | END:VCALENDAR |
0 | BEGIN:VCALENDAR | |
1 | PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN | |
2 | VERSION:2.0 | |
3 | BEGIN:VTIMEZONE | |
4 | TZID:Europe/Berlin | |
5 | BEGIN:DAYLIGHT | |
6 | TZOFFSETFROM:+0100 | |
7 | TZOFFSETTO:+0200 | |
8 | TZNAME:CEST | |
9 | DTSTART:19700329T020000 | |
10 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3 | |
11 | END:DAYLIGHT | |
12 | BEGIN:STANDARD | |
13 | TZOFFSETFROM:+0200 | |
14 | TZOFFSETTO:+0100 | |
15 | TZNAME:CET | |
16 | DTSTART:19701025T030000 | |
17 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 | |
18 | END:STANDARD | |
19 | END:VTIMEZONE | |
20 | BEGIN:VEVENT | |
21 | CREATED:20180603T194125Z | |
22 | LAST-MODIFIED:20180603T194144Z | |
23 | DTSTAMP:20180603T194144Z | |
24 | UID:6ff13ee1-e548-41b1-8e08-d7725423743a | |
25 | SUMMARY:recurring | |
26 | RRULE:FREQ=DAILY | |
27 | DTSTART;TZID=Europe/Berlin:20180605T130000 | |
28 | DTEND;TZID=Europe/Berlin:20180605T150000 | |
29 | TRANSP:OPAQUE | |
30 | X-MOZ-GENERATION:4 | |
31 | SEQUENCE:1 | |
32 | END:VEVENT | |
33 | BEGIN:VEVENT | |
34 | CREATED:20180603T194138Z | |
35 | LAST-MODIFIED:20180603T194140Z | |
36 | DTSTAMP:20180603T194140Z | |
37 | UID:6ff13ee1-e548-41b1-8e08-d7725423743a | |
38 | SUMMARY:recurring | |
39 | RECURRENCE-ID;TZID=Europe/Berlin:20180613T130000 | |
40 | DTSTART;TZID=Europe/Berlin:20180613T160000 | |
41 | DTEND;TZID=Europe/Berlin:20180613T180000 | |
42 | SEQUENCE:2 | |
43 | TRANSP:OPAQUE | |
44 | X-MOZ-GENERATION:4 | |
45 | END:VEVENT | |
46 | BEGIN:VEVENT | |
47 | CREATED:20180603T194141Z | |
48 | LAST-MODIFIED:20180603T194144Z | |
49 | DTSTAMP:20180603T194144Z | |
50 | UID:6ff13ee1-e548-41b1-8e08-d7725423743a | |
51 | SUMMARY:recurring | |
52 | RECURRENCE-ID;TZID=Europe/Berlin:20180615T130000 | |
53 | DTSTART;TZID=Europe/Berlin:20180615T110000 | |
54 | DTEND;TZID=Europe/Berlin:20180615T130000 | |
55 | SEQUENCE:2 | |
56 | TRANSP:OPAQUE | |
57 | X-MOZ-GENERATION:4 | |
58 | END:VEVENT | |
59 | END:VCALENDAR |
0 | ||
1 | Samba version 4.7.0 | |
2 | PID Username Group Machine Protocol Version Encryption Signing | |
3 | ---------------------------------------------------------------------------------------------------------------------------------------- |
0 | ||
1 | Samba version 3.5.1 | |
2 | PID Username Group Machine | |
3 | ------------------------------------------------------------------- | |
4 | 14944 <uid> it 131.169.214.117 (131.169.214.117) | |
5 | 14944 <uid> it 131.169.214.117 (131.169.214.117) |
0 | [general] | |
1 | interval = 20 | |
2 | idle_time = 50 | |
3 | suspend_cmd = touch @TMPDIR@/would_suspend | |
4 | wakeup_cmd = echo {timestamp:d} > @TMPDIR@/wakeup_at | |
5 | woke_up_file = @TMPDIR@/test-woke-up | |
6 | ||
7 | [check.XPath] | |
8 | enabled = True | |
9 | xpath = /a | |
10 | url = asdfjlkasdjkfkasdlfjaklsdf |
0 | [general] | |
1 | interval = 2 | |
2 | idle_time = 5 | |
3 | suspend_cmd = touch @TMPDIR@/would_suspend | |
4 | wakeup_cmd = echo {timestamp:.0f} > @TMPDIR@/wakeup_at | |
5 | woke_up_file = @TMPDIR@/test-woke-up | |
6 | ||
7 | [check.ExternalCommand] | |
8 | enabled = True | |
9 | command = false | |
10 | ||
11 | [wakeup.File] | |
12 | enabled = True | |
13 | path = @TMPDIR@/wakeup_time |
0 | [general] | |
1 | interval = 2 | |
2 | idle_time = 5 | |
3 | suspend_cmd = touch @TMPDIR@/would_suspend | |
4 | wakeup_cmd = echo {timestamp:d} > @TMPDIR@/wakeup_at | |
5 | woke_up_file = @TMPDIR@/test-woke-up | |
6 | ||
7 | [check.ExternalCommand] | |
8 | enabled = True | |
9 | command = false |
0 | <?xml version="1.0" encoding="UTF-8"?> | |
1 | <MediaContainer size="1"> | |
2 | <Track addedAt="1377889097" duration="242129" grandparentKey="/library/metadata/12276" grandparentRatingKey="12276" grandparentTitle="Ivy Quainoo" guid="local://23987" index="1" key="/library/metadata/23987" lastViewedAt="1517676589" librarySectionID="1" librarySectionKey="/library/sections/1" librarySectionTitle="Musik" originalTitle="Ivy Quainoo" parentIndex="1" parentKey="/library/metadata/23986" parentRatingKey="23986" parentTitle="Ivy [+video]" ratingKey="23987" sessionKey="2" summary="" title="Do You Like What You See" type="track" viewCount="1" viewOffset="50153"> | |
3 | <Media audioChannels="2" audioCodec="mp3" bitrate="246" container="mp3" duration="242129" id="19660" selected="1"> | |
4 | <Part container="mp3" duration="242129" file="/data/music/Amazon/Ivy Quainoo/01 - Do You Like What You See.mp3" hasThumbnail="1" id="19660" key="/library/parts/19660/1377889097/file.mp3" size="7440315" decision="directplay" selected="1"> | |
5 | <Stream albumGain="-9.09" albumPeak="1.000000" albumRange="7.163570" audioChannelLayout="stereo" bitrate="240" channels="2" codec="mp3" gain="-9.03" id="18761" index="0" loudness="-8.97" lra="4.06" peak="1.000000" samplingRate="44100" selected="1" streamType="2" location="direct" /> | |
6 | </Part> | |
7 | </Media> | |
8 | <User id="1" thumb="https://plex.tv/users/91d79663c4d78d14/avatar?c=1526664750" title="my_email@adress" /> | |
9 | <Player address="192.168.1.123" device="AFTN" machineIdentifier="656ab0b970bffaef-com-plexapp-android" model="needle" platform="Android" platformVersion="7.1.2" product="Plex for Android" profile="Android" remotePublicAddress="" state="playing" title="AFTN" vendor="Amazon" version="6.15.2.4143" local="1" userID="1" /> | |
10 | <Session id="656ab0b970bffaef-com-plexapp-android" bandwidth="504" location="lan" /> | |
11 | </Track> | |
12 | </MediaContainer> |
0 | import datetime | |
1 | import logging | |
2 | import os | |
3 | import os.path | |
4 | ||
5 | from freezegun import freeze_time | |
6 | import pytest | |
7 | ||
8 | import autosuspend | |
9 | ||
10 | ||
# Mark every test in this module as an integration test (selectable via
# pytest's ``-m integration``).
pytestmark = pytest.mark.integration


# Directory containing this test file; used to locate the test_data fixtures.
ROOT = os.path.dirname(os.path.realpath(__file__))

# Names of marker files created inside tmpdir by the commands configured in
# the *.conf fixtures; the tests assert on their presence and contents.
SUSPENSION_FILE = 'would_suspend'
SCHEDULED_FILE = 'wakeup_at'
WOKE_UP_FILE = 'test-woke-up'
NOTIFY_FILE = 'notify'
20 | ||
21 | ||
def configure_config(config, tmpdir):
    """Copy a config fixture into tmpdir, expanding the @TMPDIR@ placeholder.

    Returns the py.path object of the written configuration file.
    """
    target = tmpdir.join(config)
    source = os.path.join(ROOT, 'test_data', config)
    with open(source, 'r') as in_config:
        contents = in_config.read()
    with target.open('w') as out_config:
        out_config.write(contents.replace('@TMPDIR@', tmpdir.strpath))
    return target
29 | ||
30 | ||
@pytest.fixture
def rapid_sleep(mocker):
    """Freeze time and make ``time.sleep`` advance it instantly."""
    with freeze_time() as frozen_time:
        def advance(seconds):
            frozen_time.tick(datetime.timedelta(seconds=seconds))
        mocker.patch('time.sleep', side_effect=advance)
        yield frozen_time
38 | ||
39 | ||
def test_no_suspend_if_matching(tmpdir, rapid_sleep):
    """A matching activity check must prevent the suspend command."""
    config = configure_config('dont_suspend.conf', tmpdir)
    autosuspend.main(['-c', config.strpath, '-r', '10', '-l'])

    assert not tmpdir.join(SUSPENSION_FILE).check()
49 | ||
50 | ||
def test_suspend(tmpdir, rapid_sleep):
    """Without any matching check, the suspend command must run."""
    config = configure_config('would_suspend.conf', tmpdir)
    autosuspend.main(['-c', config.strpath, '-r', '10', '-l'])

    assert tmpdir.join(SUSPENSION_FILE).check()
60 | ||
61 | ||
def test_wakeup_scheduled(tmpdir, rapid_sleep):
    """A pending wake-up must be scheduled before suspending."""
    # configure when to wake up
    wakeup_at = datetime.datetime.now(
        datetime.timezone.utc) + datetime.timedelta(hours=4)
    with tmpdir.join('wakeup_time').open('w') as out:
        out.write(str(wakeup_at.timestamp()))

    config = configure_config('would_schedule.conf', tmpdir)
    autosuspend.main(['-c', config.strpath, '-r', '10', '-l'])

    assert tmpdir.join(SUSPENSION_FILE).check()
    assert tmpdir.join(SCHEDULED_FILE).check()
    # the scheduled time is the requested wake-up reduced by 30 seconds,
    # presumably the default wakeup_delta (notify_wakeup.conf sets 10 and
    # its test subtracts 10) -- confirm against autosuspend's defaults
    scheduled = int(tmpdir.join(SCHEDULED_FILE).read())
    assert scheduled == int(
        round((wakeup_at - datetime.timedelta(seconds=30)).timestamp()))
80 | ||
81 | ||
def test_woke_up_file_removed(tmpdir, rapid_sleep):
    """An existing woke-up marker file must be deleted by the daemon."""
    tmpdir.join(WOKE_UP_FILE).ensure()
    config = configure_config('dont_suspend.conf', tmpdir)
    autosuspend.main(['-c', config.strpath, '-r', '5', '-l'])

    assert not tmpdir.join(WOKE_UP_FILE).check()
91 | ||
92 | ||
def test_notify_call(tmpdir, rapid_sleep):
    """Without a scheduled wake-up, the no-wakeup notify command runs."""
    config = configure_config('notify.conf', tmpdir)
    autosuspend.main(['-c', config.strpath, '-r', '10', '-l'])

    notify_file = tmpdir.join(NOTIFY_FILE)
    assert tmpdir.join(SUSPENSION_FILE).check()
    assert notify_file.check()
    # notify_cmd_no_wakeup only touches the file, so it stays empty
    assert len(notify_file.read()) == 0
104 | ||
105 | ||
def test_notify_call_wakeup(tmpdir, rapid_sleep):
    """With a scheduled wake-up, the notify command receives its time."""
    # configure when to wake up
    wakeup_at = datetime.datetime.now(
        datetime.timezone.utc) + datetime.timedelta(hours=4)
    with tmpdir.join('wakeup_time').open('w') as out:
        out.write(str(wakeup_at.timestamp()))

    config = configure_config('notify_wakeup.conf', tmpdir)
    autosuspend.main(['-c', config.strpath, '-r', '10', '-l'])

    assert tmpdir.join(SUSPENSION_FILE).check()
    assert tmpdir.join(NOTIFY_FILE).check()
    # notify_wakeup.conf configures wakeup_delta = 10, hence the 10 s offset
    notified = int(tmpdir.join(NOTIFY_FILE).read())
    assert notified == int(
        round((wakeup_at - datetime.timedelta(seconds=10)).timestamp()))
124 | ||
125 | ||
def test_temporary_errors_logged(tmpdir, rapid_sleep, caplog):
    """Temporary check failures must surface as warnings in the log."""
    config = configure_config('temporary_error.conf', tmpdir)
    autosuspend.main(['-c', config.strpath, '-r', '10', '-l'])

    warnings = [
        record for record in caplog.record_tuples
        if record[1] == logging.WARNING
        and 'XPath' in record[2]
        and 'failed' in record[2]
    ]

    assert warnings
0 | from datetime import timedelta | |
1 | import os.path | |
2 | ||
3 | from dateutil import parser | |
4 | from dateutil.tz import tzlocal | |
5 | ||
6 | from autosuspend.util.ical import CalendarEvent, list_calendar_events | |
7 | ||
8 | ||
class TestCalendarEvent:

    def test_str(self):
        """The string representation must contain the event summary."""
        begin = parser.parse("2018-06-11 02:00:00 UTC")
        event = CalendarEvent('summary', begin, begin + timedelta(hours=1))

        assert 'summary' in str(event)
17 | ||
18 | ||
19 | class TestListCalendarEvents: | |
20 | ||
21 | def test_simple_recurring(self): | |
22 | """Tests for basic recurrence. | |
23 | ||
24 | Events are collected with the same DST setting as their original | |
25 | creation. | |
26 | """ | |
27 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
28 | 'simple-recurring.ics'), 'rb') as f: | |
29 | start = parser.parse("2018-06-18 04:00:00 UTC") | |
30 | end = start + timedelta(weeks=2) | |
31 | events = list_calendar_events(f, start, end) | |
32 | ||
33 | expected_start_times = [ | |
34 | parser.parse("2018-06-18 07:00:00 UTC"), | |
35 | parser.parse("2018-06-19 07:00:00 UTC"), | |
36 | parser.parse("2018-06-20 07:00:00 UTC"), | |
37 | parser.parse("2018-06-21 07:00:00 UTC"), | |
38 | parser.parse("2018-06-22 07:00:00 UTC"), | |
39 | parser.parse("2018-06-25 07:00:00 UTC"), | |
40 | parser.parse("2018-06-26 07:00:00 UTC"), | |
41 | parser.parse("2018-06-27 07:00:00 UTC"), | |
42 | parser.parse("2018-06-28 07:00:00 UTC"), | |
43 | parser.parse("2018-06-29 07:00:00 UTC"), | |
44 | ] | |
45 | ||
46 | assert expected_start_times == [e.start for e in events] | |
47 | ||
48 | def test_recurrence_different_dst(self): | |
49 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
50 | 'simple-recurring.ics'), 'rb') as f: | |
51 | start = parser.parse("2018-11-19 04:00:00 UTC") | |
52 | end = start + timedelta(weeks=2) | |
53 | events = list_calendar_events(f, start, end) | |
54 | ||
55 | expected_start_times = [ | |
56 | parser.parse("2018-11-19 08:00:00 UTC"), | |
57 | parser.parse("2018-11-20 08:00:00 UTC"), | |
58 | parser.parse("2018-11-21 08:00:00 UTC"), | |
59 | parser.parse("2018-11-22 08:00:00 UTC"), | |
60 | parser.parse("2018-11-23 08:00:00 UTC"), | |
61 | parser.parse("2018-11-26 08:00:00 UTC"), | |
62 | parser.parse("2018-11-27 08:00:00 UTC"), | |
63 | parser.parse("2018-11-28 08:00:00 UTC"), | |
64 | parser.parse("2018-11-29 08:00:00 UTC"), | |
65 | parser.parse("2018-11-30 08:00:00 UTC"), | |
66 | ] | |
67 | ||
68 | assert expected_start_times == [e.start for e in events] | |
69 | ||
70 | def test_all_day_events(self): | |
71 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
72 | 'all-day-events.ics'), 'rb') as f: | |
73 | start = parser.parse("2018-06-11 02:00:00 UTC") | |
74 | end = start + timedelta(weeks=1) | |
75 | events = list_calendar_events(f, start, end) | |
76 | ||
77 | assert len(events) == 3 | |
78 | expected_summaries = ['start', 'between', 'end'] | |
79 | assert [e.summary for e in events] == expected_summaries | |
80 | ||
81 | def test_normal_events(self): | |
82 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
83 | 'normal-events-corner-cases.ics'), 'rb') as f: | |
84 | start = parser.parse("2018-06-04 00:00:00 +0200") | |
85 | end = start + timedelta(weeks=1) | |
86 | events = list_calendar_events(f, start, end) | |
87 | ||
88 | expected = [ | |
89 | ('overlapping', parser.parse("2018-06-02 20:00:00 +0200")), | |
90 | ('before include', parser.parse("2018-06-03 21:00:00 +0200")), | |
91 | ('direct start', parser.parse("2018-06-04 00:00:00 +0200")), | |
92 | ('in between', parser.parse("2018-06-07 04:00:00 +0200")), | |
93 | ('end overlap', parser.parse("2018-06-10 21:00:00 +0200")), | |
94 | ('direct end', parser.parse("2018-06-10 22:00:00 +0200")), | |
95 | ] | |
96 | ||
97 | assert [(e.summary, e.start) for e in events] == expected | |
98 | ||
99 | def test_floating_time(self): | |
100 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
101 | 'floating.ics'), 'rb') as f: | |
102 | start = parser.parse("2018-06-09 00:00:00 +0200") | |
103 | end = start + timedelta(weeks=1) | |
104 | events = list_calendar_events(f, start, end) | |
105 | ||
106 | tzinfo = {'LOCAL': tzlocal()} | |
107 | ||
108 | expected = [ | |
109 | ('floating', parser.parse("2018-06-10 15:00:00 LOCAL", | |
110 | tzinfos=tzinfo)), | |
111 | ('floating recurring', | |
112 | parser.parse("2018-06-12 18:00:00 LOCAL", tzinfos=tzinfo)), | |
113 | ('floating recurring', | |
114 | parser.parse("2018-06-13 18:00:00 LOCAL", tzinfos=tzinfo)), | |
115 | ('floating recurring', | |
116 | parser.parse("2018-06-14 18:00:00 LOCAL", tzinfos=tzinfo)), | |
117 | ('floating recurring', | |
118 | parser.parse("2018-06-15 18:00:00 LOCAL", tzinfos=tzinfo)), | |
119 | ] | |
120 | ||
121 | assert [(e.summary, e.start) for e in events] == expected | |
122 | ||
123 | def test_floating_time_other_dst(self): | |
124 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
125 | 'floating.ics'), 'rb') as f: | |
126 | start = parser.parse("2018-12-09 00:00:00 +0200") | |
127 | end = start + timedelta(weeks=1) | |
128 | events = list_calendar_events(f, start, end) | |
129 | ||
130 | tzinfo = {'LOCAL': tzlocal()} | |
131 | ||
132 | expected = [ | |
133 | ('floating recurring', | |
134 | parser.parse("2018-12-09 18:00:00 LOCAL", tzinfos=tzinfo)), | |
135 | ('floating recurring', | |
136 | parser.parse("2018-12-10 18:00:00 LOCAL", tzinfos=tzinfo)), | |
137 | ('floating recurring', | |
138 | parser.parse("2018-12-11 18:00:00 LOCAL", tzinfos=tzinfo)), | |
139 | ('floating recurring', | |
140 | parser.parse("2018-12-12 18:00:00 LOCAL", tzinfos=tzinfo)), | |
141 | ('floating recurring', | |
142 | parser.parse("2018-12-13 18:00:00 LOCAL", tzinfos=tzinfo)), | |
143 | ('floating recurring', | |
144 | parser.parse("2018-12-14 18:00:00 LOCAL", tzinfos=tzinfo)), | |
145 | ('floating recurring', | |
146 | parser.parse("2018-12-15 18:00:00 LOCAL", tzinfos=tzinfo)), | |
147 | ] | |
148 | ||
149 | assert [(e.summary, e.start) for e in events] == expected | |
150 | ||
151 | def test_exclusions(self): | |
152 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
153 | 'exclusions.ics'), 'rb') as f: | |
154 | start = parser.parse("2018-06-09 04:00:00 UTC") | |
155 | end = start + timedelta(weeks=2) | |
156 | events = list_calendar_events(f, start, end) | |
157 | ||
158 | expected_start_times = [ | |
159 | parser.parse("2018-06-11 12:00:00 UTC"), | |
160 | parser.parse("2018-06-12 12:00:00 UTC"), | |
161 | parser.parse("2018-06-13 12:00:00 UTC"), | |
162 | parser.parse("2018-06-15 12:00:00 UTC"), | |
163 | parser.parse("2018-06-16 12:00:00 UTC"), | |
164 | parser.parse("2018-06-17 12:00:00 UTC"), | |
165 | ] | |
166 | ||
167 | assert expected_start_times == [e.start for e in events] | |
168 | ||
169 | def test_reucrring_single_changes(self): | |
170 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
171 | 'single-change.ics'), 'rb') as f: | |
172 | start = parser.parse("2018-06-11 00:00:00 UTC") | |
173 | end = start + timedelta(weeks=1) | |
174 | events = list_calendar_events(f, start, end) | |
175 | ||
176 | expected_start_times = [ | |
177 | parser.parse("2018-06-11 11:00:00 UTC"), | |
178 | parser.parse("2018-06-12 11:00:00 UTC"), | |
179 | parser.parse("2018-06-13 14:00:00 UTC"), | |
180 | parser.parse("2018-06-14 11:00:00 UTC"), | |
181 | parser.parse("2018-06-15 09:00:00 UTC"), | |
182 | parser.parse("2018-06-16 11:00:00 UTC"), | |
183 | parser.parse("2018-06-17 11:00:00 UTC"), | |
184 | ] | |
185 | ||
186 | assert expected_start_times == [e.start for e in events] | |
187 | ||
188 | def test_reucrring_change_dst(self): | |
189 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
190 | 'recurring-change-dst.ics'), 'rb') as f: | |
191 | start = parser.parse("2018-12-10 00:00:00 UTC") | |
192 | end = start + timedelta(weeks=1) | |
193 | events = list_calendar_events(f, start, end) | |
194 | ||
195 | expected_start_times = [ | |
196 | parser.parse("2018-12-10 13:00:00 UTC"), | |
197 | parser.parse("2018-12-11 13:00:00 UTC"), | |
198 | parser.parse("2018-12-12 10:00:00 UTC"), | |
199 | parser.parse("2018-12-13 13:00:00 UTC"), | |
200 | parser.parse("2018-12-15 13:00:00 UTC"), | |
201 | parser.parse("2018-12-16 13:00:00 UTC"), | |
202 | ] | |
203 | ||
204 | assert expected_start_times == [e.start for e in events] | |
205 | ||
206 | def test_recurring_start_and_end_inclusive(self): | |
207 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
208 | 'issue-41.ics'), 'rb') as f: | |
209 | start = parser.parse("2018-06-26 15:13:51 UTC") | |
210 | end = start + timedelta(weeks=1) | |
211 | events = list_calendar_events(f, start, end) | |
212 | ||
213 | expected_start_times = [ | |
214 | parser.parse("2018-06-26 15:00:00 UTC"), | |
215 | parser.parse("2018-06-27 15:00:00 UTC"), | |
216 | parser.parse("2018-06-28 15:00:00 UTC"), | |
217 | parser.parse("2018-06-29 15:00:00 UTC"), | |
218 | parser.parse("2018-06-30 15:00:00 UTC"), | |
219 | parser.parse("2018-07-01 15:00:00 UTC"), | |
220 | parser.parse("2018-07-02 15:00:00 UTC"), | |
221 | parser.parse("2018-07-03 15:00:00 UTC"), | |
222 | ] | |
223 | ||
224 | assert expected_start_times == [e.start for e in events] | |
225 | ||
226 | def test_single_start_end_inclusive(self): | |
227 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
228 | 'old-event.ics'), 'rb') as f: | |
229 | start = parser.parse("2004-06-05 11:15:00 UTC") | |
230 | end = start + timedelta(hours=1) | |
231 | events = list_calendar_events(f, start, end) | |
232 | ||
233 | expected_start_times = [ | |
234 | parser.parse("2004-06-05 11:00:00 UTC"), | |
235 | ] | |
236 | ||
237 | assert expected_start_times == [e.start for e in events] | |
238 | ||
239 | def test_single_all_day_start_end_inclusive(self): | |
240 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
241 | 'all-day-starts.ics'), 'rb') as f: | |
242 | start = parser.parse("2018-06-25 10:00:00 UTC") | |
243 | end = start + timedelta(hours=2) | |
244 | events = list_calendar_events(f, start, end) | |
245 | ||
246 | expected_start_times = [ | |
247 | parser.parse("2018-06-25 02:00:00 UTC").date(), | |
248 | ] | |
249 | ||
250 | assert expected_start_times == [e.start for e in events] | |
251 | ||
252 | def test_longer_single_all_day_start_end_inclusive(self): | |
253 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
254 | 'all-day-starts.ics'), 'rb') as f: | |
255 | start = parser.parse("2018-06-29 10:00:00 UTC") | |
256 | end = start + timedelta(hours=2) | |
257 | events = list_calendar_events(f, start, end) | |
258 | ||
259 | expected_start_times = [ | |
260 | parser.parse("2018-06-28 02:00:00 UTC").date(), | |
261 | ] | |
262 | ||
263 | assert expected_start_times == [e.start for e in events] | |
264 | ||
265 | def test_recurring_all_day_start_end_inclusive(self): | |
266 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
267 | 'all-day-recurring.ics'), 'rb') as f: | |
268 | start = parser.parse("2018-06-29 10:00:00 UTC") | |
269 | end = start + timedelta(hours=2) | |
270 | events = list_calendar_events(f, start, end) | |
271 | ||
272 | expected_start_times = [ | |
273 | parser.parse("2018-06-29 02:00:00 UTC").date(), | |
274 | ] | |
275 | ||
276 | assert expected_start_times == [e.start for e in events] | |
277 | ||
278 | def test_recurring_all_day_start_in_between(self): | |
279 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
280 | 'all-day-recurring.ics'), 'rb') as f: | |
281 | start = parser.parse("2018-06-29 00:00:00 UTC") | |
282 | end = start + timedelta(days=1) | |
283 | events = list_calendar_events(f, start, end) | |
284 | ||
285 | expected_start_times = [ | |
286 | parser.parse("2018-06-29 00:00:00 UTC").date(), | |
287 | parser.parse("2018-06-30 00:00:00 UTC").date(), | |
288 | ] | |
289 | ||
290 | assert expected_start_times == [e.start for e in events] | |
291 | ||
292 | def test_recurring_all_day_exclusions(self): | |
293 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
294 | 'all-day-recurring-exclusions.ics'), 'rb') as f: | |
295 | start = parser.parse("2018-06-27 00:00:00 UTC") | |
296 | end = start + timedelta(days=4) | |
297 | events = list_calendar_events(f, start, end) | |
298 | ||
299 | expected_start_times = [ | |
300 | parser.parse("2018-06-27 00:00:00 UTC").date(), | |
301 | parser.parse("2018-06-28 00:00:00 UTC").date(), | |
302 | parser.parse("2018-06-29 00:00:00 UTC").date(), | |
303 | parser.parse("2018-07-01 00:00:00 UTC").date(), | |
304 | ] | |
305 | ||
306 | assert expected_start_times == [e.start for e in events] | |
307 | ||
308 | def test_recurring_all_day_exclusions_end(self): | |
309 | with open(os.path.join(os.path.dirname(__file__), 'test_data', | |
310 | 'all-day-recurring-exclusions.ics'), 'rb') as f: | |
311 | start = parser.parse("2018-06-26 00:00:00 UTC") | |
312 | end = start + timedelta(days=4) | |
313 | events = list_calendar_events(f, start, end) | |
314 | ||
315 | expected_start_times = [ | |
316 | parser.parse("2018-06-26 00:00:00 UTC").date(), | |
317 | parser.parse("2018-06-27 00:00:00 UTC").date(), | |
318 | parser.parse("2018-06-28 00:00:00 UTC").date(), | |
319 | parser.parse("2018-06-29 00:00:00 UTC").date(), | |
320 | ] | |
321 | ||
322 | assert expected_start_times == [e.start for e in events] |
0 | import pytest | |
1 | ||
2 | from autosuspend.util.systemd import list_logind_sessions | |
3 | ||
4 | ||
def test_list_logind_sessions():
    """Smoke test: listing logind sessions via dbus yields a result.

    Skipped entirely when the dbus bindings are not installed.
    """
    pytest.importorskip('dbus')

    sessions = list_logind_sessions()
    assert sessions is not None
0 | [tox] | |
1 | envlist = test-py35-psutil50-dateutil26,test-py36-psutil{50,latest}-dateutil{26,latest},test-py37-psutillatest-dateutillatest, integration-py{35,36}, mindeps, check, docs | |
2 | ||
3 | [testenv] | |
4 | passenv = TOXENV CI TRAVIS TRAVIS_* | |
5 | extras = Mpd,Kodi,XPath,ical,localfiles,test | |
6 | deps = | |
7 | codecov | |
8 | psutil50: psutil>=5.0,<5.1 | |
9 | psutil54: psutil>=5.4,<5.5 | |
10 | psutillatest: psutil | |
11 | dateutil26: python-dateutil>=2.6,<2.7 | |
12 | dateutil27: python-dateutil>=2.7,<2.8 | |
13 | dateutillatest: python-dateutil | |
14 | commands = | |
15 | {envbindir}/python -V | |
16 | {envbindir}/python -c 'import psutil; print(psutil.__version__)' | |
17 | {envbindir}/python -c 'import dateutil; print(dateutil.__version__)' | |
18 | test: {envbindir}/pytest --cov=autosuspend -m "not integration" {posargs} | |
19 | integration: {envbindir}/pytest --cov=autosuspend -m "integration" {posargs} | |
20 | {envbindir}/codecov -e TOXENV | |
21 | ||
22 | [testenv:mindeps] | |
23 | description = tests whether the project can be used without any extras | |
24 | extras = | |
25 | deps = | |
26 | commands = | |
27 | {envbindir}/python -V | |
28 | {envbindir}/python -c "import autosuspend; import autosuspend.checks.activity; import autosuspend.checks.wakeup" | |
29 | {envbindir}/autosuspend -c tests/test_data/mindeps-test.conf -r 1 | |
30 | ||
31 | [testenv:check] | |
32 | deps = | |
33 | flake8 | |
34 | flake8-docstrings | |
35 | flake8-per-file-ignores | |
36 | flake8-import-order | |
37 | flake8-print | |
38 | flake8-string-format | |
39 | pep8-naming | |
40 | flake8-comprehensions | |
41 | flake8-bugbear | |
42 | flake8-builtins | |
43 | mypy | |
44 | commands = | |
45 | {envbindir}/python -V | |
46 | {envbindir}/flake8 {posargs} | |
47 | {envbindir}/mypy src | |
48 | ||
49 | [testenv:docs] | |
50 | basepython = python3.6 | |
51 | deps = | |
52 | sphinx | |
53 | sphinx_rtd_theme | |
54 | sphinx-issues | |
55 | commands = {envbindir}/sphinx-build -W -b html -d {envtmpdir}/doctrees doc/source {envtmpdir}/html | |
56 | ||
57 | [testenv:devenv] | |
58 | envdir = env | |
59 | basepython = python3.6 | |
60 | usedevelop = True | |
61 | deps = | |
62 | psutil | |
63 | python-mpd2 | |
64 | requests | |
65 | requests-file | |
66 | lxml | |
67 | dbus-python | |
68 | icalendar | |
69 | python-dateutil | |
70 | pytest | |
71 | pytest-cov | |
72 | pytest-mock | |
73 | freezegun | |
74 | flake8 | |
75 | flake8-mypy | |
76 | flake8-docstrings | |
77 | flake8-per-file-ignores | |
78 | commands = | |
79 | ||
80 | [travis] | |
81 | python = | |
82 | 3.6: py36, check, mindeps, docs | |
83 | ||
84 | [travis:env] | |
85 | TASK= | |
86 | test: test | |
87 | checks: check, mindeps, docs | |
88 | integration: integration |