Codebase list dfdatetime / a1a1c7f
New upstream version 20161017 Hilko Bengen 7 years ago
42 changed file(s) with 7295 addition(s) and 248 deletion(s). Raw diff Collapse all Expand all
0 # Files to ignore by git.
1
2 # Back-up files
3 *~
4 *.swp
5
6 # Generic auto-generated build files
7 *.pyc
8 *.pyo
9
10 # Specific auto-generated build files
11 /.tox
12 /__pycache__
13 /build
14 /dfdatetime.egg-info
15 /dist
16
17 # Code review files
18 /.review
19
20 # Test coverage files
21 .coverage
22 tests-coverage.txt
0 [style]
1 based_on_style = chromium
2 COALESCE_BRACKETS = True
3 SPLIT_BEFORE_FIRST_ARGUMENT = True
0 language: python
1 matrix:
2 include:
3 - os: linux
4 dist: trusty
5 sudo: required
6 python: 2.7
7 - os: linux
8 dist: trusty
9 sudo: required
10 python: 3.4
11 - os: osx
12 language: generic
13 install:
14 - ./config/travis/install.sh
15 script:
16 - if test `uname -s` = 'Linux' && test ${TRAVIS_PYTHON_VERSION} = "2.7"; then coverage run --source=dfdatetime --omit="*_test*,*__init__*,*test_lib*" ./run_tests.py; else ./run_tests.py; fi
17 - python setup.py build
18 - python setup.py sdist
19 - python setup.py bdist
20 - if test `uname -s` = 'Linux'; then mkdir -p ${PWD}/tmp/lib/python${TRAVIS_PYTHON_VERSION}/site-packages/ && PYTHONPATH=${PWD}/tmp/lib/python${TRAVIS_PYTHON_VERSION}/site-packages/ python setup.py install --prefix=${PWD}/tmp/; fi
21 after_success:
22 - if test `uname -s` = 'Linux' && test ${TRAVIS_PYTHON_VERSION} = "2.7"; then coveralls --verbose; fi
+0
-14
PKG-INFO less more
0 Metadata-Version: 1.1
1 Name: dfdatetime
2 Version: 20160323
3 Summary: Digital Forensics Date and Time (dfDateTime).
4 Home-page: https://github.com/log2timeline/dfdatetime
5 Author: dfDateTime development team
6 Author-email: log2timeline-dev@googlegroups.com
7 License: Apache License, Version 2.0
8 Description: dfDateTime, or Digital Forensics date and time, provides date and time objects to preserve accuracy and precision.
9 Platform: UNKNOWN
10 Classifier: Development Status :: 3 - Alpha
11 Classifier: Environment :: Console
12 Classifier: Operating System :: OS Independent
13 Classifier: Programming Language :: Python
0 environment:
1 matrix:
2 - PYTHON: "C:\\Python27"
3
4 build: off
5
6 test_script:
7 - "%PYTHON%\\python.exe run_tests.py"
0 dfdatetime (20160323-1) unstable; urgency=low
0 dfdatetime (20161017-1) unstable; urgency=low
11
22 * Auto-generated
33
4 -- Log2Timeline <log2timeline-dev@googlegroups.com> Wed, 23 Mar 2016 22:00:46 +0100
4 -- Log2Timeline <log2timeline-dev@googlegroups.com> Mon, 17 Oct 2016 07:10:42 +0200
#!/bin/bash
#
# Script to set up Travis-CI test VM.
#
# On Linux this installs the packages required for coverage reporting;
# on other platforms it is a no-op.

# Abort on the first failing command.
set -e;

# Packages needed to run coverage reporting via coveralls.
COVERALL_DEPENDENCIES="python-coverage python-coveralls python-docopt";

if [ "$(uname -s)" = "Linux" ];
then
  sudo add-apt-repository ppa:gift/dev -y;
  sudo apt-get update -q;
  sudo apt-get install -y ${COVERALL_DEPENDENCIES};
fi
44 objects to preserve accuracy and precision.
55 """
66
7 __version__ = '20160323'
7 __version__ = '20161017'
# -*- coding: utf-8 -*-
"""Cocoa timestamp implementation."""

from dfdatetime import definitions
from dfdatetime import interface


class CocoaTime(interface.DateTimeValues):
  """Class that implements a Cocoa timestamp.

  The Cocoa timestamp is a floating point value that contains the number of
  seconds since 2001-01-01 00:00:00 (also known as the Cocoa epoch).
  Negative values represent date and times predating the Cocoa epoch.

  Also see:
    https://developer.apple.com/library/ios/documentation/cocoa/Conceptual/
    DatesAndTimes/Articles/dtDates.html

  Attributes:
    is_local_time (bool): True if the date and time value is in local time.
    precision (str): precision of the date and time value, which should
        be one of the PRECISION_VALUES in definitions.
    timestamp (float): Cocoa timestamp.
  """
  # The difference between Jan 1, 2001 and Jan 1, 1970 in seconds.
  _COCOA_TO_POSIX_BASE = -978307200

  def __init__(self, timestamp=None):
    """Initializes a Cocoa timestamp.

    Args:
      timestamp (Optional[float]): Cocoa timestamp.
    """
    super(CocoaTime, self).__init__()
    self.precision = definitions.PRECISION_1_SECOND
    self.timestamp = timestamp

  def CopyFromString(self, time_string):
    """Copies a Cocoa timestamp from a string containing a date and time value.

    Args:
      time_string (str): date and time value formatted as:
          YYYY-MM-DD hh:mm:ss.######[+-]##:##

          Where # are numeric digits ranging from 0 to 9 and the seconds
          fraction can be either 3 or 6 digits. The time of day, seconds
          fraction and time zone offset are optional. The default time zone
          is UTC.

    Raises:
      ValueError: if the time string is invalid or not supported.
    """
    date_time_values = self._CopyDateTimeFromString(time_string)

    year = date_time_values.get(u'year', 0)
    month = date_time_values.get(u'month', 0)
    day_of_month = date_time_values.get(u'day_of_month', 0)
    hours = date_time_values.get(u'hours', 0)
    minutes = date_time_values.get(u'minutes', 0)
    seconds = date_time_values.get(u'seconds', 0)
    microseconds = date_time_values.get(u'microseconds', None)

    timestamp = self._GetNumberOfSecondsFromElements(
        year, month, day_of_month, hours, minutes, seconds)
    timestamp += self._COCOA_TO_POSIX_BASE

    timestamp = float(timestamp)
    if microseconds is not None:
      timestamp += float(microseconds) / 1000000

    self.timestamp = timestamp
    self.is_local_time = False

  def CopyToStatTimeTuple(self):
    """Copies the Cocoa timestamp to a stat timestamp tuple.

    Returns:
      tuple[int, int]: a POSIX timestamp in seconds and the remainder in
          100 nano seconds or (None, None) on error.
    """
    if self.timestamp is None:
      return None, None

    timestamp = self.timestamp - self._COCOA_TO_POSIX_BASE
    # Use divmod instead of int() truncation so that timestamps predating
    # the POSIX epoch are floored consistently: int() truncates towards
    # zero while the modulo-based remainder is always non-negative, which
    # previously misrepresented e.g. -1.5 as -1 seconds + 0.5 seconds.
    number_of_seconds, fraction_of_second = divmod(timestamp, 1)
    return int(number_of_seconds), int(fraction_of_second * 10000000)

  def GetPlasoTimestamp(self):
    """Retrieves a timestamp that is compatible with plaso.

    Returns:
      int: a POSIX timestamp in microseconds or None on error.
    """
    if self.timestamp is None:
      return

    timestamp = (self.timestamp - self._COCOA_TO_POSIX_BASE) * 1000000
    return int(timestamp)
# -*- coding: utf-8 -*-
"""The date and time definitions.

Precision values describe the granularity of a date and time value.
"""

PRECISION_1_DAY = u'1d'
PRECISION_1_NANOSECOND = u'1ns'
PRECISION_100_NANOSECONDS = u'100ns'
PRECISION_1_MICROSECOND = u'1us'
PRECISION_1_MILLISECOND = u'1ms'
PRECISION_1_SECOND = u'1s'
PRECISION_2_SECONDS = u'2s'

# The set of supported precision values.
PRECISION_VALUES = frozenset((
    PRECISION_1_DAY,
    PRECISION_1_NANOSECOND,
    PRECISION_100_NANOSECONDS,
    PRECISION_1_MICROSECOND,
    PRECISION_1_MILLISECOND,
    PRECISION_1_SECOND,
    PRECISION_2_SECONDS))
00 # -*- coding: utf-8 -*-
11 """Fake timestamp implementation."""
22
3 import calendar
43 import time
54
5 from dfdatetime import definitions
66 from dfdatetime import interface
77
88
99 class FakeTime(interface.DateTimeValues):
10 """Class that implements a fake timestamp."""
10 """Class that implements a fake timestamp.
11
12 The fake timestamp is intended for testing purposes. On initialization
13 it contains the current time in UTC in microsecond precision.
14
15 Attributes:
16 is_local_time (bool): True if the date and time value is in local time.
17 precision (str): precision of the date and time value, which should
18 be one of the PRECISION_VALUES in definitions.
19 """
20
21 def __init__(self):
22 """Initializes a fake timestamp."""
23 super(FakeTime, self).__init__()
24 # Note that time.time() and divmod return floating point values.
25 timestamp, fraction_of_second = divmod(time.time(), 1)
26 self._microseconds = int(fraction_of_second * 1000000)
27 self._number_of_seconds = int(timestamp)
28 self.precision = definitions.PRECISION_1_MICROSECOND
29
30 def CopyFromString(self, time_string):
31 """Copies a fake timestamp from a string containing a date and time value.
32
33 Args:
34 time_string (str): date and time value formatted as:
35 YYYY-MM-DD hh:mm:ss.######[+-]##:##
36
37 Where # are numeric digits ranging from 0 to 9 and the seconds
38 fraction can be either 3 or 6 digits. The time of day, seconds
39 fraction and time zone offset are optional. The default time zone
40 is UTC.
41 """
42 date_time_values = self._CopyDateTimeFromString(time_string)
43
44 year = date_time_values.get(u'year', 0)
45 month = date_time_values.get(u'month', 0)
46 day_of_month = date_time_values.get(u'day_of_month', 0)
47 hours = date_time_values.get(u'hours', 0)
48 minutes = date_time_values.get(u'minutes', 0)
49 seconds = date_time_values.get(u'seconds', 0)
50
51 self._number_of_seconds = self._GetNumberOfSecondsFromElements(
52 year, month, day_of_month, hours, minutes, seconds)
53 self._microseconds = date_time_values.get(u'microseconds', None)
54
55 self.is_local_time = False
1156
1257 def CopyToStatTimeTuple(self):
13 """Copies the timestamp to a stat timestamp tuple.
58 """Copies the fake timestamp to a stat timestamp tuple.
1459
1560 Returns:
16 A tuple of an integer containing a POSIX timestamp in seconds
17 and an integer containing the remainder in 100 nano seconds.
18 Currently the remainder will always be 0.
61 tuple[int, int]: a POSIX timestamp in seconds and the remainder in
62 100 nano seconds or (None, None) on error.
1963 """
20 time_elements = time.gmtime()
21 return calendar.timegm(time_elements), 0
64 if self._number_of_seconds is None:
65 return None, None
66
67 if self._microseconds is not None:
68 return self._number_of_seconds, self._microseconds * 10
69
70 return self._number_of_seconds, None
71
72 def GetPlasoTimestamp(self):
73 """Retrieves a timestamp that is compatible with plaso.
74
75 Returns:
76 int: a POSIX timestamp in microseconds or None on error.
77 """
78 if self._number_of_seconds is None:
79 return
80
81 if self._microseconds is not None:
82 return (self._number_of_seconds * 1000000) + self._microseconds
83
84 return self._number_of_seconds * 1000000
# -*- coding: utf-8 -*-
"""FAT date time implementation."""

from dfdatetime import definitions
from dfdatetime import interface


class FATDateTime(interface.DateTimeValues):
  """Class that implements a FAT date time.

  The FAT date time is mainly used in DOS/Windows file formats and FAT.

  The FAT date and time is a 32-bit value containing two 16-bit values:
  * The date (lower 16-bit).
    * bits 0 - 4: day of month, where 1 represents the first day
    * bits 5 - 8: month of year, where 1 represent January
    * bits 9 - 15: year since 1980
  * The time of day (upper 16-bit).
    * bits 0 - 4: seconds (in 2 second intervals)
    * bits 5 - 10: minutes
    * bits 11 - 15: hours

  The FAT date time has no time zone information and is typically stored
  in the local time of the computer.

  Attributes:
    is_local_time (bool): True if the date and time value is in local time.
    precision (str): precision of the date and time value, which should
        be one of the PRECISION_VALUES in definitions.
  """

  # The difference between Jan 1, 1980 and Jan 1, 1970 in seconds.
  _FAT_DATE_TO_POSIX_BASE = 315532800

  def __init__(self, fat_date_time=None):
    """Initializes a FAT date time.

    Args:
      fat_date_time (Optional[int]): FAT date time.
    """
    number_of_seconds = None
    if fat_date_time is not None:
      number_of_seconds = self._GetNumberOfSeconds(fat_date_time)

    super(FATDateTime, self).__init__()
    self._number_of_seconds = number_of_seconds
    self.precision = definitions.PRECISION_2_SECONDS

  def _GetNumberOfSeconds(self, fat_date_time):
    """Retrieves the number of seconds from a FAT date time.

    Args:
      fat_date_time (int): FAT date time.

    Returns:
      int: number of seconds since January 1, 1980 00:00:00.

    Raises:
      ValueError: if the month, day of month, hours, minutes or seconds
          value is out of bounds.
    """
    day_of_month = (fat_date_time & 0x1f)
    month = ((fat_date_time >> 5) & 0x0f)
    year = (fat_date_time >> 9) & 0x7f

    # Use the absolute year (1980 + offset) so that leap year calculations
    # apply the Gregorian century rule correctly, e.g. 2100 (year offset
    # 120) is divisible by 4 but is not a leap year.
    days_per_month = self._GetDaysPerMonth(1980 + year, month)
    if day_of_month < 1 or day_of_month > days_per_month:
      raise ValueError(u'Day of month value out of bounds.')

    number_of_days = self._GetDayOfYear(1980 + year, month, day_of_month)
    number_of_days -= 1
    for past_year in range(0, year):
      number_of_days += self._GetNumberOfDaysInYear(1980 + past_year)

    fat_date_time >>= 16

    seconds = (fat_date_time & 0x1f) * 2
    minutes = (fat_date_time >> 5) & 0x3f
    hours = (fat_date_time >> 11) & 0x1f

    if hours not in range(0, 24):
      raise ValueError(u'Hours value out of bounds.')

    if minutes not in range(0, 60):
      raise ValueError(u'Minutes value out of bounds.')

    if seconds not in range(0, 60):
      raise ValueError(u'Seconds value out of bounds.')

    number_of_seconds = (((hours * 60) + minutes) * 60) + seconds
    number_of_seconds += number_of_days * self._SECONDS_PER_DAY
    return number_of_seconds

  def CopyFromString(self, time_string):
    """Copies a FAT date time from a string containing a date and time value.

    Args:
      time_string (str): date and time value formatted as:
          YYYY-MM-DD hh:mm:ss.######[+-]##:##

          Where # are numeric digits ranging from 0 to 9 and the seconds
          fraction can be either 3 or 6 digits. The time of day, seconds
          fraction and time zone offset are optional. The default time zone
          is UTC.

    Raises:
      ValueError: if the time string is invalid or not supported.
    """
    date_time_values = self._CopyDateTimeFromString(time_string)

    year = date_time_values.get(u'year', 0)
    month = date_time_values.get(u'month', 0)
    day_of_month = date_time_values.get(u'day_of_month', 0)
    hours = date_time_values.get(u'hours', 0)
    minutes = date_time_values.get(u'minutes', 0)
    seconds = date_time_values.get(u'seconds', 0)

    # The year offset is stored in 7 bits, hence 1980 + 0x7f is the maximum.
    if year < 1980 or year > (1980 + 0x7f):
      raise ValueError(u'Year value not supported: {0!s}.'.format(year))

    self._number_of_seconds = self._GetNumberOfSecondsFromElements(
        year, month, day_of_month, hours, minutes, seconds)
    self._number_of_seconds -= self._FAT_DATE_TO_POSIX_BASE

    self.is_local_time = False

  def CopyToStatTimeTuple(self):
    """Copies the FAT date time to a stat timestamp tuple.

    Returns:
      tuple[int, int]: a POSIX timestamp in seconds and the remainder in
          100 nano seconds or (None, None) on error.
    """
    if self._number_of_seconds is None or self._number_of_seconds < 0:
      return None, None

    timestamp = self._number_of_seconds + self._FAT_DATE_TO_POSIX_BASE
    return timestamp, None

  def GetPlasoTimestamp(self):
    """Retrieves a timestamp that is compatible with plaso.

    Returns:
      int: a POSIX timestamp in microseconds or None on error.
    """
    if self._number_of_seconds is None or self._number_of_seconds < 0:
      return

    return (self._number_of_seconds + self._FAT_DATE_TO_POSIX_BASE) * 1000000
00 # -*- coding: utf-8 -*-
11 """FILETIME timestamp implementation."""
22
3 import calendar
4
3 from dfdatetime import definitions
54 from dfdatetime import interface
65
76
1514 2 x 32-bit integers and is presumed to be unsigned.
1615
1716 Attributes:
18 timestamp: the FILETIME timestamp.
17 is_local_time (bool): True if the date and time value is in local time.
18 precision (str): precision of the date and time value, which should
19 be one of the PRECISION_VALUES in definitions.
20 timestamp (int): FILETIME timestamp.
1921 """
2022
2123 # The difference between Jan 1, 1601 and Jan 1, 1970 in seconds.
2224 _FILETIME_TO_POSIX_BASE = 11644473600
23 _INT64_MAX = (1 << 63) - 1
25 _UINT64_MAX = (1 << 64) - 1
2426
2527 def __init__(self, timestamp=None):
26 """Initializes a FILETIME object.
28 """Initializes a FILETIME timestamp.
2729
2830 Args:
29 timestamp: optional FILETIME timestamp.
31 timestamp (Optional[int]): FILETIME timestamp.
3032 """
3133 super(Filetime, self).__init__()
34 self.precision = definitions.PRECISION_100_NANOSECONDS
3235 self.timestamp = timestamp
3336
3437 def CopyFromString(self, time_string):
3538 """Copies a FILETIME from a string containing a date and time value.
3639
3740 Args:
38 time_string: a string containing a date and time value formatted as:
39 YYYY-MM-DD hh:mm:ss.######[+-]##:##
40 Where # are numeric digits ranging from 0 to 9 and the
41 seconds fraction can be either 3 or 6 digits. The time
42 of day, seconds fraction and timezone offset are optional.
43 The default timezone is UTC.
41 time_string (str): date and time value formatted as:
42 YYYY-MM-DD hh:mm:ss.######[+-]##:##
4443
45 Returns:
46 An integer containing the timestamp.
44 Where # are numeric digits ranging from 0 to 9 and the seconds
45 fraction can be either 3 or 6 digits. The time of day, seconds
46 fraction and time zone offset are optional. The default time zone
47 is UTC.
4748
4849 Raises:
4950 ValueError: if the time string is invalid or not supported.
5051 """
51 if not time_string:
52 raise ValueError(u'Invalid time string.')
52 date_time_values = self._CopyDateTimeFromString(time_string)
5353
54 time_string_length = len(time_string)
54 year = date_time_values.get(u'year', 0)
55 month = date_time_values.get(u'month', 0)
56 day_of_month = date_time_values.get(u'day_of_month', 0)
57 hours = date_time_values.get(u'hours', 0)
58 minutes = date_time_values.get(u'minutes', 0)
59 seconds = date_time_values.get(u'seconds', 0)
5560
56 year, month, day_of_month = self._CopyDateFromString(time_string)
61 if year < 1601:
62 raise ValueError(u'Year value not supported: {0!s}.'.format(year))
5763
58 hours = 0
59 minutes = 0
60 seconds = 0
61 micro_seconds = 0
62 timezone_offset = 0
63
64 if time_string_length > 10:
65 # If a time of day is specified the time string it should at least
66 # contain 'YYYY-MM-DD hh:mm:ss'.
67 if time_string[10] != u' ':
68 raise ValueError(u'Invalid time string.')
69
70 hours, minutes, seconds, micro_seconds, timezone_offset = (
71 self._CopyTimeFromString(time_string[11:]))
72
73 self.timestamp = int(calendar.timegm((
74 year, month, day_of_month, hours, minutes, seconds)))
75
76 self.timestamp += timezone_offset + self._FILETIME_TO_POSIX_BASE
77 self.timestamp = (self.timestamp * 1000000) + micro_seconds
64 self.timestamp = self._GetNumberOfSecondsFromElements(
65 year, month, day_of_month, hours, minutes, seconds)
66 self.timestamp += self._FILETIME_TO_POSIX_BASE
67 self.timestamp *= 1000000
68 self.timestamp += date_time_values.get(u'microseconds', 0)
7869 self.timestamp *= 10
7970
71 self.is_local_time = False
72
8073 def CopyToStatTimeTuple(self):
81 """Copies the timestamp to a stat timestamp tuple.
74 """Copies the FILETIME timestamp to a stat timestamp tuple.
8275
8376 Returns:
84 A tuple of an integer containing a POSIX timestamp in seconds
85 and an integer containing the remainder in 100 nano seconds or
86 None on error.
77 tuple[int, int]: a POSIX timestamp in seconds and the remainder in
78 100 nano seconds or (None, None) on error.
8779 """
88 if self.timestamp < 0:
80 if (self.timestamp is None or self.timestamp < 0 or
81 self.timestamp > self._UINT64_MAX):
8982 return None, None
9083
9184 timestamp, remainder = divmod(self.timestamp, 10000000)
9285 timestamp -= self._FILETIME_TO_POSIX_BASE
93 if timestamp > self._INT64_MAX:
94 return None, None
9586 return timestamp, remainder
87
88 def GetPlasoTimestamp(self):
89 """Retrieves a timestamp that is compatible with plaso.
90
91 Returns:
92 int: a POSIX timestamp in microseconds or None on error.
93 """
94 if (self.timestamp is None or self.timestamp < 0 or
95 self.timestamp > self._UINT64_MAX):
96 return
97
98 timestamp, _ = divmod(self.timestamp, 10)
99 return timestamp - (self._FILETIME_TO_POSIX_BASE * 1000000)
# -*- coding: utf-8 -*-
"""HFS timestamp implementation."""

from dfdatetime import definitions
from dfdatetime import interface


class HFSTime(interface.DateTimeValues):
  """Class that implements a HFS timestamp.

  The HFS timestamp is an unsigned 32-bit integer containing the number of
  seconds since 1904-01-01 00:00:00. In HFS the timestamp is typically
  stored in local time, in HFS+/HFSX it is stored in UTC.

  Attributes:
    is_local_time (bool): True if the date and time value is in local time.
    precision (str): precision of the date and time value, which should
        be one of the PRECISION_VALUES in definitions.
    timestamp (int): HFS timestamp.
  """
  # The difference between Jan 1, 1904 and Jan 1, 1970 in seconds.
  _HFS_TO_POSIX_BASE = 2082844800
  _UINT32_MAX = (1 << 32) - 1

  def __init__(self, timestamp=None):
    """Initializes a HFS timestamp.

    Args:
      timestamp (Optional[int]): HFS timestamp.
    """
    super(HFSTime, self).__init__()
    self.precision = definitions.PRECISION_1_SECOND
    self.timestamp = timestamp

  def CopyFromString(self, time_string):
    """Copies a HFS timestamp from a string containing a date and time value.

    Args:
      time_string (str): date and time value formatted as:
          YYYY-MM-DD hh:mm:ss.######[+-]##:##

          Where # are numeric digits ranging from 0 to 9 and the seconds
          fraction can be either 3 or 6 digits. The time of day, seconds
          fraction and time zone offset are optional. The default time zone
          is UTC.

    Raises:
      ValueError: if the time string is invalid or not supported.
    """
    date_time_values = self._CopyDateTimeFromString(time_string)

    year = date_time_values.get(u'year', 0)
    # A 32-bit HFS timestamp cannot represent dates before 1904 or
    # after early 2040.
    if year < 1904 or year > 2040:
      raise ValueError(u'Year value not supported.')

    self.timestamp = self._GetNumberOfSecondsFromElements(
        year,
        date_time_values.get(u'month', 0),
        date_time_values.get(u'day_of_month', 0),
        date_time_values.get(u'hours', 0),
        date_time_values.get(u'minutes', 0),
        date_time_values.get(u'seconds', 0))
    self.timestamp += self._HFS_TO_POSIX_BASE

    self.is_local_time = False

  def CopyToStatTimeTuple(self):
    """Copies the HFS timestamp to a stat timestamp tuple.

    Returns:
      tuple[int, int]: a POSIX timestamp in seconds and the remainder in
          100 nano seconds or (None, None) on error.
    """
    timestamp = self.timestamp
    if timestamp is None or timestamp < 0 or timestamp > self._UINT32_MAX:
      return None, None

    return timestamp - self._HFS_TO_POSIX_BASE, 0

  def GetPlasoTimestamp(self):
    """Retrieves a timestamp that is compatible with plaso.

    Returns:
      int: a POSIX timestamp in microseconds or None on error.
    """
    timestamp = self.timestamp
    if timestamp is None or timestamp < 0 or timestamp > self._UINT32_MAX:
      return

    return (timestamp - self._HFS_TO_POSIX_BASE) * 1000000
11 """Date and time values interface."""
22
33 import abc
4 import calendar
45
56
67 class DateTimeValues(object):
7 """Class that defines the date time values interface."""
8 """Defines the date time values interface.
9
10 This is the super class of different date and time representations.
11
12 Attributes:
13 is_local_time (bool): True if the date and time value is in local time.
14 precision (str): precision of the date and time value, which should
15 be one of the PRECISION_VALUES in definitions.
16 """
817
918 _DAYS_PER_MONTH = (31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
19
20 # The number of seconds in a day
21 _SECONDS_PER_DAY = 24 * 60 * 60
22
23 def __init__(self):
24 """Initializes date time values."""
25 super(DateTimeValues, self).__init__()
26 self.is_local_time = False
27 self.precision = None
1028
1129 def _CopyDateFromString(self, date_string):
1230 """Copies a date from a string.
1331
1432 Args:
15 date_string: a string containing a date value formatted as:
16 YYYY-MM-DD
17
18 Returns:
19 A tuple of integers containing year, month, day of month.
33 date_string (str): date value formatted as: YYYY-MM-DD
34
35 Returns:
36 tuple[int, int, int]: year, month, day of month.
2037
2138 Raises:
2239 ValueError: if the date string is invalid or not supported.
2441 date_string_length = len(date_string)
2542
2643 # The date string should at least contain 'YYYY-MM-DD'.
27 if (date_string_length < 10 or date_string[4] != u'-' or
28 date_string[7] != u'-'):
44 if date_string_length < 10:
45 raise ValueError(u'Date string too short.')
46
47 if date_string[4] != u'-' or date_string[7] != u'-':
2948 raise ValueError(u'Invalid date string.')
3049
3150 try:
4968
5069 return year, month, day_of_month
5170
71 def _CopyDateTimeFromString(self, time_string):
72 """Copies a date and time from a string.
73
74 Args:
75 time_string (str): date and time value formatted as:
76 YYYY-MM-DD hh:mm:ss.######[+-]##:##
77
78 Where # are numeric digits ranging from 0 to 9 and the seconds
79 fraction can be either 3 or 6 digits. The time of day, seconds
80 fraction and time zone offset are optional. The default time zone
81 is UTC.
82
83 Returns:
84 dict[str, int]: date and time values, such as year, month, day of month,
85 hours, minutes, seconds, microseconds.
86
87 Raises:
88 ValueError: if the time string is invalid or not supported.
89 """
90 if not time_string:
91 raise ValueError(u'Invalid time string.')
92
93 time_string_length = len(time_string)
94
95 year, month, day_of_month = self._CopyDateFromString(time_string)
96
97 if time_string_length <= 10:
98 return {
99 u'year': year,
100 u'month': month,
101 u'day_of_month': day_of_month}
102
103 # If a time of day is specified the time string it should at least
104 # contain 'YYYY-MM-DD hh:mm:ss'.
105 if time_string[10] != u' ':
106 raise ValueError(
107 u'Invalid time string - space missing as date and time separator.')
108
109 hours, minutes, seconds, microseconds, time_zone_offset = (
110 self._CopyTimeFromString(time_string[11:]))
111
112 if time_zone_offset:
113 time_zone_hours, time_zone_minutes = divmod(time_zone_offset, 60)
114
115 minutes += time_zone_minutes
116
117 # Since divmod makes sure the sign of time_zone_minutes is positive
118 # we only need to check the upper bound here, because time_zone_hours
119 # remains signed it is corrected accordingly.
120 if minutes >= 60:
121 minutes -= 60
122 hours += 1
123
124 hours += time_zone_hours
125 if hours < 0:
126 hours += 24
127 day_of_month -= 1
128
129 elif hours >= 24:
130 hours -= 24
131 day_of_month += 1
132
133 days_per_month = self._GetDaysPerMonth(year, month)
134 if day_of_month < 1:
135 month -= 1
136 if month < 1:
137 month = 12
138 year -= 1
139
140 day_of_month += self._GetDaysPerMonth(year, month)
141
142 elif day_of_month > days_per_month:
143 month += 1
144 if month > 12:
145 month = 1
146 year += 1
147
148 day_of_month -= days_per_month
149
150 date_time_values = {
151 u'year': year,
152 u'month': month,
153 u'day_of_month': day_of_month,
154 u'hours': hours,
155 u'minutes': minutes,
156 u'seconds': seconds}
157
158 if microseconds is not None:
159 date_time_values[u'microseconds'] = microseconds
160 return date_time_values
161
52162 def _CopyTimeFromString(self, time_string):
53163 """Copies a time from a string.
54164
55165 Args:
56 time_string: a string containing a time value formatted as:
57 hh:mm:ss.######[+-]##:##
58 Where # are numeric digits ranging from 0 to 9 and the
59 seconds fraction can be either 3 or 6 digits. The seconds
60 fraction and timezone offset are optional.
61
62 Returns:
63 A tuple of integers containing hours, minutes, seconds, microseconds,
64 timezone offset in seconds.
166 time_string (str): time value formatted as:
167 hh:mm:ss.######[+-]##:##
168
169 Where # are numeric digits ranging from 0 to 9 and the seconds
170 fraction can be either 3 or 6 digits. The seconds fraction and
171 time zone offset are optional.
172
173 Returns:
174 tuple[int, int, int, int, int]: hours, minutes, seconds, microseconds,
175 time zone offset in minutes.
65176
66177 Raises:
67178 ValueError: if the time string is invalid or not supported.
68179 """
69180 time_string_length = len(time_string)
70181
182 if time_string_length < 8:
183 raise ValueError(u'Time string too short.')
184
71185 # The time string should at least contain 'hh:mm:ss'.
72 if (time_string_length < 8 or time_string[2] != u':' or
73 time_string[5] != u':'):
186 if time_string[2] != u':' or time_string[5] != u':':
74187 raise ValueError(u'Invalid time string.')
75188
76189 try:
97210 if seconds not in range(0, 60):
98211 raise ValueError(u'Seconds value out of bounds.')
99212
100 micro_seconds = 0
101 timezone_offset = 0
213 microseconds = None
214 time_zone_offset = None
102215
103216 if time_string_length > 8:
104217 if time_string[8] != u'.':
105 timezone_index = 8
218 time_zone_string_index = 8
106219 else:
107 for timezone_index in range(8, time_string_length):
108 if time_string[timezone_index] in (u'+', u'-'):
220 for time_zone_string_index in range(8, time_string_length):
221 if time_string[time_zone_string_index] in (u'+', u'-'):
109222 break
110223
111 # The calculation that follow rely on the timezone index to point
112 # beyond the string in case no timezone offset was defined.
113 if timezone_index == time_string_length - 1:
114 timezone_index += 1
115
116 if timezone_index > 8:
117 fraction_of_seconds_length = timezone_index - 9
224 # The calculations that follow rely on the time zone string index
225 # to point beyond the string in case no time zone offset was defined.
226 if time_zone_string_index == time_string_length - 1:
227 time_zone_string_index += 1
228
229 if time_zone_string_index > 8:
230 fraction_of_seconds_length = time_zone_string_index - 9
118231 if fraction_of_seconds_length not in (3, 6):
119232 raise ValueError(u'Invalid time string.')
120233
121234 try:
122 micro_seconds = int(time_string[9:timezone_index], 10)
235 microseconds = int(time_string[9:time_zone_string_index], 10)
123236 except ValueError:
124237 raise ValueError(u'Unable to parse fraction of seconds.')
125238
126239 if fraction_of_seconds_length == 3:
127 micro_seconds *= 1000
128
129 if timezone_index < time_string_length:
130 if (time_string_length - timezone_index != 6 or
131 time_string[timezone_index + 3] != u':'):
240 microseconds *= 1000
241
242 if time_zone_string_index < time_string_length:
243 if (time_string_length - time_zone_string_index != 6 or
244 time_string[time_zone_string_index + 3] != u':'):
132245 raise ValueError(u'Invalid time string.')
133246
134247 try:
135 timezone_offset = int(
136 time_string[timezone_index + 1:timezone_index + 3])
248 time_zone_offset = int(time_string[
249 time_zone_string_index + 1:time_zone_string_index + 3])
137250 except ValueError:
138 raise ValueError(u'Unable to parse timezone hours offset.')
139
140 if timezone_offset not in range(0, 24):
251 raise ValueError(u'Unable to parse time zone hours offset.')
252
253 if time_zone_offset not in range(0, 24):
141254 raise ValueError(u'Timezone hours offset value out of bounds.')
142255
143 timezone_offset *= 60
256 time_zone_offset *= 60
144257
145258 try:
146 timezone_offset += int(
147 time_string[timezone_index + 4:timezone_index + 6])
259 time_zone_offset += int(time_string[
260 time_zone_string_index + 4:time_zone_string_index + 6])
148261 except ValueError:
149 raise ValueError(u'Unable to parse timezone minutes offset.')
150
151 # Note that when the sign of the timezone offset is negative
262 raise ValueError(u'Unable to parse time zone minutes offset.')
263
264 # Note that when the sign of the time zone offset is negative
152265 # the difference needs to be added. We do so by flipping the sign.
153 if time_string[timezone_index] == u'-':
154 timezone_offset *= 60
155 else:
156 timezone_offset *= -60
157
158 return hours, minutes, seconds, micro_seconds, timezone_offset
266 if time_string[time_zone_string_index] != u'-':
267 time_zone_offset *= -1
268
269 return hours, minutes, seconds, microseconds, time_zone_offset
270
271 def _GetDayOfYear(self, year, month, day_of_month):
272 """Retrieves the day of the year for a specific day of a month in a year.
273
274 Args:
275 year (int): year e.g. 1970.
276 month (int): month where 1 represents January.
277 day_of_month (int): day of the month where 1 represents the first day.
278
279 Returns:
280 int: day of year.
281
282 Raises:
283 ValueError: if the month or day of month value is out of bounds.
284 """
285 if month not in range(1, 13):
286 raise ValueError(u'Month value out of bounds.')
287
288 days_per_month = self._GetDaysPerMonth(year, month)
289 if day_of_month < 1 or day_of_month > days_per_month:
290 raise ValueError(u'Day of month value out of bounds.')
291
292 day_of_year = day_of_month
293 for past_month in range(1, month):
294 day_of_year += self._GetDaysPerMonth(year, past_month)
295
296 return day_of_year
159297
160298 def _GetDaysPerMonth(self, year, month):
161299 """Retrieves the number of days in a month of a specific year.
162300
163301 Args:
164 year: an integer containing the year.
165 month: an integer containing the month ranging from 1 to 12.
166
167 Returns:
168 An integer containing the number of days in the month.
302 year (int): year e.g. 1970.
303 month (int): month ranging from 1 to 12.
304
305 Returns:
306 int: number of days in the month.
169307
170308 Raises:
171309 ValueError: if the month value is out of bounds.
179317
180318 return days_per_month
181319
320 def _GetNumberOfDaysInYear(self, year):
321 """Retrieves the number of days in a specific year.
322
323 Args:
324 year (int): year e.g. 1970.
325
326 Returns:
327 int: number of days in the year.
328 """
329 if self._IsLeapYear(year):
330 return 366
331 return 365
332
333 def _GetNumberOfSecondsFromElements(
334 self, year, month, day, hours, minutes, seconds):
335 """Retrieves the number of seconds from the data and time elements.
336
337 Args:
338 year (int): year e.g. 1970.
339 month (int): month of year.
340 day(int): day of month.
341 hours (int): hours.
342 minutes (int): minutes.
343 seconds (int): seconds.
344
345 Returns:
346 int: number of seconds since January 1, 1970 00:00:00 or None.
347 """
348 if not year or not month or not day:
349 return
350
351 # calendar.timegm requires the time tuple to contain at least
352 # 6 integer values.
353 time_elements_tuple = (
354 year, month, day, hours or 0, minutes or 0, seconds or 0)
355
356 try:
357 number_of_seconds = calendar.timegm(time_elements_tuple)
358 except ValueError:
359 return
360
361 return int(number_of_seconds)
362
182363 def _IsLeapYear(self, year):
183364 """Determines if a year is a leap year.
184365
185366 Args:
186 year: an integer containing the year.
367 year (int): year e.g. 1970.
187368
188369 Returns:
189370 A boolean value indicating if the year is a leap year.
191372 return (year % 4 == 0 and year % 100 != 0) or year % 400 == 0
192373
193374 @abc.abstractmethod
375 def CopyFromString(self, time_string):
376 """Copies a date time value from a string containing a date and time value.
377
378 Args:
379 time_string (str): date and time value formatted as:
380 YYYY-MM-DD hh:mm:ss.######[+-]##:##
381
382 Where # are numeric digits ranging from 0 to 9 and the seconds
383 fraction can be either 3 or 6 digits. The time of day, seconds
384 fraction and time zone offset are optional. The default time zone
385 is UTC.
386
387 Raises:
388 ValueError: if the time string is invalid or not supported.
389 """
390
194391 def CopyToStatTimeTuple(self):
195 """Copies the timestamp to a stat timestamp tuple.
196
197 Returns:
198 A tuple of an integer containing a POSIX timestamp in seconds
199 and an integer containing the remainder in 100 nano seconds or
200 None on error.
201 """
392 """Copies the date time value to a stat timestamp tuple.
393
394 Returns:
395 tuple[int, int]: a POSIX timestamp in seconds and the remainder in
396 100 nano seconds or (None, None) on error.
397 """
398
399 # TODO: remove this method when there is no more need for it in plaso.
400 @abc.abstractmethod
401 def GetPlasoTimestamp(self):
402 """Retrieves a timestamp that is compatible with plaso.
403
404 Returns:
405 int: a POSIX timestamp in microseconds or None on error.
406 """
00 # -*- coding: utf-8 -*-
11 """POSIX timestamp implementation."""
22
3 from dfdatetime import definitions
34 from dfdatetime import interface
45
56
1415 are known to be used.
1516
1617 Attributes:
17 timestamp: the POSIX timestamp.
18 micro_seconds: the number of micro seconds
18 is_local_time (bool): True if the date and time value is in local time.
19 micro_seconds (int): number of microseconds
20 precision (str): precision of the date and time value, which should
21 be one of the PRECISION_VALUES in definitions.
22 timestamp (int): POSIX timestamp.
1923 """
2024
21 def __init__(self, timestamp, micro_seconds=0):
22 """Initializes the POSIX timestamp object.
25 def __init__(self, microseconds=None, timestamp=None):
26 """Initializes a POSIX timestamp.
2327
2428 Args:
25 timestamp: the FILETIME timestamp.
26 micro_seconds: optional number of micro seconds.
29 micro_seconds (Optional[int]): number of microseconds.
30 timestamp (Optional[int]): POSIX timestamp.
2731 """
2832 super(PosixTime, self).__init__()
29 self.micro_seconds = micro_seconds
33 self.microseconds = microseconds
34 if microseconds is not None:
35 self.precision = definitions.PRECISION_1_MICROSECOND
36 else:
37 self.precision = definitions.PRECISION_1_SECOND
3038 self.timestamp = timestamp
3139
40 def CopyFromString(self, time_string):
41 """Copies a POSIX timestamp from a string containing a date and time value.
42
43 Args:
44 time_string (str): date and time value formatted as:
45 YYYY-MM-DD hh:mm:ss.######[+-]##:##
46
47 Where # are numeric digits ranging from 0 to 9 and the seconds
48 fraction can be either 3 or 6 digits. The time of day, seconds
49 fraction and time zone offset are optional. The default time zone
50 is UTC.
51 """
52 date_time_values = self._CopyDateTimeFromString(time_string)
53
54 year = date_time_values.get(u'year', 0)
55 month = date_time_values.get(u'month', 0)
56 day_of_month = date_time_values.get(u'day_of_month', 0)
57 hours = date_time_values.get(u'hours', 0)
58 minutes = date_time_values.get(u'minutes', 0)
59 seconds = date_time_values.get(u'seconds', 0)
60
61 self.timestamp = self._GetNumberOfSecondsFromElements(
62 year, month, day_of_month, hours, minutes, seconds)
63 self.microseconds = date_time_values.get(u'microseconds', None)
64
65 if self.microseconds is not None:
66 self.precision = definitions.PRECISION_1_MICROSECOND
67 else:
68 self.precision = definitions.PRECISION_1_SECOND
69
70 self.is_local_time = False
71
3272 def CopyToStatTimeTuple(self):
33 """Copies the timestamp to a stat timestamp tuple.
73 """Copies the POSIX timestamp to a stat timestamp tuple.
3474
3575 Returns:
36 A tuple of an integer containing a POSIX timestamp in seconds
37 and an integer containing the remainder in 100 nano seconds or
38 None on error.
76 tuple[int, int]: a POSIX timestamp in seconds and the remainder in
77 100 nano seconds or (None, None) on error.
3978 """
40 return self.timestamp, self.micro_seconds * 10
79 if self.timestamp is None:
80 return None, None
81
82 if self.microseconds is not None:
83 return self.timestamp, self.microseconds * 10
84
85 return self.timestamp, None
86
87 def GetPlasoTimestamp(self):
88 """Retrieves a timestamp that is compatible with plaso.
89
90 Returns:
91 int: a POSIX timestamp in microseconds or None on error.
92 """
93 if self.timestamp is None:
94 return
95
96 if self.microseconds is not None:
97 return (self.timestamp * 1000000) + self.microseconds
98
99 return self.timestamp * 1000000
0 # -*- coding: utf-8 -*-
1 """Semantic time implementation."""
2
3 from dfdatetime import interface
4
5
6 class SemanticTime(interface.DateTimeValues):
7 """Class that implements semantic time.
8
9 Semantic time is term to describe date and time values that have specific
10 meaning such as: "Never", "Yesterday", "Not set".
11
12 Attributes:
13 is_local_time (bool): True if the date and time value is in local time.
14 precision (str): precision of the date and time value, which should
15 be one of the PRECISION_VALUES in definitions.
16 string (str): semantic representation of the time, such as:
17 "Never", "Not set".
18 """
19
20 def __init__(self, string=None):
21 """Initializes a semantic time.
22
23 Args:
24 string (str): semantic representation of the time, such as:
25 "Never", "Not set".
26 """
27 super(SemanticTime, self).__init__()
28 self.string = string
29
30 def CopyFromString(self, time_string):
31 """Copies semantic time from a string containing a date and time value.
32
33 Args:
34 time_string (str): semantic representation of the time, such as:
35 "Never", "Not set".
36
37 Raises:
38 ValueError: because semantic time cannot be copied from a string.
39 """
40 self.string = time_string
41
42 def CopyToStatTimeTuple(self):
43 """Copies the semantic timestamp to a stat timestamp tuple.
44
45 Returns:
46 tuple[int, int]: a POSIX timestamp in seconds and the remainder in
47 100 nano seconds, which will always be 0, 0.
48 """
49 return 0, 0
50
51 def GetPlasoTimestamp(self):
52 """Retrieves a timestamp that is compatible with plaso.
53
54 Returns:
55 int: a POSIX timestamp in microseconds, which will always be 0.
56 """
57 return 0
0 # -*- coding: utf-8 -*-
1 """SYSTEMTIME structure implementation."""
2
3 from dfdatetime import definitions
4 from dfdatetime import interface
5
6
7 class Systemtime(interface.DateTimeValues):
8 """Class that implements a SYSTEMTIME structure.
9
10 The SYSTEMTIME structure is 16 bytes of size and contains:
11
12 struct {
13 WORD year,
14 WORD month,
15 WORD day_of_week,
16 WORD day,
17 WORD hour,
18 WORD minute,
19 WORD second,
20 WORD millisecond
21 }
22
23 Attributes:
24 year (int): year, 1601 through 30827.
25 month (int): month of year, 1 through 12.
26 day_of_week (int): day of week, 0 through 6.
27 day_of_month (int): day of month, 1 through 31.
28 hours (int): hours, 0 through 23.
29 minutes (int): minutes, 0 through 59.
30 seconds (int): seconds, 0 through 59.
31 milliseconds (int): milliseconds, 0 through 999.
32 """
33
34 def __init__(
35 self, day_of_month=0, day_of_week=0, hours=0, milliseconds=0,
36 minutes=0, month=0, seconds=0, year=0):
37 """Initializes a SYSTEMTIME structure.
38
39 Args:
40 day_of_month (Optional[int]): day of month, 1 through 31.
41 day_of_week (Optional[int]): day of week, 0 through 6.
42 hours (Optional[int]): hours, 0 through 23.
43 milliseconds (Optional[int]): milliseconds, 0 through 999.
44 minutes (Optional[int]): minutes, 0 through 59.
45 month (Optional[int]): month of year, 1 through 12.
46 seconds (Optional[int]): seconds, 0 through 59.
47 year (Optional[int]): year, 1601 through 30827.
48 """
49 super(Systemtime, self).__init__()
50 self._number_of_seconds = self._GetNumberOfSecondsFromElements(
51 year, month, day_of_month, hours, minutes, seconds)
52 self.day_of_month = day_of_month
53 self.day_of_week = day_of_week
54 self.hours = hours
55 self.milliseconds = milliseconds
56 self.minutes = minutes
57 self.month = month
58 self.precision = definitions.PRECISION_1_MILLISECOND
59 self.seconds = seconds
60 self.year = year
61
62 def CopyFromString(self, time_string):
63 """Copies a SYSTEMTIME from a string containing a date and time value.
64
65 Args:
66 time_string (str): date and time value formatted as:
67 YYYY-MM-DD hh:mm:ss.######[+-]##:##
68
69 Where # are numeric digits ranging from 0 to 9 and the seconds
70 fraction can be either 3 or 6 digits. The time of day, seconds
71 fraction and time zone offset are optional. The default time zone
72 is UTC.
73
74 Raises:
75 ValueError: if the date string is invalid or not supported.
76 """
77 date_time_values = self._CopyDateTimeFromString(time_string)
78
79 year = date_time_values.get(u'year', 0)
80 month = date_time_values.get(u'month', 0)
81 day_of_month = date_time_values.get(u'day_of_month', 0)
82 hours = date_time_values.get(u'hours', 0)
83 minutes = date_time_values.get(u'minutes', 0)
84 seconds = date_time_values.get(u'seconds', 0)
85
86 microseconds = date_time_values.get(u'microseconds', 0)
87 milliseconds, _ = divmod(microseconds, 1000)
88
89 if year < 1601 or year > 30827:
90 raise ValueError(u'Unsupported year value: {0:d}.'.format(year))
91
92 self._number_of_seconds = self._GetNumberOfSecondsFromElements(
93 year, month, day_of_month, hours, minutes, seconds)
94
95 self.year = year
96 self.month = month
97 self.day_of_month = day_of_month
98 # TODO: calculate day of week on demand.
99 self.day_of_week = None
100 self.hours = hours
101 self.minutes = minutes
102 self.seconds = seconds
103 self.milliseconds = milliseconds
104
105 self.is_local_time = False
106
107 def CopyToStatTimeTuple(self):
108 """Copies the SYSTEMTIME structure to a stat timestamp tuple.
109
110 Returns:
111 tuple[int, int]: a POSIX timestamp in seconds and the remainder in
112 100 nano seconds or (None, None) on error.
113 """
114 if self._number_of_seconds is None:
115 return None, None
116
117 return self._number_of_seconds, self.milliseconds * 1000
118
119 def GetPlasoTimestamp(self):
120 """Retrieves a timestamp that is compatible with plaso.
121
122 Returns:
123 int: a POSIX timestamp in microseconds or None on error.
124 """
125 if self._number_of_seconds is None:
126 return
127
128 return ((self._number_of_seconds * 1000) + self.milliseconds) * 1000
00 # -*- coding: utf-8 -*-
11 """Time elements implementation."""
22
3 import calendar
4
3 from dfdatetime import definitions
54 from dfdatetime import interface
65
76
87 class TimeElements(interface.DateTimeValues):
9 """Class that implements time elements."""
8 """Class that implements time elements.
109
11 def __init__(self, time_elements_tuple):
12 """Initializes a time elements object.
10 Time elements contain separate values for year, month, day of month,
11 hours, minutes and seconds.
12
13 Attributes:
14 is_local_time (bool): True if the date and time value is in local time.
15 precision (str): precision of the date and time value, which should
16 be one of the PRECISION_VALUES in definitions.
17 """
18
19 def __init__(self, time_elements_tuple=None):
20 """Initializes time elements.
1321
1422 Args:
15 time_elements_tuple: a named tuple containg the time elements.
23 time_elements_tuple (Optional[tuple[int, int, int, int, int, int]]):
24 time elements, contains year, month, day of month, hours, minutes and
25 seconds.
1626 """
1727 super(TimeElements, self).__init__()
28 if time_elements_tuple is None:
29 self._number_of_seconds = None
30 else:
31 self._number_of_seconds = self._GetNumberOfSecondsFromElements(
32 *time_elements_tuple)
1833 self._time_elements_tuple = time_elements_tuple
34 self.precision = definitions.PRECISION_1_SECOND
35
36 def CopyFromString(self, time_string):
37 """Copies time elements from a string containing a date and time value.
38
39 Args:
40 time_string (str): date and time value formatted as:
41 YYYY-MM-DD hh:mm:ss.######[+-]##:##
42
43 Where # are numeric digits ranging from 0 to 9 and the seconds
44 fraction can be either 3 or 6 digits. The time of day, seconds
45 fraction and time zone offset are optional. The default time zone
46 is UTC.
47 """
48 date_time_values = self._CopyDateTimeFromString(time_string)
49
50 year = date_time_values.get(u'year', 0)
51 month = date_time_values.get(u'month', 0)
52 day_of_month = date_time_values.get(u'day_of_month', 0)
53 hours = date_time_values.get(u'hours', 0)
54 minutes = date_time_values.get(u'minutes', 0)
55 seconds = date_time_values.get(u'seconds', 0)
56
57 self._time_elements_tuple = (
58 year, month, day_of_month, hours, minutes, seconds)
59 self._number_of_seconds = self._GetNumberOfSecondsFromElements(
60 year, month, day_of_month, hours, minutes, seconds)
61
62 self.is_local_time = False
1963
2064 def CopyToStatTimeTuple(self):
21 """Copies the timestamp to a stat timestamp tuple.
65 """Copies the time elements to a stat timestamp tuple.
2266
2367 Returns:
24 A tuple of an integer containing a POSIX timestamp in seconds
25 and an integer containing the remainder in 100 nano seconds or
26 Currently the remainder will always be 0.
68 tuple[int, int]: a POSIX timestamp in seconds and the remainder in
69 100 nano seconds or (None, None) on error.
2770 """
28 return calendar.timegm(self._time_elements_tuple), 0
71 if self._number_of_seconds is None:
72 return None, None
73 return self._number_of_seconds, None
74
75 def GetPlasoTimestamp(self):
76 """Retrieves a timestamp that is compatible with plaso.
77
78 Returns:
79 int: a POSIX timestamp in microseconds or None on error.
80 """
81 if self._number_of_seconds is None:
82 return
83 return self._number_of_seconds * 1000000
0 # -*- coding: utf-8 -*-
1 """WebKit timestamp implementation."""
2
3 from dfdatetime import definitions
4 from dfdatetime import interface
5
6
7 class WebKitTime(interface.DateTimeValues):
8 """Class that implements a WebKit timestamp.
9
10 The WebKit timestamp is a unsigned 64-bit integer that contains the number of
11 micro seconds since 1601-01-01 00:00:00.
12
13 Attributes:
14 is_local_time (bool): True if the date and time value is in local time.
15 precision (str): precision of the date and time value, which should
16 be one of the PRECISION_VALUES in definitions.
17 """
18
19 # The difference between Jan 1, 1601 and Jan 1, 1970 in seconds.
20 _WEBKIT_TO_POSIX_BASE = 11644473600
21 _UINT64_MAX = (1 << 64) - 1
22
23 def __init__(self, timestamp=None):
24 """Initializes a WebKit timestamp.
25
26 Args:
27 timestamp (Optional[int]): WebKit timestamp.
28 """
29 super(WebKitTime, self).__init__()
30 self.precision = definitions.PRECISION_1_MICROSECOND
31 self.timestamp = timestamp
32
33 def CopyFromString(self, time_string):
34 """Copies a WebKit timestamp from a string containing a date and time value.
35
36 Args:
37 time_string (str): date and time value formatted as:
38 YYYY-MM-DD hh:mm:ss.######[+-]##:##
39
40 Where # are numeric digits ranging from 0 to 9 and the seconds
41 fraction can be either 3 or 6 digits. The time of day, seconds
42 fraction and time zone offset are optional. The default time zone
43 is UTC.
44
45 Raises:
46 ValueError: if the time string is invalid or not supported.
47 """
48 date_time_values = self._CopyDateTimeFromString(time_string)
49
50 year = date_time_values.get(u'year', 0)
51 month = date_time_values.get(u'month', 0)
52 day_of_month = date_time_values.get(u'day_of_month', 0)
53 hours = date_time_values.get(u'hours', 0)
54 minutes = date_time_values.get(u'minutes', 0)
55 seconds = date_time_values.get(u'seconds', 0)
56
57 if year < 1601:
58 raise ValueError(u'Year value not supported: {0!s}.'.format(year))
59
60 self.timestamp = self._GetNumberOfSecondsFromElements(
61 year, month, day_of_month, hours, minutes, seconds)
62 self.timestamp += self._WEBKIT_TO_POSIX_BASE
63 self.timestamp *= 1000000
64 self.timestamp += date_time_values.get(u'microseconds', 0)
65
66 self.precision = definitions.PRECISION_1_MICROSECOND
67 self.is_local_time = False
68
69 def CopyToStatTimeTuple(self):
70 """Copies the WebKit timestamp to a stat timestamp tuple.
71
72 Returns:
73 tuple[int, int]: a POSIX timestamp in seconds and the remainder in
74 100 nano seconds or (None, None) on error.
75 """
76 if (self.timestamp is None or self.timestamp < 0 or
77 self.timestamp > self._UINT64_MAX):
78 return None, None
79
80 timestamp, remainder = divmod(self.timestamp, 1000000)
81 timestamp -= self._WEBKIT_TO_POSIX_BASE
82 return timestamp, remainder * 10
83
84 def GetPlasoTimestamp(self):
85 """Retrieves a timestamp that is compatible with plaso.
86
87 Returns:
88 int: a POSIX timestamp in microseconds or None on error.
89 """
90 if (self.timestamp is None or self.timestamp < 0 or
91 self.timestamp > self._UINT64_MAX):
92 return
93
94 return self.timestamp - (self._WEBKIT_TO_POSIX_BASE * 1000000)
+0
-14
dfdatetime.egg-info/PKG-INFO less more
0 Metadata-Version: 1.1
1 Name: dfdatetime
2 Version: 20160323
3 Summary: Digital Forensics Date and Time (dfDateTime).
4 Home-page: https://github.com/log2timeline/dfdatetime
5 Author: dfDateTime development team
6 Author-email: log2timeline-dev@googlegroups.com
7 License: Apache License, Version 2.0
8 Description: dfDateTime, or Digital Forensics date and time, provides date and time objects to preserve accuracy and precision.
9 Platform: UNKNOWN
10 Classifier: Development Status :: 3 - Alpha
11 Classifier: Environment :: Console
12 Classifier: Operating System :: OS Independent
13 Classifier: Programming Language :: Python
+0
-32
dfdatetime.egg-info/SOURCES.txt less more
0 ACKNOWLEDGEMENTS
1 AUTHORS
2 LICENSE
3 MANIFEST.in
4 README
5 run_tests.py
6 setup.cfg
7 setup.py
8 config/dpkg/changelog
9 config/dpkg/compat
10 config/dpkg/control
11 config/dpkg/copyright
12 config/dpkg/python-dfdatetime.docs
13 config/dpkg/python3-dfdatetime.docs
14 config/dpkg/rules
15 config/dpkg/source/format
16 dfdatetime/__init__.py
17 dfdatetime/fake_time.py
18 dfdatetime/filetime.py
19 dfdatetime/interface.py
20 dfdatetime/posix_time.py
21 dfdatetime/time_elements.py
22 dfdatetime.egg-info/PKG-INFO
23 dfdatetime.egg-info/SOURCES.txt
24 dfdatetime.egg-info/dependency_links.txt
25 dfdatetime.egg-info/top_level.txt
26 tests/__init__.py
27 tests/fake_time.py
28 tests/filetime.py
29 tests/interface.py
30 tests/posix_time.py
31 tests/time_elements.py
+0
-1
dfdatetime.egg-info/dependency_links.txt less more
0
+0
-1
dfdatetime.egg-info/top_level.txt less more
0 dfdatetime
0 pip >= 7.0.0
1 pytest
11 release = 1
22 packager = log2timeline development team <log2timeline-dev@googlegroups.com>
33 doc_files = ACKNOWLEDGEMENTS
4 AUTHORS
5 LICENSE
6 README
4 AUTHORS
5 LICENSE
6 README
77 build_requires = python-setuptools
8
9 [egg_info]
10 tag_build =
11 tag_date = 0
12 tag_svn_revision = 0
13
0 #!/usr/bin/python
1 # -*- coding: utf-8 -*-
2 """Tests for the Cocoa timestamp implementation."""
3
4 import unittest
5
6 from dfdatetime import cocoa_time
7
8
9 class CocoaTimeTest(unittest.TestCase):
10 """Tests for the Cocoa timestamp."""
11
12 def testCopyFromString(self):
13 """Tests the CopyFromString function."""
14 cocoa_time_object = cocoa_time.CocoaTime()
15
16 expected_timestamp = 394934400.0
17 cocoa_time_object.CopyFromString(u'2013-07-08')
18 self.assertEqual(cocoa_time_object.timestamp, expected_timestamp)
19
20 expected_timestamp = 395011845.0
21 cocoa_time_object.CopyFromString(u'2013-07-08 21:30:45')
22 self.assertEqual(cocoa_time_object.timestamp, expected_timestamp)
23
24 expected_timestamp = 395011845.546875
25 cocoa_time_object.CopyFromString(u'2013-07-08 21:30:45.546875')
26 self.assertEqual(cocoa_time_object.timestamp, expected_timestamp)
27
28 expected_timestamp = 395015445.546875
29 cocoa_time_object.CopyFromString(u'2013-07-08 21:30:45.546875-01:00')
30 self.assertEqual(cocoa_time_object.timestamp, expected_timestamp)
31
32 expected_timestamp = 395008245.546875
33 cocoa_time_object.CopyFromString(u'2013-07-08 21:30:45.546875+01:00')
34 self.assertEqual(cocoa_time_object.timestamp, expected_timestamp)
35
36 expected_timestamp = 86400.0
37 cocoa_time_object.CopyFromString(u'2001-01-02 00:00:00')
38 self.assertEqual(cocoa_time_object.timestamp, expected_timestamp)
39
40 def testCopyToStatTimeTuple(self):
41 """Tests the CopyToStatTimeTuple function."""
42 cocoa_time_object = cocoa_time.CocoaTime(timestamp=395011845.0)
43
44 expected_stat_time_tuple = (1373319045, 0)
45 stat_time_tuple = cocoa_time_object.CopyToStatTimeTuple()
46 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
47
48 cocoa_time_object = cocoa_time.CocoaTime()
49
50 expected_stat_time_tuple = (None, None)
51 stat_time_tuple = cocoa_time_object.CopyToStatTimeTuple()
52 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
53
54 def testGetPlasoTimestamp(self):
55 """Tests the GetPlasoTimestamp function."""
56 cocoa_time_object = cocoa_time.CocoaTime(timestamp=395011845.0)
57
58 expected_micro_posix_timestamp = 1373319045000000
59 micro_posix_timestamp = cocoa_time_object.GetPlasoTimestamp()
60 self.assertEqual(micro_posix_timestamp, expected_micro_posix_timestamp)
61
62 cocoa_time_object = cocoa_time.CocoaTime()
63
64 micro_posix_timestamp = cocoa_time_object.GetPlasoTimestamp()
65 self.assertIsNone(micro_posix_timestamp)
66
67
68 if __name__ == '__main__':
69 unittest.main()
00 #!/usr/bin/python
11 # -*- coding: utf-8 -*-
2 """Tests for the fake timestamp implementation."""
2 """Tests for the fake time implementation."""
33
44 import unittest
55
77
88
99 class FakeTimeTest(unittest.TestCase):
10 """Tests for the fake timestamp object."""
10 """Tests for the fake time."""
11
12 # pylint: disable=protected-access
13
14 def testCopyFromString(self):
15 """Tests the CopyFromString function."""
16 fake_time_object = fake_time.FakeTime()
17
18 expected_number_of_seconds = 1281571200
19 fake_time_object.CopyFromString(u'2010-08-12')
20 self.assertEqual(
21 fake_time_object._number_of_seconds, expected_number_of_seconds)
22 self.assertIsNone(fake_time_object._microseconds)
23
24 expected_number_of_seconds = 1281647191
25 fake_time_object.CopyFromString(u'2010-08-12 21:06:31')
26 self.assertEqual(
27 fake_time_object._number_of_seconds, expected_number_of_seconds)
28 self.assertIsNone(fake_time_object._microseconds)
29
30 expected_number_of_seconds = 1281647191
31 fake_time_object.CopyFromString(u'2010-08-12 21:06:31.546875')
32 self.assertEqual(
33 fake_time_object._number_of_seconds, expected_number_of_seconds)
34 self.assertEqual(fake_time_object._microseconds, 546875)
35
36 expected_number_of_seconds = 1281650791
37 fake_time_object.CopyFromString(u'2010-08-12 21:06:31.546875-01:00')
38 self.assertEqual(
39 fake_time_object._number_of_seconds, expected_number_of_seconds)
40 self.assertEqual(fake_time_object._microseconds, 546875)
41
42 expected_number_of_seconds = 1281643591
43 fake_time_object.CopyFromString(u'2010-08-12 21:06:31.546875+01:00')
44 self.assertEqual(
45 fake_time_object._number_of_seconds, expected_number_of_seconds)
46 self.assertEqual(fake_time_object._microseconds, 546875)
47
48 expected_number_of_seconds = -11644387200
49 fake_time_object.CopyFromString(u'1601-01-02 00:00:00')
50 self.assertEqual(
51 fake_time_object._number_of_seconds, expected_number_of_seconds)
52 self.assertIsNone(fake_time_object._microseconds)
1153
1254 def testCopyToStatTimeTuple(self):
1355 """Tests the CopyToStatTimeTuple function."""
1456 fake_time_object = fake_time.FakeTime()
57 fake_time_object.CopyFromString(u'2010-08-12 21:06:31.546875')
1558
16 expected_stat_time_tuple = (0, 0)
59 expected_stat_time_tuple = (1281647191, 5468750)
1760 stat_time_tuple = fake_time_object.CopyToStatTimeTuple()
18 self.assertNotEqual(stat_time_tuple, expected_stat_time_tuple)
61 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
62
63 fake_time_object = fake_time.FakeTime()
64 fake_time_object.CopyFromString(u'2010-08-12 21:06:31')
65
66 expected_stat_time_tuple = (1281647191, None)
67 stat_time_tuple = fake_time_object.CopyToStatTimeTuple()
68 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
69
70 fake_time_object = fake_time.FakeTime()
71 fake_time_object._number_of_seconds = None
72
73 expected_stat_time_tuple = (None, None)
74 stat_time_tuple = fake_time_object.CopyToStatTimeTuple()
75 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
76
77 def testGetPlasoTimestamp(self):
78 """Tests the GetPlasoTimestamp function."""
79 fake_time_object = fake_time.FakeTime()
80 fake_time_object.CopyFromString(u'2010-08-12 21:06:31.546875')
81
82 expected_micro_posix_number_of_seconds = 1281647191546875
83 micro_posix_number_of_seconds = fake_time_object.GetPlasoTimestamp()
84 self.assertEqual(
85 micro_posix_number_of_seconds, expected_micro_posix_number_of_seconds)
86
87 fake_time_object = fake_time.FakeTime()
88 fake_time_object.CopyFromString(u'2010-08-12 21:06:31')
89
90 expected_micro_posix_number_of_seconds = 1281647191000000
91 micro_posix_number_of_seconds = fake_time_object.GetPlasoTimestamp()
92 self.assertEqual(
93 micro_posix_number_of_seconds, expected_micro_posix_number_of_seconds)
94
95 fake_time_object = fake_time.FakeTime()
96 fake_time_object._number_of_seconds = None
97
98 micro_posix_number_of_seconds = fake_time_object.GetPlasoTimestamp()
99 self.assertIsNone(micro_posix_number_of_seconds)
19100
20101
21102 if __name__ == '__main__':
0 #!/usr/bin/python
1 # -*- coding: utf-8 -*-
2 """Tests for the FAT date time implementation."""
3
4 import unittest
5
6 from dfdatetime import fat_date_time
7
8
9 class FATDateTime(unittest.TestCase):
10 """Tests for the FAT date time."""
11
12 # pylint: disable=protected-access
13
14 def testCopyFromString(self):
15 """Tests the CopyFromString function."""
16 fat_date_time_object = fat_date_time.FATDateTime()
17
18 expected_number_of_seconds = 966038400
19 fat_date_time_object.CopyFromString(u'2010-08-12')
20 self.assertEqual(
21 fat_date_time_object._number_of_seconds, expected_number_of_seconds)
22
23 expected_number_of_seconds = 966114391
24 fat_date_time_object.CopyFromString(u'2010-08-12 21:06:31')
25 self.assertEqual(
26 fat_date_time_object._number_of_seconds, expected_number_of_seconds)
27
28 expected_number_of_seconds = 966114391
29 fat_date_time_object.CopyFromString(u'2010-08-12 21:06:31.546875')
30 self.assertEqual(
31 fat_date_time_object._number_of_seconds, expected_number_of_seconds)
32
33 expected_number_of_seconds = 966117991
34 fat_date_time_object.CopyFromString(u'2010-08-12 21:06:31.546875-01:00')
35 self.assertEqual(
36 fat_date_time_object._number_of_seconds, expected_number_of_seconds)
37
38 expected_number_of_seconds = 966110791
39 fat_date_time_object.CopyFromString(u'2010-08-12 21:06:31.546875+01:00')
40 self.assertEqual(
41 fat_date_time_object._number_of_seconds, expected_number_of_seconds)
42
43 expected_number_of_seconds = 86400
44 fat_date_time_object.CopyFromString(u'1980-01-02 00:00:00')
45 self.assertEqual(
46 fat_date_time_object._number_of_seconds, expected_number_of_seconds)
47
48 with self.assertRaises(ValueError):
49 fat_date_time_object.CopyFromString(u'2200-01-02 00:00:00')
50
51 def testGetNumberOfSeconds(self):
52 """Tests the _GetNumberOfSeconds function."""
53 fat_date_time_object = fat_date_time.FATDateTime()
54
55 fat_date_time_object._GetNumberOfSeconds(0xa8d03d0c)
56
57 # Invalid number of seconds.
58 test_fat_date_time = (0xa8d03d0c & ~(0x1f << 16)) | ((30 & 0x1f) << 16)
59 with self.assertRaises(ValueError):
60 fat_date_time_object._GetNumberOfSeconds(test_fat_date_time)
61
62 # Invalid number of minutes.
63 test_fat_date_time = (0xa8d03d0c & ~(0x3f << 21)) | ((60 & 0x3f) << 21)
64 with self.assertRaises(ValueError):
65 fat_date_time_object._GetNumberOfSeconds(test_fat_date_time)
66
67 # Invalid number of hours.
68 test_fat_date_time = (0xa8d03d0c & ~(0x1f << 27)) | ((24 & 0x1f) << 27)
69 with self.assertRaises(ValueError):
70 fat_date_time_object._GetNumberOfSeconds(test_fat_date_time)
71
72 # Invalid day of month.
73 test_fat_date_time = (0xa8d03d0c & ~0x1f) | (32 & 0x1f)
74 with self.assertRaises(ValueError):
75 fat_date_time_object._GetNumberOfSeconds(test_fat_date_time)
76
77 # Invalid month.
78 test_fat_date_time = (0xa8d03d0c & ~(0x0f << 5)) | ((13 & 0x0f) << 5)
79 with self.assertRaises(ValueError):
80 fat_date_time_object._GetNumberOfSeconds(test_fat_date_time)
81
82 def testCopyToStatTimeTuple(self):
83 """Tests the CopyToStatTimeTuple function."""
84 fat_date_time_object = fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c)
85
86 expected_stat_time_tuple = (1281647192, None)
87 stat_time_tuple = fat_date_time_object.CopyToStatTimeTuple()
88 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
89
90 fat_date_time_object = fat_date_time.FATDateTime()
91
92 expected_stat_time_tuple = (None, None)
93 stat_time_tuple = fat_date_time_object.CopyToStatTimeTuple()
94 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
95
96 def testGetPlasoTimestamp(self):
97 """Tests the GetPlasoTimestamp function."""
98 fat_date_time_object = fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c)
99
100 expected_micro_posix_timestamp = 1281647192000000
101 micro_posix_timestamp = fat_date_time_object.GetPlasoTimestamp()
102 self.assertEqual(micro_posix_timestamp, expected_micro_posix_timestamp)
103
104 fat_date_time_object = fat_date_time.FATDateTime()
105
106 micro_posix_timestamp = fat_date_time_object.GetPlasoTimestamp()
107 self.assertIsNone(micro_posix_timestamp)
108
109
110 if __name__ == '__main__':
111 unittest.main()
77
88
99 class FiletimeTest(unittest.TestCase):
10 """Tests for the FILETIME timestamp object."""
10 """Tests for the FILETIME timestamp."""
1111
1212 def testCopyFromString(self):
1313 """Tests the CopyFromString function."""
3737 filetime_object.CopyFromString(u'1601-01-02 00:00:00')
3838 self.assertEqual(filetime_object.timestamp, expected_timestamp)
3939
40 with self.assertRaises(ValueError):
41 filetime_object.CopyFromString(u'1500-01-02 00:00:00')
42
4043 def testCopyToStatTimeTuple(self):
4144 """Tests the CopyToStatTimeTuple function."""
42 filetime_object = filetime.Filetime()
43 filetime_object.CopyFromString(u'2010-08-12 21:06:31.546875+01:00')
45 filetime_object = filetime.Filetime(timestamp=0x01cb3a623d0a17ce)
4446
45 expected_stat_time_tuple = (1281643591, 5468750)
47 expected_stat_time_tuple = (1281647191, 5468750)
4648 stat_time_tuple = filetime_object.CopyToStatTimeTuple()
4749 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
50
51 filetime_object = filetime.Filetime(timestamp=0x1ffffffffffffffff)
52
53 expected_stat_time_tuple = (None, None)
54 stat_time_tuple = filetime_object.CopyToStatTimeTuple()
55 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
56
57 filetime_object = filetime.Filetime()
58
59 expected_stat_time_tuple = (None, None)
60 stat_time_tuple = filetime_object.CopyToStatTimeTuple()
61 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
62
63 def testGetPlasoTimestamp(self):
64 """Tests the GetPlasoTimestamp function."""
65 filetime_object = filetime.Filetime(timestamp=0x01cb3a623d0a17ce)
66
67 expected_micro_posix_timestamp = 1281647191546875
68 micro_posix_timestamp = filetime_object.GetPlasoTimestamp()
69 self.assertEqual(micro_posix_timestamp, expected_micro_posix_timestamp)
70
71 filetime_object = filetime.Filetime(timestamp=0x1ffffffffffffffff)
72
73 micro_posix_timestamp = filetime_object.GetPlasoTimestamp()
74 self.assertIsNone(micro_posix_timestamp)
75
76 filetime_object = filetime.Filetime()
77
78 micro_posix_timestamp = filetime_object.GetPlasoTimestamp()
79 self.assertIsNone(micro_posix_timestamp)
4880
4981
5082 if __name__ == '__main__':
0 #!/usr/bin/python
1 # -*- coding: utf-8 -*-
2 """Tests for the HFS timestamp implementation."""
3
4 import unittest
5
6 from dfdatetime import hfs_time
7
8
class HFSTimeTest(unittest.TestCase):
  """Tests for the HFS timestamp."""

  def testCopyFromString(self):
    """Tests the CopyFromString function."""
    hfs_time_object = hfs_time.HFSTime()

    # Date and time strings and the HFS timestamps (seconds since
    # 1904-01-01 00:00:00) they should convert to.
    test_cases = [
        (u'2013-08-01', 3458160000),
        (u'2013-08-01 15:25:28', 3458215528),
        (u'2013-08-01 15:25:28.546875', 3458215528),
        (u'2013-08-01 15:25:28.546875-01:00', 3458219128),
        (u'2013-08-01 15:25:28.546875+01:00', 3458211928),
        (u'1904-01-02 00:00:00', 86400),
    ]
    for time_string, expected_timestamp in test_cases:
      hfs_time_object.CopyFromString(time_string)
      self.assertEqual(hfs_time_object.timestamp, expected_timestamp)

  def testCopyToStatTimeTuple(self):
    """Tests the CopyToStatTimeTuple function."""
    hfs_time_object = hfs_time.HFSTime(timestamp=3458215528)
    self.assertEqual(
        hfs_time_object.CopyToStatTimeTuple(), (1375370728, 0))

    # Out-of-bounds timestamps yield an empty tuple.
    for out_of_bounds_timestamp in (0x1ffffffff, -0x1ffffffff):
      hfs_time_object = hfs_time.HFSTime(timestamp=out_of_bounds_timestamp)
      self.assertEqual(
          hfs_time_object.CopyToStatTimeTuple(), (None, None))

    # An unset timestamp yields an empty tuple.
    hfs_time_object = hfs_time.HFSTime()
    self.assertEqual(hfs_time_object.CopyToStatTimeTuple(), (None, None))

  def testGetPlasoTimestamp(self):
    """Tests the GetPlasoTimestamp function."""
    hfs_time_object = hfs_time.HFSTime(timestamp=3458215528)
    self.assertEqual(
        hfs_time_object.GetPlasoTimestamp(), 1375370728000000)

    # Out-of-bounds timestamps yield None.
    for out_of_bounds_timestamp in (0x1ffffffff, -0x1ffffffff):
      hfs_time_object = hfs_time.HFSTime(timestamp=out_of_bounds_timestamp)
      self.assertIsNone(hfs_time_object.GetPlasoTimestamp())

    # An unset timestamp yields None.
    hfs_time_object = hfs_time.HFSTime()
    self.assertIsNone(hfs_time_object.GetPlasoTimestamp())
88
89
90 if __name__ == '__main__':
91 unittest.main()
1212 # pylint: disable=protected-access
1313
1414 def testCopyDateFromString(self):
15 """Tests the CopyDateFromString function."""
15 """Tests the _CopyDateFromString function."""
1616 date_time_values = interface.DateTimeValues()
1717
1818 expected_date_tuple = (2010, 1, 1)
7272 with self.assertRaises(ValueError):
7373 date_time_values._CopyDateFromString(u'2010-04-31')
7474
75 def testCopyDateTimeFromString(self):
76 """Tests the _CopyDateTimeFromString function."""
77 date_time_values = interface.DateTimeValues()
78
79 expected_date_dict = {
80 u'year': 2010, u'month': 8, u'day_of_month': 12}
81 date_dict = date_time_values._CopyDateTimeFromString(u'2010-08-12')
82 self.assertEqual(date_dict, expected_date_dict)
83
84 expected_date_dict = {
85 u'year': 2010, u'month': 8, u'day_of_month': 12,
86 u'hours': 21, u'minutes': 6, u'seconds': 31}
87 date_dict = date_time_values._CopyDateTimeFromString(
88 u'2010-08-12 21:06:31')
89 self.assertEqual(date_dict, expected_date_dict)
90
91 expected_date_dict = {
92 u'year': 2010, u'month': 8, u'day_of_month': 12,
93 u'hours': 21, u'minutes': 6, u'seconds': 31, u'microseconds': 546875}
94 date_dict = date_time_values._CopyDateTimeFromString(
95 u'2010-08-12 21:06:31.546875')
96 self.assertEqual(date_dict, expected_date_dict)
97
98 expected_date_dict = {
99 u'year': 2010, u'month': 8, u'day_of_month': 12,
100 u'hours': 22, u'minutes': 6, u'seconds': 31, u'microseconds': 546875}
101 date_dict = date_time_values._CopyDateTimeFromString(
102 u'2010-08-12 21:06:31.546875-01:00')
103 self.assertEqual(date_dict, expected_date_dict)
104
105 expected_date_dict = {
106 u'year': 2010, u'month': 8, u'day_of_month': 12,
107 u'hours': 20, u'minutes': 6, u'seconds': 31, u'microseconds': 546875}
108 date_dict = date_time_values._CopyDateTimeFromString(
109 u'2010-08-12 21:06:31.546875+01:00')
110 self.assertEqual(date_dict, expected_date_dict)
111
112 expected_date_dict = {
113 u'year': 2010, u'month': 8, u'day_of_month': 12,
114 u'hours': 20, u'minutes': 6, u'seconds': 31, u'microseconds': 546875}
115 date_dict = date_time_values._CopyDateTimeFromString(
116 u'2010-08-12 21:06:31.546875+01:00')
117 self.assertEqual(date_dict, expected_date_dict)
118
119 # Test backwards date correction.
120 expected_date_dict = {
121 u'year': 2009, u'month': 12, u'day_of_month': 31,
122 u'hours': 23, u'minutes': 45, u'seconds': 0, u'microseconds': 123456}
123 date_dict = date_time_values._CopyDateTimeFromString(
124 u'2010-01-01 00:15:00.123456+00:30')
125 self.assertEqual(date_dict, expected_date_dict)
126
127 # Test forward date correction.
128 expected_date_dict = {
129 u'year': 2010, u'month': 1, u'day_of_month': 1,
130 u'hours': 1, u'minutes': 15, u'seconds': 0, u'microseconds': 123456}
131 date_dict = date_time_values._CopyDateTimeFromString(
132 u'2009-12-31 23:45:00.123456-01:30')
133 self.assertEqual(date_dict, expected_date_dict)
134
135 with self.assertRaises(ValueError):
136 date_time_values._CopyDateTimeFromString(u'')
137
138 with self.assertRaises(ValueError):
139 date_time_values._CopyDateTimeFromString(
140 u'2010-08-12T21:06:31.546875+01:00')
141
75142 def testCopyTimeFromString(self):
76 """Tests the CopyTimeFromString function."""
77 date_time_values = interface.DateTimeValues()
78
79 expected_time_tuple = (8, 4, 32, 0, 0)
143 """Tests the _CopyTimeFromString function."""
144 date_time_values = interface.DateTimeValues()
145
146 expected_time_tuple = (8, 4, 32, None, None)
80147 time_tuple = date_time_values._CopyTimeFromString(u'08:04:32')
81148 self.assertEqual(time_tuple, expected_time_tuple)
82149
83 expected_time_tuple = (20, 23, 56, 0, 0)
150 expected_time_tuple = (20, 23, 56, None, None)
84151 time_tuple = date_time_values._CopyTimeFromString(u'20:23:56')
85152 self.assertEqual(time_tuple, expected_time_tuple)
86153
87 expected_time_tuple = (20, 23, 56, 0, -19800)
154 expected_time_tuple = (20, 23, 56, None, -330)
88155 time_tuple = date_time_values._CopyTimeFromString(u'20:23:56+05:30')
89156 self.assertEqual(time_tuple, expected_time_tuple)
90157
91 expected_time_tuple = (20, 23, 56, 327000, 0)
158 expected_time_tuple = (20, 23, 56, 327000, None)
92159 time_tuple = date_time_values._CopyTimeFromString(u'20:23:56.327')
93160 self.assertEqual(time_tuple, expected_time_tuple)
94161
95 expected_time_tuple = (20, 23, 56, 327000, -3600)
162 expected_time_tuple = (20, 23, 56, 327000, -60)
96163 time_tuple = date_time_values._CopyTimeFromString(u'20:23:56.327+01:00')
97164 self.assertEqual(time_tuple, expected_time_tuple)
98165
99 expected_time_tuple = (20, 23, 56, 327124, 0)
166 expected_time_tuple = (20, 23, 56, 327124, None)
100167 time_tuple = date_time_values._CopyTimeFromString(u'20:23:56.327124')
101168 self.assertEqual(time_tuple, expected_time_tuple)
102169
103 expected_time_tuple = (20, 23, 56, 327124, 18000)
170 expected_time_tuple = (20, 23, 56, 327124, 300)
104171 time_tuple = date_time_values._CopyTimeFromString(u'20:23:56.327124-05:00')
105172 self.assertEqual(time_tuple, expected_time_tuple)
106173
117184 date_time_values._CopyTimeFromString(u'24:00:00')
118185
119186 with self.assertRaises(ValueError):
187 date_time_values._CopyTimeFromString(u'12b00:00')
188
189 with self.assertRaises(ValueError):
190 date_time_values._CopyTimeFromString(u'12:00b00')
191
192 with self.assertRaises(ValueError):
120193 date_time_values._CopyTimeFromString(u'1s:00:00')
121194
122195 with self.assertRaises(ValueError):
131204 with self.assertRaises(ValueError):
132205 date_time_values._CopyTimeFromString(u'00:00:w0')
133206
207 with self.assertRaises(ValueError):
208 date_time_values._CopyTimeFromString(u'12:00:00.00')
209
210 with self.assertRaises(ValueError):
211 date_time_values._CopyTimeFromString(u'12:00:00.0000')
212
213 with self.assertRaises(ValueError):
214 date_time_values._CopyTimeFromString(u'12:00:00.00w')
215
216 with self.assertRaises(ValueError):
217 date_time_values._CopyTimeFromString(u'12:00:00+01b00')
218
219 with self.assertRaises(ValueError):
220 date_time_values._CopyTimeFromString(u'12:00:00+01:0w')
221
222 with self.assertRaises(ValueError):
223 date_time_values._CopyTimeFromString(u'12:00:00+0w:00')
224
225 with self.assertRaises(ValueError):
226 date_time_values._CopyTimeFromString(u'12:00:00+30:00')
227
228 def testGetDayOfYear(self):
229 """Tests the _GetDayOfYear function."""
230 date_time_values = interface.DateTimeValues()
231
232 day_of_year = date_time_values._GetDayOfYear(1999, 1, 1)
233 self.assertEqual(day_of_year, 1)
234
235 day_of_year = date_time_values._GetDayOfYear(1999, 4, 21)
236 self.assertEqual(day_of_year, 111)
237
238 day_of_year = date_time_values._GetDayOfYear(1999, 12, 31)
239 self.assertEqual(day_of_year, 365)
240
241 day_of_year = date_time_values._GetDayOfYear(2000, 1, 1)
242 self.assertEqual(day_of_year, 1)
243
244 day_of_year = date_time_values._GetDayOfYear(2000, 4, 21)
245 self.assertEqual(day_of_year, 112)
246
247 day_of_year = date_time_values._GetDayOfYear(2000, 12, 31)
248 self.assertEqual(day_of_year, 366)
249
250 with self.assertRaises(ValueError):
251 date_time_values._GetDayOfYear(1999, 0, 1)
252
253 with self.assertRaises(ValueError):
254 date_time_values._GetDayOfYear(1999, 13, 1)
255
256 with self.assertRaises(ValueError):
257 date_time_values._GetDayOfYear(1999, 1, 0)
258
259 with self.assertRaises(ValueError):
260 date_time_values._GetDayOfYear(1999, 1, 32)
261
134262 def testGetDaysPerMonth(self):
135 """Tests the GetDaysPerMonth function."""
263 """Tests the _GetDaysPerMonth function."""
136264 date_time_values = interface.DateTimeValues()
137265
138266 expected_days_per_month = list(interface.DateTimeValues._DAYS_PER_MONTH)
139267
140268 days_per_month = []
141269 for month in range(1, 13):
142 days_per_month.append(
143 date_time_values._GetDaysPerMonth(1999, month))
270 days_per_month.append(date_time_values._GetDaysPerMonth(1999, month))
144271
145272 self.assertEqual(days_per_month, expected_days_per_month)
146273
148275
149276 days_per_month = []
150277 for month in range(1, 13):
151 days_per_month.append(
152 date_time_values._GetDaysPerMonth(2000, month))
278 days_per_month.append(date_time_values._GetDaysPerMonth(2000, month))
153279
154280 self.assertEqual(days_per_month, expected_days_per_month)
155281
159285 with self.assertRaises(ValueError):
160286 date_time_values._GetDaysPerMonth(1999, 13)
161287
288 def testGetNumberOfDaysInYear(self):
289 """Tests the _GetNumberOfDaysInYear function."""
290 date_time_values = interface.DateTimeValues()
291
292 self.assertEqual(date_time_values._GetNumberOfDaysInYear(1999), 365)
293 self.assertEqual(date_time_values._GetNumberOfDaysInYear(2000), 366)
294 self.assertEqual(date_time_values._GetNumberOfDaysInYear(1996), 366)
295
296 def testGetNumberOfSecondsFromElements(self):
297 """Tests the _GetNumberOfSecondsFromElements function."""
298 date_time_values = interface.DateTimeValues()
299
300 number_of_seconds = date_time_values._GetNumberOfSecondsFromElements(
301 2010, 8, 12, 0, 0, 0)
302 self.assertEqual(number_of_seconds, 1281571200)
303
304 number_of_seconds = date_time_values._GetNumberOfSecondsFromElements(
305 2010, 8, 12, None, None, None)
306 self.assertEqual(number_of_seconds, 1281571200)
307
308 number_of_seconds = date_time_values._GetNumberOfSecondsFromElements(
309 2010, 8, 12, 21, 6, 31)
310 self.assertEqual(number_of_seconds, 1281647191)
311
312 number_of_seconds = date_time_values._GetNumberOfSecondsFromElements(
313 1601, 1, 2, 0, 0, 0)
314 self.assertEqual(number_of_seconds, -11644387200)
315
316 number_of_seconds = date_time_values._GetNumberOfSecondsFromElements(
317 0, 1, 2, 0, 0, 0)
318 self.assertIsNone(number_of_seconds)
319
320 number_of_seconds = date_time_values._GetNumberOfSecondsFromElements(
321 2010, 13, 12, 21, 6, 31)
322 self.assertIsNone(number_of_seconds)
323
162324 def testIsLeapYear(self):
163 """Tests the IsLeapYear function."""
325 """Tests the _IsLeapYear function."""
164326 date_time_values = interface.DateTimeValues()
165327
166328 self.assertFalse(date_time_values._IsLeapYear(1999))
77
88
99 class PosixTimeTest(unittest.TestCase):
10 """Tests for the POSIX timestamp object."""
10 """Tests for the POSIX timestamp."""
11
12 def testCopyFromString(self):
13 """Tests the CopyFromString function."""
14 posix_time_object = posix_time.PosixTime()
15
16 expected_timestamp = 1281571200
17 posix_time_object.CopyFromString(u'2010-08-12')
18 self.assertEqual(posix_time_object.timestamp, expected_timestamp)
19 self.assertIsNone(posix_time_object.microseconds)
20
21 expected_timestamp = 1281647191
22 posix_time_object.CopyFromString(u'2010-08-12 21:06:31')
23 self.assertEqual(posix_time_object.timestamp, expected_timestamp)
24 self.assertIsNone(posix_time_object.microseconds)
25
26 expected_timestamp = 1281647191
27 posix_time_object.CopyFromString(u'2010-08-12 21:06:31.546875')
28 self.assertEqual(posix_time_object.timestamp, expected_timestamp)
29 self.assertEqual(posix_time_object.microseconds, 546875)
30
31 expected_timestamp = 1281650791
32 posix_time_object.CopyFromString(u'2010-08-12 21:06:31.546875-01:00')
33 self.assertEqual(posix_time_object.timestamp, expected_timestamp)
34 self.assertEqual(posix_time_object.microseconds, 546875)
35
36 expected_timestamp = 1281643591
37 posix_time_object.CopyFromString(u'2010-08-12 21:06:31.546875+01:00')
38 self.assertEqual(posix_time_object.timestamp, expected_timestamp)
39 self.assertEqual(posix_time_object.microseconds, 546875)
40
41 expected_timestamp = -11644387200
42 posix_time_object.CopyFromString(u'1601-01-02 00:00:00')
43 self.assertEqual(posix_time_object.timestamp, expected_timestamp)
44 self.assertIsNone(posix_time_object.microseconds)
1145
1246 def testCopyToStatTimeTuple(self):
1347 """Tests the CopyToStatTimeTuple function."""
14 posix_time_object = posix_time.PosixTime(1281643591, micro_seconds=546875)
48 posix_time_object = posix_time.PosixTime(
49 microseconds=546875, timestamp=1281643591)
1550
1651 expected_stat_time_tuple = (1281643591, 5468750)
1752 stat_time_tuple = posix_time_object.CopyToStatTimeTuple()
1853 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
1954
55 posix_time_object = posix_time.PosixTime(timestamp=1281643591)
56
57 expected_stat_time_tuple = (1281643591, None)
58 stat_time_tuple = posix_time_object.CopyToStatTimeTuple()
59 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
60
61 posix_time_object = posix_time.PosixTime()
62
63 expected_stat_time_tuple = (None, None)
64 stat_time_tuple = posix_time_object.CopyToStatTimeTuple()
65 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
66
67 def testGetPlasoTimestamp(self):
68 """Tests the GetPlasoTimestamp function."""
69 posix_time_object = posix_time.PosixTime(
70 microseconds=546875, timestamp=1281643591)
71
72 expected_micro_posix_timestamp = 1281643591546875
73 micro_posix_timestamp = posix_time_object.GetPlasoTimestamp()
74 self.assertEqual(micro_posix_timestamp, expected_micro_posix_timestamp)
75
76 posix_time_object = posix_time.PosixTime(timestamp=1281643591)
77
78 expected_micro_posix_timestamp = 1281643591000000
79 micro_posix_timestamp = posix_time_object.GetPlasoTimestamp()
80 self.assertEqual(micro_posix_timestamp, expected_micro_posix_timestamp)
81
82 posix_time_object = posix_time.PosixTime()
83
84 micro_posix_timestamp = posix_time_object.GetPlasoTimestamp()
85 self.assertIsNone(micro_posix_timestamp)
86
2087
2188 if __name__ == '__main__':
2289 unittest.main()
0 #!/usr/bin/python
1 # -*- coding: utf-8 -*-
2 """Tests for the semantic time implementation."""
3
4 import unittest
5
6 from dfdatetime import semantic_time
7
8
class SemanticTimeTest(unittest.TestCase):
  """Tests for semantic time."""

  # pylint: disable=protected-access

  def testCopyFromString(self):
    """Tests the CopyFromString function."""
    semantic_time_object = semantic_time.SemanticTime()

    # A semantic time stores the string verbatim.
    semantic_time_object.CopyFromString(u'Never')
    self.assertEqual(semantic_time_object.string, u'Never')

  def testCopyToStatTimeTuple(self):
    """Tests the CopyToStatTimeTuple function."""
    semantic_time_object = semantic_time.SemanticTime()

    # A semantic time maps to the zero stat time tuple.
    self.assertEqual(semantic_time_object.CopyToStatTimeTuple(), (0, 0))

  def testGetPlasoTimestamp(self):
    """Tests the GetPlasoTimestamp function."""
    semantic_time_object = semantic_time.SemanticTime()

    # A semantic time maps to a zero POSIX timestamp in microseconds.
    self.assertEqual(semantic_time_object.GetPlasoTimestamp(), 0)
36
37
38 if __name__ == '__main__':
39 unittest.main()
0 #!/usr/bin/python
1 # -*- coding: utf-8 -*-
2 """Tests for the SYSTEMTIME structure implementation."""
3
4 import unittest
5
6 from dfdatetime import systemtime
7
8
class SystemtimeTest(unittest.TestCase):
  """Tests for the SYSTEMTIME structure.

  Note: this test class was originally named FiletimeTest, most likely a
  copy-paste left-over from the FILETIME tests; it is renamed here and a
  backward-compatible alias is kept below so existing references still work.
  """

  # pylint: disable=protected-access

  def testCopyFromString(self):
    """Tests the CopyFromString function."""
    systemtime_object = systemtime.Systemtime()

    # Each test case maps a date and time string to the expected POSIX
    # number of seconds and the expected SYSTEMTIME members:
    # (year, month, day_of_month, hours, minutes, seconds, milliseconds).
    test_cases = [
        (u'2010-08-12', 1281571200, (2010, 8, 12, 0, 0, 0, 0)),
        (u'2010-08-12 21:06:31', 1281647191, (2010, 8, 12, 21, 6, 31, 0)),
        (u'2010-08-12 21:06:31.546875', 1281647191,
         (2010, 8, 12, 21, 6, 31, 546)),
        (u'2010-08-12 21:06:31.546875-01:00', 1281650791,
         (2010, 8, 12, 22, 6, 31, 546)),
        (u'2010-08-12 21:06:31.546875+01:00', 1281643591,
         (2010, 8, 12, 20, 6, 31, 546)),
        (u'1601-01-02 00:00:00', -11644387200, (1601, 1, 2, 0, 0, 0, 0)),
    ]
    for time_string, expected_number_of_seconds, expected_elements in (
        test_cases):
      systemtime_object.CopyFromString(time_string)
      self.assertEqual(
          systemtime_object._number_of_seconds, expected_number_of_seconds)
      self.assertEqual((
          systemtime_object.year, systemtime_object.month,
          systemtime_object.day_of_month, systemtime_object.hours,
          systemtime_object.minutes, systemtime_object.seconds,
          systemtime_object.milliseconds), expected_elements)

    # Dates before 1601 are not representable in a SYSTEMTIME.
    with self.assertRaises(ValueError):
      systemtime_object.CopyFromString(u'1600-01-02 00:00:00')

  def testCopyToStatTimeTuple(self):
    """Tests the CopyToStatTimeTuple function."""
    systemtime_object = systemtime.Systemtime(
        year=2010, month=8, day_of_month=12, hours=20, minutes=6, seconds=31)
    self.assertEqual(
        systemtime_object.CopyToStatTimeTuple(), (1281643591, 0))

    # Without date and time values there is nothing to convert.
    systemtime_object = systemtime.Systemtime()
    self.assertEqual(
        systemtime_object.CopyToStatTimeTuple(), (None, None))

  def testGetPlasoTimestamp(self):
    """Tests the GetPlasoTimestamp function."""
    systemtime_object = systemtime.Systemtime(
        year=2010, month=8, day_of_month=12, hours=20, minutes=6, seconds=31)
    self.assertEqual(
        systemtime_object.GetPlasoTimestamp(), 1281643591000000)

    # Without date and time values there is nothing to convert.
    systemtime_object = systemtime.Systemtime()
    self.assertIsNone(systemtime_object.GetPlasoTimestamp())


# Backward-compatible alias for the original (misnamed) class name.
FiletimeTest = SystemtimeTest
122
123
124 if __name__ == '__main__':
125 unittest.main()
66 from dfdatetime import time_elements
77
88
9 class PosixTimeTest(unittest.TestCase):
10 """Tests for the POSIX timestamp object."""
9 class TimeElementsTimeTest(unittest.TestCase):
10 """Tests for the time elements."""
11
12 # pylint: disable=protected-access
13
14 def testCopyFromString(self):
15 """Tests the CopyFromString function."""
16 time_elements_object = time_elements.TimeElements()
17
18 expected_time_elements_tuple = (2010, 8, 12, 0, 0, 0)
19 expected_number_of_seconds = 1281571200
20 time_elements_object.CopyFromString(u'2010-08-12')
21 self.assertEqual(
22 time_elements_object._time_elements_tuple, expected_time_elements_tuple)
23 self.assertEqual(
24 time_elements_object._number_of_seconds, expected_number_of_seconds)
25
26 expected_time_elements_tuple = (2010, 8, 12, 21, 6, 31)
27 expected_number_of_seconds = 1281647191
28 time_elements_object.CopyFromString(u'2010-08-12 21:06:31')
29 self.assertEqual(
30 time_elements_object._time_elements_tuple, expected_time_elements_tuple)
31 self.assertEqual(
32 time_elements_object._number_of_seconds, expected_number_of_seconds)
33
34 expected_time_elements_tuple = (2010, 8, 12, 21, 6, 31)
35 expected_number_of_seconds = 1281647191
36 time_elements_object.CopyFromString(u'2010-08-12 21:06:31.546875')
37 self.assertEqual(
38 time_elements_object._time_elements_tuple, expected_time_elements_tuple)
39 self.assertEqual(
40 time_elements_object._number_of_seconds, expected_number_of_seconds)
41
42 expected_time_elements_tuple = (2010, 8, 12, 22, 6, 31)
43 expected_number_of_seconds = 1281650791
44 time_elements_object.CopyFromString(u'2010-08-12 21:06:31.546875-01:00')
45 self.assertEqual(
46 time_elements_object._time_elements_tuple, expected_time_elements_tuple)
47 self.assertEqual(
48 time_elements_object._number_of_seconds, expected_number_of_seconds)
49
50 expected_time_elements_tuple = (2010, 8, 12, 20, 6, 31)
51 expected_number_of_seconds = 1281643591
52 time_elements_object.CopyFromString(u'2010-08-12 21:06:31.546875+01:00')
53 self.assertEqual(
54 time_elements_object._time_elements_tuple, expected_time_elements_tuple)
55 self.assertEqual(
56 time_elements_object._number_of_seconds, expected_number_of_seconds)
57
58 expected_time_elements_tuple = (1601, 1, 2, 0, 0, 0)
59 expected_number_of_seconds = -11644387200
60 time_elements_object.CopyFromString(u'1601-01-02 00:00:00')
61 self.assertEqual(
62 time_elements_object._time_elements_tuple, expected_time_elements_tuple)
63 self.assertEqual(
64 time_elements_object._number_of_seconds, expected_number_of_seconds)
1165
1266 def testCopyToStatTimeTuple(self):
1367 """Tests the CopyToStatTimeTuple function."""
1468 time_elements_object = time_elements.TimeElements(
15 (2010, 8, 12, 20, 6, 31))
69 time_elements_tuple=(2010, 8, 12, 20, 6, 31))
1670
17 expected_stat_time_tuple = (1281643591, 0)
71 expected_stat_time_tuple = (1281643591, None)
1872 stat_time_tuple = time_elements_object.CopyToStatTimeTuple()
1973 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
74
75 time_elements_object = time_elements.TimeElements()
76
77 expected_stat_time_tuple = (None, None)
78 stat_time_tuple = time_elements_object.CopyToStatTimeTuple()
79 self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
80
81 def testGetPlasoTimestamp(self):
82 """Tests the GetPlasoTimestamp function."""
83 time_elements_object = time_elements.TimeElements(
84 time_elements_tuple=(2010, 8, 12, 20, 6, 31))
85
86 expected_micro_posix_number_of_seconds = 1281643591000000
87 micro_posix_number_of_seconds = time_elements_object.GetPlasoTimestamp()
88 self.assertEqual(
89 micro_posix_number_of_seconds, expected_micro_posix_number_of_seconds)
90
91 time_elements_object = time_elements.TimeElements()
92
93 micro_posix_number_of_seconds = time_elements_object.GetPlasoTimestamp()
94 self.assertIsNone(micro_posix_number_of_seconds)
2095
2196
2297 if __name__ == '__main__':
0 #!/usr/bin/python
1 # -*- coding: utf-8 -*-
2 """Tests for the WebKit timestamp implementation."""
3
4 import unittest
5
6 from dfdatetime import webkit_time
7
8
class WebKitTimeTest(unittest.TestCase):
  """Tests for the WebKit timestamp."""

  def testCopyFromString(self):
    """Tests the CopyFromString function."""
    webkit_time_object = webkit_time.WebKitTime()

    # Date and time strings and the WebKit timestamps (microseconds since
    # 1601-01-01 00:00:00) they should convert to.
    test_cases = [
        (u'2010-08-12', 12926044800000000),
        (u'2010-08-12 21:06:31', 12926120791000000),
        (u'2010-08-12 21:06:31.546875', 12926120791546875),
        (u'2010-08-12 21:06:31.546875-01:00', 12926124391546875),
        (u'2010-08-12 21:06:31.546875+01:00', 12926117191546875),
        (u'1601-01-02 00:00:00', 86400 * 1000000),
    ]
    for time_string, expected_timestamp in test_cases:
      webkit_time_object.CopyFromString(time_string)
      self.assertEqual(webkit_time_object.timestamp, expected_timestamp)

    # Dates before 1601 are outside the supported range.
    with self.assertRaises(ValueError):
      webkit_time_object.CopyFromString(u'1500-01-02 00:00:00')

  def testCopyToStatTimeTuple(self):
    """Tests the CopyToStatTimeTuple function."""
    webkit_time_object = webkit_time.WebKitTime(timestamp=12926120791546875)
    self.assertEqual(
        webkit_time_object.CopyToStatTimeTuple(), (1281647191, 5468750))

    # An out-of-bounds timestamp yields an empty tuple.
    webkit_time_object = webkit_time.WebKitTime(
        timestamp=0x1ffffffffffffffff)
    self.assertEqual(
        webkit_time_object.CopyToStatTimeTuple(), (None, None))

    # An unset timestamp yields an empty tuple.
    webkit_time_object = webkit_time.WebKitTime()
    self.assertEqual(
        webkit_time_object.CopyToStatTimeTuple(), (None, None))

  def testGetPlasoTimestamp(self):
    """Tests the GetPlasoTimestamp function."""
    webkit_time_object = webkit_time.WebKitTime(timestamp=12926120791546875)
    self.assertEqual(
        webkit_time_object.GetPlasoTimestamp(), 1281647191546875)

    # An out-of-bounds timestamp yields None.
    webkit_time_object = webkit_time.WebKitTime(
        timestamp=0x1ffffffffffffffff)
    self.assertIsNone(webkit_time_object.GetPlasoTimestamp())

    # An unset timestamp yields None.
    webkit_time_object = webkit_time.WebKitTime()
    self.assertIsNone(webkit_time_object.GetPlasoTimestamp())
80
81
# Run the tests when this module is invoked directly as a script.
if __name__ == '__main__':
  unittest.main()
0 [tox]
1 envlist = py27, py34
2
3 [testenv]
4 pip_pre = True
5 setenv =
6 PYTHONPATH = {toxinidir}
7 deps = -rrequirements.txt
8 commands = python run_tests.py
0 # -*- coding: utf-8 -*-
0 # Original file copied from:
1 # http://src.chromium.org/chrome/trunk/tools/depot_tools/pylintrc
2
3 [MASTER]
4
5 # Specify a configuration file.
6 #rcfile=
7
8 # Python code to execute, usually for sys.path manipulation such as
9 # pygtk.require().
10 #init-hook=
11
12 # Add files or directories to the blacklist. They should be base names, not
13 # paths.
14 ignore=CVS
15
16 # Pickle collected data for later comparisons.
17 persistent=yes
18
19 # List of plugins (as comma separated values of python modules names) to load,
20 # usually to register additional checkers.
21 load-plugins=
22
23
24 [MESSAGES CONTROL]
25
26 # Enable the message, report, category or checker with the given id(s). You can
27 # either give multiple identifier separated by comma (,) or put this option
28 # multiple time.
29 #enable=
30
31 # Disable the message, report, category or checker with the given id(s). You
32 # can either give multiple identifier separated by comma (,) or put this option
33 # multiple time (only on the command line, not in the configuration file where
34 # it should appear only once).
35 # CHANGED:
36 #
37 # C0103: Invalid name ""
38 # C0302: Too many lines in module (N)
39 #
40 # I0010: Unable to consider inline option ''
41 # I0011: Locally disabling WNNNN
42 #
43 # R0201: Method could be a function
44 # R0801: Similar lines in N files
45 # R0901: Too many ancestors (N/7)
46 # R0902: Too many instance attributes (N/7)
47 # R0903: Too few public methods (N/2)
48 # R0904: Too many public methods (N/20)
49 # R0911: Too many return statements (N/6)
50 # R0912: Too many branches (N/12)
51 # R0913: Too many arguments (N/5)
52 # R0914: Too many local variables (N/15)
53 # R0915: Too many statements (N/50)
54 # R0921: Abstract class not referenced
55 # R0922: Abstract class is only referenced 1 times
56 #
57 # W0141: Used builtin function ''
58 # W0142: Used * or ** magic
59 # W0402: Uses of a deprecated module 'string'
60 # W0404: 41: Reimport 'XX' (imported line NN)
61 # W0511: TODO
62 # W1201: Specify string format arguments as logging function parameters
63 #
64 # Disabled:
65 # locally-enabled
66 # logging-format-interpolation
67 # redefined-variable-type
68 # simplifiable-if-statement
69 # too-many-boolean-expressions (N/5)
70 # too-many-nested-blocks (N/5)
71 # ungrouped-imports
72
73 disable=C0103,C0302,I0010,I0011,R0201,R0801,R0901,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R0921,R0922,W0141,W0142,W0402,W0404,W0511,W1201,locally-enabled,logging-format-interpolation,redefined-variable-type,simplifiable-if-statement,too-many-boolean-expressions,too-many-nested-blocks,ungrouped-imports
74
75
76 [REPORTS]
77
78 # Set the output format. Available formats are text, parseable, colorized, msvs
79 # (visual studio) and html
80 output-format=text
81
82 # Put messages in a separate file for each module / package specified on the
83 # command line instead of printing them on stdout. Reports (if any) will be
84 # written in a file name "pylint_global.[txt|html]".
85 files-output=no
86
87 # Tells whether to display a full report or only the messages
88 # CHANGED:
89 reports=no
90
91 # Python expression which should return a note less than 10 (10 is the highest
92 # note). You have access to the variables errors warning, statement which
93 # respectively contain the number of errors / warnings messages and the total
94 # number of statements analyzed. This is used by the global evaluation report
95 # (RP0004).
96 evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
97
98
99 [VARIABLES]
100
101 # Tells whether we should check for unused import in __init__ files.
102 init-import=no
103
104 # A regular expression matching the beginning of the name of unused variables.
105 # By default this is _ and dummy but we prefer _ and unused.
106 dummy-variables-rgx=_|unused
107
108 # List of additional names supposed to be defined in builtins. Remember that
109 # you should avoid to define new builtins when possible.
110 additional-builtins=
111
112
113 [TYPECHECK]
114
115 # Tells whether missing members accessed in mixin class should be ignored. A
116 # mixin class is detected if its name ends with "mixin" (case insensitive).
117 ignore-mixin-members=yes
118
119 # List of classes names for which member attributes should not be checked
120 # (useful for classes with attributes dynamically set).
121 ignored-classes=SQLObject,twisted.internet.reactor,hashlib,google.appengine.api.memcache
122
123 # List of members which are set dynamically and missed by pylint inference
124 # system, and so shouldn't trigger E0201 when accessed. Python regular
125 # expressions are accepted.
126 generated-members=REQUEST,acl_users,aq_parent,multiprocessing.managers.SyncManager
127
128
129 [MISCELLANEOUS]
130
131 # List of note tags to take in consideration, separated by a comma.
132 notes=FIXME,XXX,TODO
133
134
135 [SIMILARITIES]
136
137 # Minimum lines number of a similarity.
138 min-similarity-lines=4
139
140 # Ignore comments when computing similarities.
141 ignore-comments=yes
142
143 # Ignore docstrings when computing similarities.
144 ignore-docstrings=yes
145
146
147 [FORMAT]
148
149 # Maximum number of characters on a single line.
150 max-line-length=80
151
152 # Maximum number of lines in a module
153 max-module-lines=1000
154
155 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
156 # tab).
157 # CHANGED:
158 indent-string=' '
159
160
161 [BASIC]
162
163 # List of builtins function names that should not be used, separated by a comma
164 bad-functions=map,filter,apply,input
165
166 # Regular expression which should only match correct module names
167 module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
168
169 # Regular expression which should only match correct module level names
170 const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
171
172 # Regular expression which should only match correct class names
173 class-rgx=[A-Z_][a-zA-Z0-9]+$
174
175 # Regular expression which should only match correct function names
176 function-rgx=[a-z_][a-z0-9_]{2,30}$
177
178 # Regular expression which should only match correct method names
179 method-rgx=[a-z_][a-z0-9_]{2,30}$
180
181 # Regular expression which should only match correct instance attribute names
182 attr-rgx=[a-z_][a-z0-9_]{2,30}$
183
184 # Regular expression which should only match correct argument names
185 argument-rgx=[a-z_][a-z0-9_]{2,30}$
186
187 # Regular expression which should only match correct variable names
188 variable-rgx=[a-z_][a-z0-9_]{2,30}$
189
190 # Regular expression which should only match correct list comprehension /
191 # generator expression variable names
192 inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
193
194 # Good variable names which should always be accepted, separated by a comma
195 good-names=i,j,k,ex,Run,_
196
197 # Bad variable names which should always be refused, separated by a comma
198 bad-names=foo,bar,baz,toto,tutu,tata
199
200 # Regular expression which should only match functions or classes name which do
201 # not require a docstring
202 no-docstring-rgx=__.*__
203
204
205 [DESIGN]
206
207 # Maximum number of arguments for function / method
208 max-args=5
209
210 # Argument names that match this expression will be ignored. Default to name
211 # with leading underscore
212 ignored-argument-names=_.*
213
214 # Maximum number of locals for function / method body
215 max-locals=15
216
217 # Maximum number of return / yield for function / method body
218 max-returns=6
219
220 # Maximum number of branch for function / method body
221 max-branchs=12
222
223 # Maximum number of statements in function / method body
224 max-statements=50
225
226 # Maximum number of parents for a class (see R0901).
227 max-parents=7
228
229 # Maximum number of attributes for a class (see R0902).
230 max-attributes=7
231
232 # Minimum number of public methods for a class (see R0903).
233 min-public-methods=2
234
235 # Maximum number of public methods for a class (see R0904).
236 max-public-methods=20
237
238
239 [CLASSES]
240
241 # List of method names used to declare (i.e. assign) instance attributes.
242 defining-attr-methods=__init__,__new__,setUp
243
244 # List of valid names for the first argument in a class method.
245 valid-classmethod-first-arg=cls
246
247
248 [IMPORTS]
249
250 # Deprecated modules which should not be used, separated by a comma
251 deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
252
253 # Create a graph of every (i.e. internal and external) dependencies in the
254 # given file (report RP0402 must not be disabled)
255 import-graph=
256
257 # Create a graph of external dependencies in the given file (report RP0402 must
258 # not be disabled)
259 ext-import-graph=
260
261 # Create a graph of internal dependencies in the given file (report RP0402 must
262 # not be disabled)
263 int-import-graph=
264
265
266 [EXCEPTIONS]
267
268 # Exceptions that will emit a warning when being caught. Defaults to
269 # "Exception"
270 overgeneral-exceptions=Exception
0 #!/usr/bin/python
1 # -*- coding: utf-8 -*-
2 """Script to manage code reviews."""
3
4 from __future__ import print_function
5 import argparse
6 import json
7 import logging
8 import os
9 import re
10 import shlex
11 import subprocess
12 import sys
13 import time
14
15 # pylint: disable=import-error
16 # pylint: disable=no-name-in-module
17 if sys.version_info[0] < 3:
18 # Use urllib2 here since this code should be able to be used by a default
19 # Python set up. Otherwise usage of requests is preferred.
20 import urllib as urllib_parse
21 import urllib2 as urllib_error
22 import urllib2 as urllib_request
23 else:
24 import urllib.error as urllib_error
25 import urllib.parse as urllib_parse
26 import urllib.request as urllib_request
27
28 # Change PYTHONPATH to include utils.
29 sys.path.insert(0, u'.')
30
31 import utils.upload
32
33
class CLIHelper(object):
  """Class that defines CLI helper functions."""

  def RunCommand(self, command):
    """Runs a command.

    Args:
      command (str): command to run.

    Returns:
      tuple[int, bytes, bytes]: exit code, stdout and stderr output, where
          the output values are None if the command could not be started.
    """
    arguments = shlex.split(command)

    # Popen raises OSError when the executable cannot be started; it never
    # returns a false value, so the previous "if not process" check was
    # unreachable. Catch the error instead to honor the documented contract.
    try:
      process = subprocess.Popen(
          arguments, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    except OSError:
      logging.error(u'Running: "{0:s}" failed.'.format(command))
      return 1, None, None

    # communicate() waits for the process to terminate and returns the
    # stdout and stderr output as bytes.
    output, error = process.communicate()
    if process.returncode != 0:
      logging.error(u'Running: "{0:s}" failed with error: {1:s}.'.format(
          command, error))

    return process.returncode, output, error
59
60
class CodeReviewHelper(CLIHelper):
  """Class that defines codereview helper functions."""

  # Default reviewers of a new code review issue.
  _REVIEWERS = frozenset([
      u'jberggren@gmail.com',
      u'joachim.metz@gmail.com',
      u'onager@deerpie.com',
      u'romaing@google.com'])

  # Email addresses that are CC-ed on a new code review issue.
  _REVIEWERS_CC = frozenset([
      u'kiddi@kiddaland.net',
      u'log2timeline-dev@googlegroups.com'])

  def __init__(self, email_address, no_browser=False):
    """Initializes a codereview helper object.

    Args:
      email_address (str): email address.
      no_browser (Optional[bool]): True if the functionality to use the
          webbrowser to get the OAuth token should be disabled.
    """
    super(CodeReviewHelper, self).__init__()
    self._access_token = None
    self._email_address = email_address
    self._no_browser = no_browser
    self._upload_py_path = os.path.join(u'utils', u'upload.py')
    self._xsrf_token = None

  def AddMergeMessage(self, issue_number, message):
    """Adds a merge message to the code review issue.

    Where the merge is a commit to the main project git repository.

    Args:
      issue_number (int|str): codereview issue number.
      message (str): message to add to the code review issue.

    Returns:
      bool: merge message was added to the code review issue.
    """
    codereview_access_token = self.GetAccessToken()
    xsrf_token = self.GetXSRFToken()
    if not codereview_access_token or not xsrf_token:
      return False

    # Use a text (Unicode) string here; bytes literals have no format
    # method on Python 3.
    codereview_url = u'https://codereview.appspot.com/{0!s}/publish'.format(
        issue_number)

    post_data = urllib_parse.urlencode({
        u'add_as_reviewer': u'False',
        u'message': message,
        u'message_only': u'True',
        u'no_redirect': 'True',
        u'send_mail': 'True',
        u'xsrf_token': xsrf_token})

    # Passing data to Request turns it into a POST request. Request.add_data
    # was removed in Python 3 and urlopen requires the POST data to be bytes.
    request = urllib_request.Request(
        codereview_url, data=post_data.encode(u'utf-8'))

    # Add header: Authorization: OAuth <codereview access token>
    request.add_header(
        u'Authorization', u'OAuth {0:s}'.format(codereview_access_token))

    try:
      url_object = urllib_request.urlopen(request)
    except urllib_error.HTTPError as exception:
      logging.error(
          u'Failed publish to codereview issue: {0!s} with error: {1:s}'.format(
              issue_number, exception))
      return False

    if url_object.code not in (200, 201):
      logging.error((
          u'Failed publish to codereview issue: {0!s} with status code: '
          u'{1:d}').format(issue_number, url_object.code))
      return False

    return True

  def CloseIssue(self, issue_number):
    """Closes a code review issue.

    Args:
      issue_number (int|str): codereview issue number.

    Returns:
      bool: True if the code review was closed.
    """
    codereview_access_token = self.GetAccessToken()
    xsrf_token = self.GetXSRFToken()
    if not codereview_access_token or not xsrf_token:
      return False

    # Use a text (Unicode) string here; bytes literals have no format
    # method on Python 3.
    codereview_url = u'https://codereview.appspot.com/{0!s}/close'.format(
        issue_number)

    post_data = urllib_parse.urlencode({
        u'xsrf_token': xsrf_token})

    # Passing data to Request turns it into a POST request. Request.add_data
    # was removed in Python 3 and urlopen requires the POST data to be bytes.
    request = urllib_request.Request(
        codereview_url, data=post_data.encode(u'utf-8'))

    # Add header: Authorization: OAuth <codereview access token>
    request.add_header(
        u'Authorization', u'OAuth {0:s}'.format(codereview_access_token))

    try:
      url_object = urllib_request.urlopen(request)
    except urllib_error.HTTPError as exception:
      logging.error(
          u'Failed closing codereview issue: {0!s} with error: {1:s}'.format(
              issue_number, exception))
      return False

    if url_object.code != 200:
      logging.error((
          u'Failed closing codereview issue: {0!s} with status code: '
          u'{1:d}').format(issue_number, url_object.code))
      return False

    return True

  def CreateIssue(self, diffbase, description):
    """Creates a new codereview issue.

    Args:
      diffbase (str): diffbase.
      description (str): description.

    Returns:
      int: codereview issue number or None.
    """
    reviewers = list(self._REVIEWERS)
    reviewers_cc = list(self._REVIEWERS_CC)

    try:
      # Remove self from reviewers list.
      reviewers.remove(self._email_address)
    except ValueError:
      pass

    try:
      # Remove self from reviewers CC list.
      reviewers_cc.remove(self._email_address)
    except ValueError:
      pass

    reviewers = u','.join(reviewers)
    reviewers_cc = u','.join(reviewers_cc)

    command = u'{0:s} {1:s} --oauth2'.format(
        sys.executable, self._upload_py_path)

    if self._no_browser:
      command = u'{0:s} --no_oauth2_webbrowser'.format(command)

    command = (
        u'{0:s} --send_mail -r {1:s} --cc {2:s} -t "{3:s}" -y -- '
        u'{4:s}').format(
            command, reviewers, reviewers_cc, description, diffbase)

    if self._no_browser:
      print(
          u'Upload server: codereview.appspot.com (change with -s/--server)\n'
          u'Go to the following link in your browser:\n'
          u'\n'
          u'    https://codereview.appspot.com/get-access-token\n'
          u'\n'
          u'and copy the access token.\n'
          u'\n')
      print(u'Enter access token:', end=u' ')

      sys.stdout.flush()

    exit_code, output, _ = self.RunCommand(command)
    print(output)

    if exit_code != 0:
      return

    issue_url_line_start = (
        u'Issue created. URL: http://codereview.appspot.com/')
    # The command output is bytes; decode it before searching for the text
    # of the issue URL, searching bytes for text raises on Python 3.
    for line in output.decode(u'utf-8').split(u'\n'):
      if issue_url_line_start in line:
        _, _, issue_number = line.rpartition(issue_url_line_start)
        try:
          return int(issue_number, 10)
        except ValueError:
          pass

  def GetAccessToken(self):
    """Retrieves the OAuth access token.

    Returns:
      str: codereview access token.
    """
    if not self._access_token:
      # TODO: add support to get access token directly from user.
      self._access_token = utils.upload.GetAccessToken()
      if not self._access_token:
        logging.error(u'Unable to retrieve access token.')

    return self._access_token

  def GetXSRFToken(self):
    """Retrieves the XSRF token.

    Returns:
      str: codereview XSRF token or None if the token could not be obtained.
    """
    if not self._xsrf_token:
      codereview_access_token = self.GetAccessToken()
      if not codereview_access_token:
        return

      codereview_url = u'https://codereview.appspot.com/xsrf_token'

      request = urllib_request.Request(codereview_url)

      # Add header: Authorization: OAuth <codereview access token>
      request.add_header(
          u'Authorization', u'OAuth {0:s}'.format(codereview_access_token))
      request.add_header(u'X-Requesting-XSRF-Token', u'1')

      try:
        url_object = urllib_request.urlopen(request)
      except urllib_error.HTTPError as exception:
        logging.error(
            u'Failed retrieving codereview XSRF token with error: {0:s}'.format(
                exception))
        return

      if url_object.code != 200:
        logging.error((
            u'Failed retrieving codereview XSRF token with status code: '
            u'{0:d}').format(url_object.code))
        return

      # Decode the response so the token is a text string; a bytes value
      # would be mangled by urlencode on Python 3.
      self._xsrf_token = url_object.read().decode(u'utf-8')

    return self._xsrf_token

  def QueryIssue(self, issue_number):
    """Queries the information of a code review issue.

    The query returns JSON data that contains:
    {
      "description":str,
      "cc":[str],
      "reviewers":[str],
      "owner_email":str,
      "private":bool,
      "base_url":str,
      "owner":str,
      "subject":str,
      "created":str,
      "patchsets":[int],
      "modified":str,
      "project":str,
      "closed":bool,
      "issue":int
    }

    Where the "created" and "modified" strings are formatted as:
    "YYYY-MM-DD hh:mm:ss.######"

    Args:
      issue_number (int|str): codereview issue number.

    Returns:
      dict[str,object]: JSON response or None.
    """
    # Use a text (Unicode) string here; bytes literals have no format
    # method on Python 3.
    codereview_url = u'https://codereview.appspot.com/api/{0!s}'.format(
        issue_number)

    request = urllib_request.Request(codereview_url)

    try:
      url_object = urllib_request.urlopen(request)
    except urllib_error.HTTPError as exception:
      logging.error(
          u'Failed querying codereview issue: {0!s} with error: {1:s}'.format(
              issue_number, exception))
      return

    if url_object.code != 200:
      logging.error((
          u'Failed querying codereview issue: {0!s} with status code: '
          u'{1:d}').format(issue_number, url_object.code))
      return

    # Decode the response before parsing; json.loads only accepts bytes
    # as of Python 3.6.
    response_data = url_object.read()
    return json.loads(response_data.decode(u'utf-8'))

  def UpdateIssue(self, issue_number, diffbase, description):
    """Updates a code review issue.

    Args:
      issue_number (int|str): codereview issue number.
      diffbase (str): diffbase.
      description (str): description.

    Returns:
      bool: True if the code review was updated.
    """
    command = u'{0:s} {1:s} --oauth2'.format(
        sys.executable, self._upload_py_path)

    if self._no_browser:
      command = u'{0:s} --no_oauth2_webbrowser'.format(command)

    command = (
        u'{0:s} -i {1!s} -m "Code updated." -t "{2:s}" -y -- '
        u'{3:s}').format(command, issue_number, description, diffbase)

    if self._no_browser:
      print(
          u'Upload server: codereview.appspot.com (change with -s/--server)\n'
          u'Go to the following link in your browser:\n'
          u'\n'
          u'    https://codereview.appspot.com/get-access-token\n'
          u'\n'
          u'and copy the access token.\n'
          u'\n')
      print(u'Enter access token:', end=u' ')

      sys.stdout.flush()

    exit_code, output, _ = self.RunCommand(command)
    print(output)

    return exit_code == 0
397
398
class GitHelper(CLIHelper):
  """Class that defines git helper functions."""

  def __init__(self, git_repo_url):
    """Initializes a git helper object.

    Args:
      git_repo_url (str): git repo URL.
    """
    super(GitHelper, self).__init__()
    self._git_repo_url = git_repo_url
    self._remotes = []

  def _GetRemotes(self):
    """Retrieves the git repository remotes.

    Returns:
      list[str]: git repository remotes, which is empty if they could not
          be determined.
    """
    if not self._remotes:
      exit_code, output, _ = self.RunCommand(u'git remote -v')
      if exit_code == 0:
        # Decode the command output so the remotes can be compared against
        # text (Unicode) strings on Python 3.
        self._remotes = output.decode(u'utf-8').split(u'\n')

    return self._remotes

  def AddPath(self, path):
    """Adds a specific path to be managed by git.

    Args:
      path (str): path.

    Returns:
      bool: True if the path was added.
    """
    command = u'git add -A {0:s}'.format(path)
    exit_code, _, _ = self.RunCommand(command)
    return exit_code == 0

  def CheckHasBranch(self, branch):
    """Checks if the git repo has a specific branch.

    Args:
      branch (str): name of the feature branch.

    Returns:
      bool: True if git repo has the specific branch.
    """
    exit_code, output, _ = self.RunCommand(u'git branch')
    if exit_code != 0:
      return False

    # Check the branch names listed by "git branch".
    for line in output.decode(u'utf-8').split(u'\n'):
      # Ignore the first 2 characters of the line, which contain the
      # active branch marker.
      if line[2:] == branch:
        return True
    return False

  def CheckHasProjectOrigin(self):
    """Checks if the git repo has the project remote origin defined.

    Returns:
      bool: True if the git repo has the project origin defined.
    """
    origin_git_repo_url = self.GetRemoteOrigin()
    return origin_git_repo_url == self._git_repo_url

  def CheckHasProjectUpstream(self):
    """Checks if the git repo has the project remote upstream defined.

    Returns:
      bool: True if the git repo has the project remote upstream defined.
    """
    # Check for remote entries starting with upstream. Format the expected
    # prefix as a text string; bytes literals have no format method on
    # Python 3.
    expected_prefix = u'upstream\t{0:s}'.format(self._git_repo_url)
    for remote in self._GetRemotes():
      if remote.startswith(expected_prefix):
        return True
    return False

  def CheckHasUncommittedChanges(self):
    """Checks if the git repo has uncommitted changes.

    Returns:
      bool: True if the git repo has uncommitted changes.
    """
    exit_code, output, _ = self.RunCommand(u'git status -s')
    if exit_code != 0:
      return False

    # Check if 'git status -s' yielded any output.
    for line in output.split(b'\n'):
      if line:
        return True
    return False

  def CheckSynchronizedWithUpstream(self):
    """Checks if the git repo is synchronized with upstream.

    Returns:
      bool: True if the git repo is synchronized with upstream.
    """
    # Fetch the entire upstream repo information not only that of
    # the master branch. Otherwise the information about the current
    # upstream HEAD is not updated.
    exit_code, _, _ = self.RunCommand(u'git fetch upstream')
    if exit_code != 0:
      return False

    # The result of "git log HEAD..upstream/master --oneline" should be empty
    # if the git repo is synchronized with upstream.
    exit_code, output, _ = self.RunCommand(
        u'git log HEAD..upstream/master --oneline')
    return exit_code == 0 and not output

  def CommitToOriginInNameOf(
      self, codereview_issue_number, author, description):
    """Commits changes in name of an author to the master branch of origin.

    Args:
      codereview_issue_number (int|str): codereview issue number.
      author (str): full name and email address of the author, formatted as:
          "Full Name <email.address@example.com>".
      description (str): description of the commit.

    Returns:
      bool: True if the changes were committed to the git repository.
    """
    # Use "!s" for the issue number; "{1:s}" raises on Python 3 when an
    # int is passed, and the docstring allows int|str.
    command = (
        u'git commit -a --author="{0:s}" '
        u'-m "Code review: {1!s}: {2:s}"').format(
            author, codereview_issue_number, description)
    exit_code, _, _ = self.RunCommand(command)
    if exit_code != 0:
      return False

    exit_code, _, _ = self.RunCommand(u'git push origin master')
    if exit_code != 0:
      return False

    return True

  def DropUncommittedChanges(self):
    """Drops the uncommitted changes."""
    self.RunCommand(u'git stash')
    self.RunCommand(u'git stash drop')

  def GetActiveBranch(self):
    """Retrieves the active branch.

    Returns:
      str: name of the active branch or None.
    """
    exit_code, output, _ = self.RunCommand(u'git branch')
    if exit_code != 0:
      # Return None per the documented interface, not False.
      return

    # The active branch is marked by "git branch" with a "* " prefix.
    for line in output.decode(u'utf-8').split(u'\n'):
      if line.startswith(u'* '):
        # Ignore the first 2 characters of the line.
        return line[2:]
    return

  def GetChangedFiles(self, diffbase=None):
    """Retrieves the changed files.

    Args:
      diffbase (Optional[str]): git diffbase, for example "upstream/master".

    Returns:
      list[str]: names of the changed files.
    """
    if diffbase:
      command = u'git diff --name-only {0:s}'.format(diffbase)
    else:
      command = u'git ls-files'

    exit_code, output, _ = self.RunCommand(command)
    if exit_code != 0:
      return []

    # Decode the command output so the file names are text (Unicode)
    # strings that can be compared against text in GetChangedPythonFiles.
    return output.decode(u'utf-8').split(u'\n')

  def GetChangedPythonFiles(self, diffbase=None):
    """Retrieves the changed Python files.

    Note that several Python files are excluded:
    * Python files generated by the protobuf compiler (*_pb2.py)
    * Python files used as test data (test_data/*.py)
    * Python files used for sphinx (docs/*.py)
    * setup.py and utils/upload.py

    Args:
      diffbase (Optional[str]): git diffbase, for example "upstream/master".

    Returns:
      list[str]: names of the changed Python files.
    """
    upload_path = os.path.join(u'utils', u'upload.py')
    python_files = []
    for changed_file in self.GetChangedFiles(diffbase=diffbase):
      if (not changed_file.endswith(u'.py') or
          changed_file.endswith(u'_pb2.py') or
          not os.path.exists(changed_file) or
          changed_file.startswith(u'docs') or
          changed_file.startswith(u'test_data') or
          changed_file in (u'setup.py', upload_path)):
        continue

      python_files.append(changed_file)

    return python_files

  def GetEmailAddress(self):
    """Retrieves the email address.

    Returns:
      str: email address or None.
    """
    exit_code, output, _ = self.RunCommand(u'git config user.email')
    if exit_code != 0:
      return

    # Decode the command output so the email address is a text string.
    output_lines = output.decode(u'utf-8').split(u'\n')
    if not output_lines:
      return

    return output_lines[0]

  def GetLastCommitMessage(self):
    """Retrieves the last commit message.

    Returns:
      str: last commit message or None.
    """
    exit_code, output, _ = self.RunCommand(u'git log -1')
    if exit_code != 0:
      return

    # Expecting 6 lines of output where the 5th line contains
    # the commit message.
    output_lines = output.decode(u'utf-8').split(u'\n')
    if len(output_lines) != 6:
      return

    return output_lines[4].strip()

  def GetRemoteOrigin(self):
    """Retrieves the remote origin.

    Returns:
      str: git repository URL or None.
    """
    # Check for remote entries starting with origin.
    for remote in self._GetRemotes():
      if remote.startswith(u'origin\t'):
        values = remote.split()
        if len(values) == 3:
          return values[1]

  def PullFromFork(self, git_repo_url, branch):
    """Pulls changes from a feature branch on a fork.

    Args:
      git_repo_url (str): git repository URL of the fork.
      branch (str): name of the feature branch of the fork.

    Returns:
      bool: True if the pull was successful.
    """
    command = u'git pull --squash {0:s} {1:s}'.format(git_repo_url, branch)
    exit_code, _, _ = self.RunCommand(command)
    return exit_code == 0

  def PushToOrigin(self, branch, force=False):
    """Pushes the active branch of the git repo to origin.

    Args:
      branch (str): name of the feature branch.
      force (Optional[bool]): True if the push should be forced.

    Returns:
      bool: True if the push was successful.
    """
    # Only pass "-f" when a forced push was requested; previously the
    # two commands were swapped.
    if force:
      command = u'git push -f --set-upstream origin {0:s}'.format(branch)
    else:
      command = u'git push --set-upstream origin {0:s}'.format(branch)

    exit_code, _, _ = self.RunCommand(command)
    return exit_code == 0

  def RemoveFeatureBranch(self, branch):
    """Removes the git feature branch both local and from origin.

    Args:
      branch (str): name of the feature branch.
    """
    # Never remove the master branch.
    if branch == u'master':
      return

    self.RunCommand(u'git push origin --delete {0:s}'.format(branch))
    self.RunCommand(u'git branch -D {0:s}'.format(branch))

  def SynchronizeWithOrigin(self):
    """Synchronizes git with origin.

    Returns:
      bool: True if the git repository has synchronized with origin.
    """
    exit_code, _, _ = self.RunCommand(u'git fetch origin')
    if exit_code != 0:
      return False

    exit_code, _, _ = self.RunCommand(
        u'git pull --no-edit origin master')

    return exit_code == 0

  def SynchronizeWithUpstream(self):
    """Synchronizes git with upstream.

    Returns:
      bool: True if the git repository has synchronized with upstream.
    """
    exit_code, _, _ = self.RunCommand(u'git fetch upstream')
    if exit_code != 0:
      return False

    exit_code, _, _ = self.RunCommand(
        u'git pull --no-edit --rebase upstream master')
    if exit_code != 0:
      return False

    exit_code, _, _ = self.RunCommand(u'git push')

    return exit_code == 0

  def SwitchToMasterBranch(self):
    """Switches git to the master branch.

    Returns:
      bool: True if the git repository has switched to the master branch.
    """
    exit_code, _, _ = self.RunCommand(u'git checkout master')
    # A zero exit code means the checkout succeeded; the comparison was
    # previously inverted.
    return exit_code == 0
746
747
class GitHubHelper(object):
  """Class that defines github helper functions."""

  def __init__(self, organization, project):
    """Initializes a github helper object.

    Args:
      organization (str): github organization name.
      project (str): github project name.
    """
    super(GitHubHelper, self).__init__()
    self._organization = organization
    self._project = project

  def CreatePullRequest(
      self, access_token, codereview_issue_number, origin, description):
    """Creates a pull request.

    Args:
      access_token (str): github access token.
      codereview_issue_number (int|str): codereview issue number.
      origin (str): origin of the pull request, formatted as:
          "username:feature".
      description (str): description.

    Returns:
      bool: True if the pull request was created.
    """
    title = b'{0!s}: {1:s}'.format(codereview_issue_number, description)
    body = (
        b'[Code review: {0!s}: {1:s}]'
        b'(https://codereview.appspot.com/{0!s}/)').format(
            codereview_issue_number, description)

    # NOTE(review): the JSON payload is built by string formatting and is
    # not escaped; a double quote in the description will break the request.
    post_data = (
        b'{{\n'
        b'  "title": "{0:s}",\n'
        b'  "body": "{1:s}",\n'
        b'  "head": "{2:s}",\n'
        b'  "base": "master"\n'
        b'}}\n').format(title, body, origin)

    github_url = (
        u'https://api.github.com/repos/{0:s}/{1:s}/pulls?'
        u'access_token={2:s}').format(
            self._organization, self._project, access_token)

    request = urllib_request.Request(github_url)

    # This will change the request into a POST.
    request.add_data(post_data)

    try:
      url_object = urllib_request.urlopen(request)
    except urllib_error.HTTPError as exception:
      logging.error(
          u'Failed creating pull request: {0!s} with error: {1:s}'.format(
              codereview_issue_number, exception))
      return False

    # The github API returns 201 (Created) on success; accept 200 as well.
    if url_object.code not in (200, 201):
      logging.error(
          u'Failed creating pull request: {0!s} with status code: {1:d}'.format(
              codereview_issue_number, url_object.code))
      return False

    return True

  def GetForkGitRepoUrl(self, username):
    """Retrieves the git repository URL of a fork.

    Args:
      username (str): github username of the fork.

    Returns:
      str: git repository URL or None.
    """
    return u'https://github.com/{0:s}/{1:s}.git'.format(username, self._project)

  def QueryUser(self, username):
    """Queries a github user.

    Args:
      username (str): github user name.

    Returns:
      dict[str,object]: JSON response or None.
    """
    github_url = b'https://api.github.com/users/{0:s}'.format(username)

    request = urllib_request.Request(github_url)

    try:
      url_object = urllib_request.urlopen(request)
    except urllib_error.HTTPError as exception:
      logging.error(
          u'Failed querying github user: {0:s} with error: {1:s}'.format(
              username, exception))
      return

    if url_object.code != 200:
      # Note: {0:s} since username is a string; the original used {0:d},
      # which raised a ValueError while formatting this error message.
      logging.error(
          u'Failed querying github user: {0:s} with status code: {1:d}'.format(
              username, url_object.code))
      return

    response_data = url_object.read()
    return json.loads(response_data)
856
857
class ProjectHelper(CLIHelper):
  """Class that defines project helper functions.

  Attributes:
    project_name (str): name of the project.
  """

  _AUTHORS_FILE_HEADER = [
      u'# Names should be added to this file with this pattern:',
      u'#',
      u'# For individuals:',
      u'# Name (email address)',
      u'#',
      u'# For organizations:',
      u'# Organization (fnmatch pattern)',
      u'#',
      u'# See python fnmatch module documentation for more information.',
      u'',
      u'Google Inc. (*@google.com)']

  SUPPORTED_PROJECTS = frozenset([
      u'dfdatetime', u'dfvfs', u'dfwinreg', u'l2tdevtools', u'l2tdocs',
      u'plaso'])

  def __init__(self, script_path):
    """Initializes a project helper object.

    Args:
      script_path (str): path to the script.

    Raises:
      ValueError: if the project name is not supported.
    """
    super(ProjectHelper, self).__init__()
    self.project_name = self._GetProjectName(script_path)

  @property
  def version_file_path(self):
    """str: path of the version file."""
    return os.path.join(self.project_name, u'__init__.py')

  def _GetProjectName(self, script_path):
    """Retrieves the project name from the script path.

    Args:
      script_path (str): path to the script.

    Returns:
      str: project name.

    Raises:
      ValueError: if the project name is not supported.
    """
    # The script is expected to live in <project>/utils/, so the project
    # directory is two levels up from the script itself.
    project_name = os.path.abspath(script_path)
    project_name = os.path.dirname(project_name)
    project_name = os.path.dirname(project_name)
    project_name = os.path.basename(project_name)

    for supported_project_name in self.SUPPORTED_PROJECTS:
      if supported_project_name in project_name:
        return supported_project_name

    raise ValueError(
        u'Unsupported project name: {0:s}.'.format(project_name))

  def _ReadFileContents(self, path):
    """Reads the contents of a file.

    Args:
      path (str): path of the file.

    Returns:
      str: file content decoded as UTF-8 or None.
    """
    if not os.path.exists(path):
      logging.error(u'Missing file: {0:s}'.format(path))
      return

    try:
      with open(path, u'rb') as file_object:
        file_contents = file_object.read()

    except IOError as exception:
      logging.error(u'Unable to read file with error: {0:s}'.format(exception))
      return

    try:
      file_contents = file_contents.decode(u'utf-8')
    except UnicodeDecodeError as exception:
      logging.error(
          u'Unable to read file with error: {0:s}'.format(exception))
      return

    return file_contents

  def GetVersion(self):
    """Retrieves the project version from the version file.

    Returns:
      str: project version or None.
    """
    version_file_contents = self._ReadFileContents(self.version_file_path)
    if not version_file_contents:
      return

    # The version is formatted as:
    # __version__ = 'VERSION'
    version_line_prefix = u'__version__ = \''

    lines = version_file_contents.split(u'\n')
    for line in lines:
      if line.startswith(version_line_prefix):
        # Strip the prefix and the trailing quote.
        return line[len(version_line_prefix):-1]

    return

  def UpdateDpkgChangelogFile(self):
    """Updates the dpkg changelog file.

    Returns:
      bool: True if the dpkg changelog file was updated or if the dpkg
          changelog file does not exists.
    """
    project_version = self.GetVersion()

    dpkg_changelog_path = os.path.join(u'config', u'dpkg', u'changelog')
    if not os.path.exists(dpkg_changelog_path):
      return True

    dpkg_maintainer = u'Log2Timeline <log2timeline-dev@googlegroups.com>'
    dpkg_date = time.strftime(u'%a, %d %b %Y %H:%M:%S %z')
    dpkg_changelog_content = u'\n'.join([
        u'{0:s} ({1:s}-1) unstable; urgency=low'.format(
            self.project_name, project_version),
        u'',
        u' * Auto-generated',
        u'',
        u' -- {0:s} {1:s}'.format(dpkg_maintainer, dpkg_date)])

    try:
      dpkg_changelog_content = dpkg_changelog_content.encode(u'utf-8')
    except UnicodeEncodeError as exception:
      logging.error(
          u'Unable to write dpkg changelog file with error: {0:s}'.format(
              exception))
      return False

    try:
      with open(dpkg_changelog_path, u'wb') as file_object:
        file_object.write(dpkg_changelog_content)
    except IOError as exception:
      logging.error(
          u'Unable to write dpkg changelog file with error: {0:s}'.format(
              exception))
      return False

    return True

  def UpdateAuthorsFile(self):
    """Updates the AUTHORS file.

    Returns:
      bool: True if the AUTHORS file update was successful.
    """
    exit_code, output, _ = self.RunCommand(u'git log --format="%aN (%aE)"')
    if exit_code != 0:
      return False

    # NOTE(review): output is bytes; mixing it with unicode separators
    # below relies on Python 2 string coercion — verify before porting.
    lines = output.split(b'\n')

    # Reverse the lines since we want the oldest commits first.
    lines.reverse()

    authors_by_commit = []
    authors = {}
    for author in lines:
      # Each line is formatted as "Name (email)"; strip the trailing ')'
      # and split on the last '(' to separate name and email address.
      name, _, email_address = author[:-1].rpartition(u'(')
      if email_address in authors:
        if name != authors[email_address]:
          # Note: {0:s} since the email address is a string; the original
          # used {0:d}, which raised a ValueError while formatting.
          logging.warning(u'Detected name mismatch for author: {0:s}.'.format(
              email_address))
        continue

      authors[email_address] = name
      authors_by_commit.append(author)

    file_content = []
    file_content.extend(self._AUTHORS_FILE_HEADER)
    file_content.extend(authors_by_commit)

    file_content = u'\n'.join(file_content)
    file_content = file_content.encode(u'utf-8')

    with open(u'AUTHORS', 'wb') as file_object:
      file_object.write(file_content)

    return True

  def UpdateVersionFile(self):
    """Updates the version file.

    Returns:
      bool: True if the file was updated.
    """
    version_file_contents = self._ReadFileContents(self.version_file_path)
    if not version_file_contents:
      logging.error(u'Unable to read version file.')
      return False

    date_version = time.strftime(u'%Y%m%d')
    lines = version_file_contents.split(u'\n')
    for line_index, line in enumerate(lines):
      # plaso stores its version in VERSION_DATE; the other projects use
      # the conventional __version__ attribute.
      if (self.project_name == u'plaso' and
          line.startswith(u'VERSION_DATE = ')):
        version_string = u'VERSION_DATE = \'{0:s}\''.format(date_version)
        lines[line_index] = version_string

      elif (self.project_name != u'plaso' and
            line.startswith(u'__version__ = ')):
        version_string = u'__version__ = \'{0:s}\''.format(date_version)
        lines[line_index] = version_string

    version_file_contents = u'\n'.join(lines)

    try:
      version_file_contents = version_file_contents.encode(u'utf-8')
    except UnicodeEncodeError as exception:
      logging.error(
          u'Unable to write version file with error: {0:s}'.format(exception))
      return False

    try:
      with open(self.version_file_path, u'wb') as file_object:
        file_object.write(version_file_contents)

    except IOError as exception:
      logging.error(
          u'Unable to write version file with error: {0:s}'.format(exception))
      return False

    return True
1099
1100
class PylintHelper(CLIHelper):
  """Class that defines pylint helper functions."""

  _MINIMUM_VERSION_TUPLE = (1, 5, 0)

  def CheckFiles(self, filenames):
    """Checks if the linting of the files is correct using pylint.

    Args:
      filenames (list[str]): names of the files to lint.

    Returns:
      bool: True if the files were linted without errors.
    """
    print(u'Running linter on changed files.')

    failed_filenames = []
    for filename in filenames:
      print(u'Checking: {0:s}'.format(filename))

      command = u'pylint --rcfile=utils/pylintrc {0:s}'.format(filename)
      if subprocess.call(command, shell=True) != 0:
        failed_filenames.append(filename)

    if not failed_filenames:
      return True

    print(u'\nFiles with linter errors:\n{0:s}\n'.format(
        u'\n'.join(failed_filenames)))
    return False

  def CheckUpToDateVersion(self):
    """Checks if the pylint version is up to date.

    Returns:
      bool: True if the pylint version is up to date.
    """
    exit_code, output, _ = self.RunCommand(u'pylint --version')
    if exit_code != 0:
      return False

    version_tuple = (0, 0, 0)
    for line in output.split(b'\n'):
      if not line.startswith(b'pylint '):
        continue

      _, _, version = line.partition(b' ')
      # Remove a trailing comma.
      version, _, _ = version.partition(b',')

      version_tuple = tuple([int(digit) for digit in version.split(b'.')])

    return version_tuple >= self._MINIMUM_VERSION_TUPLE
1152
1153
class ReadTheDocsHelper(object):
  """Class that defines readthedocs helper functions."""

  def __init__(self, project):
    """Initializes a readthedocs helper object.

    Args:
      project (str): github project name.
    """
    super(ReadTheDocsHelper, self).__init__()
    self._project = project

  def TriggerBuild(self):
    """Triggers readthedocs to build the docs of the project.

    Returns:
      bool: True if the build was triggered.
    """
    readthedocs_url = u'https://readthedocs.org/build/{0:s}'.format(
        self._project)

    request = urllib_request.Request(readthedocs_url)

    # This will change the request into a POST.
    request.add_data(b'')

    try:
      url_object = urllib_request.urlopen(request)
    except urllib_error.HTTPError as exception:
      logging.error(
          u'Failed triggering build with error: {0:s}'.format(
              exception))
      return False

    if url_object.code != 200:
      # Note: {0:d} since the status code is the only format argument;
      # the original used {1:d}, which raised an IndexError while
      # formatting this error message.
      logging.error(
          u'Failed triggering build with status code: {0:d}'.format(
              url_object.code))
      return False

    return True
1195
1196
class SphinxAPIDocHelper(CLIHelper):
  """Class that defines sphinx-apidoc helper functions."""

  _MINIMUM_VERSION_TUPLE = (1, 2, 0)

  def __init__(self, project):
    """Initializes a sphinx-apidoc helper object.

    Args:
      project (str): github project name.
    """
    super(SphinxAPIDocHelper, self).__init__()
    self._project = project

  def CheckUpToDateVersion(self):
    """Checks if the sphinx-apidoc version is up to date.

    Returns:
      bool: True if the sphinx-apidoc version is up to date.
    """
    exit_code, output, _ = self.RunCommand(u'sphinx-apidoc --version')
    if exit_code != 0:
      return False

    version_tuple = (0, 0, 0)
    for line in output.split(b'\n'):
      if not line.startswith(b'Sphinx (sphinx-apidoc) '):
        continue

      # The version is the last space-separated token on the line.
      _, _, version = line.rpartition(b' ')
      version_tuple = tuple([int(digit) for digit in version.split(b'.')])

    return version_tuple >= self._MINIMUM_VERSION_TUPLE

  def UpdateAPIDocs(self):
    """Updates the API docs.

    Returns:
      bool: True if the API docs have been updated.
    """
    exit_code, output, _ = self.RunCommand(
        u'sphinx-apidoc -f -o docs {0:s}'.format(self._project))
    print(output)

    return exit_code == 0
1241
1242
class NetRCFile(object):
  """Class that defines a .netrc file."""

  _NETRC_SEPARATOR_RE = re.compile(r'[^ \t\n]+')

  def __init__(self):
    """Initializes a .netrc file object."""
    super(NetRCFile, self).__init__()
    self._contents = None
    self._values = None

    home_path = os.path.expanduser(u'~')
    self._path = os.path.join(home_path, u'.netrc')
    if not os.path.exists(self._path):
      return

    with open(self._path, 'r') as file_object:
      self._contents = file_object.read()

  def _GetGitHubValues(self):
    """Retrieves the github values.

    Returns:
      list[str]: .netrc values for github.com or None.
    """
    if not self._contents:
      return

    # Note that according to GNU's manual on .netrc file, the credential
    # tokens "may be separated by spaces, tabs, or new-lines".
    if not self._values:
      self._values = self._NETRC_SEPARATOR_RE.findall(self._contents)

    for token_index, token in enumerate(self._values):
      if token != u'github.com':
        continue

      # The tokens following "machine github.com" are the credentials.
      if self._values[token_index - 1] == u'machine':
        return self._values[token_index + 1:]

  def GetGitHubAccessToken(self):
    """Retrieves the github access token.

    Returns:
      str: github access token or None.
    """
    values = self._GetGitHubValues()
    if not values:
      return

    # The token is stored as the value of the first "password" field.
    try:
      password_index = values.index(u'password')
    except ValueError:
      return

    return values[password_index + 1]

  def GetGitHubUsername(self):
    """Retrieves the github username.

    Returns:
      str: github username or None.
    """
    values = self._GetGitHubValues()
    if not values:
      return

    login_value = None
    for token_index, token in enumerate(values):
      if token == u'login':
        login_value = values[token_index + 1]

    # If the next field is 'password' we assume the login field is empty.
    if login_value != u'password':
      return login_value
1312
1313
class ReviewFile(object):
  """Class that defines a review file.

  A review file is used to track code review relevant information like the
  codereview issue number. It is stored in the .review subdirectory and
  named after the feature branch e.g. ".review/feature".
  """

  def __init__(self, branch_name):
    """Initializes a review file object.

    Args:
      branch_name (str): name of the feature branch of the review.
    """
    super(ReviewFile, self).__init__()
    self._contents = None
    self._path = os.path.join(u'.review', branch_name)

    if os.path.exists(self._path):
      with open(self._path, 'r') as file_object:
        self._contents = file_object.read()

  def Create(self, codereview_issue_number):
    """Creates a new review file.

    If the .review directory does not exist, it will be created.

    Args:
      codereview_issue_number (int|str): codereview issue number.

    Returns:
      bool: True if the review file was created.
    """
    if not os.path.exists(u'.review'):
      os.mkdir(u'.review')
    with open(self._path, 'w') as file_object:
      file_object.write(u'{0!s}'.format(codereview_issue_number))

    # The original implementation documented a bool result but implicitly
    # returned None; return True to match the documented contract.
    return True

  def Exists(self):
    """Determines if the review file exists.

    Returns:
      bool: True if review file exists.
    """
    return os.path.exists(self._path)

  def GetCodeReviewIssueNumber(self):
    """Retrieves the codereview issue number.

    Returns:
      int: codereview issue number or None if the contents cannot be
          parsed as a base 10 integer.
    """
    if not self._contents:
      return

    try:
      return int(self._contents, 10)
    except ValueError:
      pass

  def Remove(self):
    """Removes the review file."""
    if not os.path.exists(self._path):
      return

    os.remove(self._path)
1380
1381
1382 class ReviewHelper(object):
1383 """Class that defines review helper functions."""
1384
1385 _PROJECT_NAME_PREFIX_REGEX = re.compile(
1386 r'\[({0:s})\] '.format(u'|'.join(ProjectHelper.SUPPORTED_PROJECTS)))
1387
1388 def __init__(
1389 self, command, github_origin, feature_branch, diffbase, all_files=False,
1390 no_browser=False, no_confirm=False):
1391 """Initializes a review helper object.
1392
1393 Args:
1394 command (str): user provided command, for example "create", "lint".
1395 github_origin (str): github origin.
1396 feature_branch (str): feature branch.
1397 diffbase (str): diffbase.
1398 all_files (Optional[bool]): True if the command should apply to all
1399 files. Currently this only affects the lint command.
1400 no_browser (Optional[bool]): True if the functionality to use the
1401 webbrowser to get the OAuth token should be disabled.
1402 no_confirm (Optional[bool]): True if the defaults should be applied
1403 without confirmation.
1404 """
1405 super(ReviewHelper, self).__init__()
1406 self._active_branch = None
1407 self._all_files = all_files
1408 self._codereview_helper = None
1409 self._command = command
1410 self._diffbase = diffbase
1411 self._feature_branch = feature_branch
1412 self._git_helper = None
1413 self._git_repo_url = None
1414 self._github_helper = None
1415 self._github_origin = github_origin
1416 self._fork_feature_branch = None
1417 self._fork_username = None
1418 self._merge_author = None
1419 self._merge_description = None
1420 self._no_browser = no_browser
1421 self._no_confirm = no_confirm
1422 self._project_helper = None
1423 self._project_name = None
1424 self._sphinxapidoc_helper = None
1425
  def CheckLocalGitState(self):
    """Checks the state of the local git repository.

    Depending on the command this verifies the upstream or origin remote
    is configured, that there are no uncommitted changes, and that the
    active branch is appropriate for the command. Also determines the
    active branch and, for close, switches back to master.

    Returns:
      bool: True if the state of the local git repository is sane.
    """
    # Commands that rebase on upstream require the upstream remote.
    if self._command in (u'close', u'create', u'lint', u'update'):
      if not self._git_helper.CheckHasProjectUpstream():
        print(u'{0:s} aborted - missing project upstream.'.format(
            self._command.title()))
        print(u'Run: git remote add upstream {0:s}'.format(self._git_repo_url))
        return False

    # Merging commits directly to origin, so origin must be the project repo.
    elif self._command == u'merge':
      if not self._git_helper.CheckHasProjectOrigin():
        print(u'{0:s} aborted - missing project origin.'.format(
            self._command.title()))
        return False

    # Read-only commands tolerate uncommitted changes; the rest do not.
    if self._command not in (
        u'lint', u'test', u'update-version', u'update_version'):
      if self._git_helper.CheckHasUncommittedChanges():
        print(u'{0:s} aborted - detected uncommitted changes.'.format(
            self._command.title()))
        print(u'Run: git commit')
        return False

    self._active_branch = self._git_helper.GetActiveBranch()
    # create/update operate on a feature branch, never on master.
    if self._command in (u'create', u'update'):
      if self._active_branch == u'master':
        print(u'{0:s} aborted - active branch is master.'.format(
            self._command.title()))
        return False

    elif self._command == u'close':
      if self._feature_branch == u'master':
        print(u'{0:s} aborted - feature branch cannot be master.'.format(
            self._command.title()))
        return False

      # Close removes the feature branch, so move off of it first.
      # NOTE(review): the result of SwitchToMasterBranch is ignored here;
      # a failed checkout would go unnoticed — consider checking it.
      if self._active_branch != u'master':
        self._git_helper.SwitchToMasterBranch()
        self._active_branch = u'master'

    return True
1471
  def CheckRemoteGitState(self):
    """Checks the state of the remote git repository.

    Synchronizes with upstream or origin as required by the command and,
    for create/update, pushes the active branch to origin.

    Returns:
      bool: True if the state of the remote git repository is sane.
    """
    if self._command == u'close':
      if not self._git_helper.SynchronizeWithUpstream():
        print((
            u'{0:s} aborted - unable to synchronize with '
            u'upstream/master.').format(self._command.title()))
        return False

    elif self._command in (u'create', u'update'):
      if not self._git_helper.CheckSynchronizedWithUpstream():
        if not self._git_helper.SynchronizeWithUpstream():
          print((
              u'{0:s} aborted - unable to synchronize with '
              u'upstream/master.').format(self._command.title()))
          return False

        # The rebase rewrote history, so the branch must be force pushed.
        force_push = True
      else:
        force_push = False

      if not self._git_helper.PushToOrigin(
          self._active_branch, force=force_push):
        print(u'{0:s} aborted - unable to push updates to origin/{1:s}.'.format(
            self._command.title(), self._active_branch))
        return False

    elif self._command == u'lint':
      # Lint only warns about being out of sync; the result is ignored.
      self._git_helper.CheckSynchronizedWithUpstream()

    elif self._command == u'merge':
      if not self._git_helper.SynchronizeWithOrigin():
        print((
            u'{0:s} aborted - unable to synchronize with '
            u'origin/master.').format(self._command.title()))
        return False

    return True
1514
1515 def Close(self):
1516 """Closes a review.
1517
1518 Returns:
1519 bool: True if the close was successful.
1520 """
1521 review_file = ReviewFile(self._feature_branch)
1522 if not review_file.Exists():
1523 print(u'Review file missing for branch: {0:s}'.format(
1524 self._feature_branch))
1525 return False
1526
1527 if not self._git_helper.CheckHasBranch(self._feature_branch):
1528 print(u'No such feature branch: {0:s}'.format(self._feature_branch))
1529 else:
1530 self._git_helper.RemoveFeatureBranch(self._feature_branch)
1531
1532 codereview_issue_number = review_file.GetCodeReviewIssueNumber()
1533
1534 review_file.Remove()
1535
1536 if codereview_issue_number:
1537 if not self._codereview_helper.CloseIssue(codereview_issue_number):
1538 print(u'Unable to close code review: {0!s}'.format(
1539 codereview_issue_number))
1540 print((
1541 u'Close it manually on: https://codereview.appspot.com/'
1542 u'{0!s}').format(codereview_issue_number))
1543
1544 return True
1545
  def Create(self):
    """Creates a review.

    Creates a codereview issue for the active feature branch, records the
    issue number in a review file and opens a matching github pull request.
    Prompts on stdin for a review description unless no_confirm is set.

    Returns:
      bool: True if the create was successful.
    """
    review_file = ReviewFile(self._active_branch)
    if review_file.Exists():
      print(u'Review file already exists for branch: {0:s}'.format(
          self._active_branch))
      return False

    git_origin = self._git_helper.GetRemoteOrigin()
    if not git_origin.startswith(u'https://github.com/'):
      print(u'{0:s} aborted - unsupported git remote origin: {1:s}'.format(
          self._command.title(), git_origin))
      print(u'Make sure the git remote origin is hosted on github.com')
      return False

    # Strip the scheme and the repository name, keeping the account name.
    git_origin, _, _ = git_origin[len(u'https://github.com/'):].rpartition(u'/')

    netrc_file = NetRCFile()
    github_access_token = netrc_file.GetGitHubAccessToken()
    if not github_access_token:
      print(u'{0:s} aborted - unable to determine github access token.'.format(
          self._command.title()))
      print(u'Make sure .netrc is configured with a github access token.')
      return False

    last_commit_message = self._git_helper.GetLastCommitMessage()
    print(u'Automatic generated description of code review:')
    print(last_commit_message)
    print(u'')

    if self._no_confirm:
      user_input = None
    else:
      print(u'Enter a description for the code review or hit enter to use the')
      print(u'automatic generated one:')
      user_input = sys.stdin.readline()
      user_input = user_input.strip()

    # Fall back to the last commit message when no description was entered.
    if not user_input:
      description = last_commit_message
    else:
      description = user_input

    # Prefix the description with the project name for code review to make it
    # easier to distinguish between projects.
    code_review_description = u'[{0:s}] {1:s}'.format(
        self._project_name, description)

    codereview_issue_number = self._codereview_helper.CreateIssue(
        self._diffbase, code_review_description)
    if not codereview_issue_number:
      print(u'{0:s} aborted - unable to create codereview issue.'.format(
          self._command.title()))
      return False

    if not os.path.isdir(u'.review'):
      os.mkdir(u'.review')

    review_file.Create(codereview_issue_number)

    # The pull request head is formatted as "account:branch".
    create_github_origin = u'{0:s}:{1:s}'.format(
        git_origin, self._active_branch)
    if not self._github_helper.CreatePullRequest(
        github_access_token, codereview_issue_number, create_github_origin,
        description):
      # A failed pull request is not fatal since the review was created.
      print(u'Unable to create pull request.')

    return True
1618
1619 def InitializeHelpers(self):
1620 """Initializes the helper objects.
1621
1622 Returns:
1623 bool: True if the helper initialization was successful.
1624 """
1625 script_path = os.path.abspath(__file__)
1626
1627 self._project_helper = ProjectHelper(script_path)
1628
1629 self._project_name = self._project_helper.project_name
1630 if not self._project_name:
1631 print(u'{0:s} aborted - unable to determine project name.'.format(
1632 self._command.title()))
1633 return False
1634
1635 self._git_repo_url = b'https://github.com/log2timeline/{0:s}.git'.format(
1636 self._project_name)
1637
1638 self._git_helper = GitHelper(self._git_repo_url)
1639
1640 self._github_helper = GitHubHelper(u'log2timeline', self._project_name)
1641
1642 if self._command in (u'close', u'create', u'merge', u'update'):
1643 email_address = self._git_helper.GetEmailAddress()
1644 self._codereview_helper = CodeReviewHelper(
1645 email_address, no_browser=self._no_browser)
1646
1647 if self._command == u'merge':
1648 self._sphinxapidoc_helper = SphinxAPIDocHelper(
1649 self._project_name)
1650 # TODO: disable the version check for now since sphinx-apidoc 1.2.2
1651 # on Unbuntu 14.04 does not have the --version option. Re-enable when
1652 # sphinx-apidoc 1.2.3 or later is introduced.
1653 # if not self._sphinxapidoc_helper.CheckUpToDateVersion():
1654 # print((
1655 # u'{0:s} aborted - sphinx-apidoc verion 1.2.0 or later '
1656 # u'required.').format(self._command.title()))
1657 # return False
1658
1659 return True
1660
1661 def Lint(self):
1662 """Lints a review.
1663
1664 Returns:
1665 bool: True if linting was successful.
1666 """
1667 if self._project_name == u'l2tdocs':
1668 return True
1669
1670 if self._command not in (u'create', u'merge', u'lint', u'update'):
1671 return True
1672
1673 pylint_helper = PylintHelper()
1674 if not pylint_helper.CheckUpToDateVersion():
1675 print(u'{0:s} aborted - pylint verion 1.5.0 or later required.'.format(
1676 self._command.title()))
1677 return False
1678
1679 if self._command == u'merge':
1680 fork_git_repo_url = self._github_helper.GetForkGitRepoUrl(
1681 self._fork_username)
1682
1683 if not self._git_helper.PullFromFork(
1684 fork_git_repo_url, self._fork_feature_branch):
1685 print(u'{0:s} aborted - unable to pull changes from fork.'.format(
1686 self._command.title()))
1687 return False
1688
1689 if self._all_files:
1690 diffbase = None
1691 elif self._command == u'merge':
1692 diffbase = u'origin/master'
1693 else:
1694 diffbase = self._diffbase
1695
1696 changed_python_files = self._git_helper.GetChangedPythonFiles(
1697 diffbase=diffbase)
1698
1699 if not pylint_helper.CheckFiles(changed_python_files):
1700 print(u'{0:s} aborted - unable to pass linter.'.format(
1701 self._command.title()))
1702
1703 if self._command == u'merge':
1704 self._git_helper.DropUncommittedChanges()
1705 return False
1706
1707 return True
1708
  def Merge(self, codereview_issue_number):
    """Merges a review.

    Updates the version, dpkg changelog and API documentation files,
    commits the changes to origin in name of the review author and adds a
    merge message to the codereview issue. Uncommitted changes are dropped
    on failure.

    Args:
      codereview_issue_number (int|str): codereview issue number.

    Returns:
      bool: True if the merge was successful.
    """
    if not self._project_helper.UpdateVersionFile():
      print(u'Unable to update version file.')
      self._git_helper.DropUncommittedChanges()
      return False

    if not self._project_helper.UpdateDpkgChangelogFile():
      print(u'Unable to update dpkg changelog file.')
      self._git_helper.DropUncommittedChanges()
      return False

    # Only regenerate API docs for projects that use sphinx-apidoc.
    apidoc_config_path = os.path.join(u'docs', u'conf.py')
    if os.path.exists(apidoc_config_path):
      self._sphinxapidoc_helper.UpdateAPIDocs()
      self._git_helper.AddPath(u'docs')

    readthedocs_helper = ReadTheDocsHelper(self._project_name)

    # The project wiki repo contains the documentation and
    # has no trigger on update webhook for readthedocs.
    # So we trigger readthedocs directly to build the docs.
    readthedocs_helper.TriggerBuild()

    if not self._git_helper.CommitToOriginInNameOf(
        codereview_issue_number, self._merge_author, self._merge_description):
      print(u'Unable to commit changes.')
      self._git_helper.DropUncommittedChanges()
      return False

    commit_message = (
        u'Changes have been merged with master branch. '
        u'To close the review and clean up the feature branch you can run: '
        u'python ./utils/review.py close {0:s}').format(
            self._fork_feature_branch)
    self._codereview_helper.AddMergeMessage(
        codereview_issue_number, commit_message)

    return True
1755
  def Open(self, codereview_issue_number):
    """Opens a review.

    Args:
      codereview_issue_number (int|str): codereview issue number.

    Returns:
      bool: True if the open was successful. Currently always False since
          the open command has not been implemented.
    """
    # TODO: implement.
    # * check if feature branch exists
    # * check if review file exists
    # * check if issue number corresponds to branch by checking PR?
    # * create feature branch and pull changes from origin
    # * create review file
    _ = codereview_issue_number

    return False
1774
  def PrepareMerge(self, codereview_issue_number):
    """Prepares a merge.

    Queries the codereview issue and the github user of the fork to
    determine the merge description, the fork username and feature branch,
    and the author string used for the merge commit.

    Args:
      codereview_issue_number (int|str): codereview issue number.

    Returns:
      bool: True if the preparations were successful.
    """
    codereview_information = self._codereview_helper.QueryIssue(
        codereview_issue_number)
    if not codereview_information:
      print((
          u'{0:s} aborted - unable to retrieve code review: {1!s} '
          u'information.').format(
              self._command.title(), codereview_issue_number))
      return False

    self._merge_description = codereview_information.get(u'subject', None)
    if not self._merge_description:
      print((
          u'{0:s} aborted - unable to determine description of code review: '
          u'{1!s}.').format(
              self._command.title(), codereview_issue_number))
      return False

    # When merging remove the project name ("[project]") prefix from
    # the code review description.
    self._merge_description = self._PROJECT_NAME_PREFIX_REGEX.sub(
        u'', self._merge_description)

    merge_email_address = codereview_information.get(u'owner_email', None)
    if not merge_email_address:
      print((
          u'{0:s} aborted - unable to determine email address of owner of '
          u'code review: {1!s}.').format(
              self._command.title(), codereview_issue_number))
      return False

    # The github origin is formatted as "username:feature".
    self._fork_username, _, self._fork_feature_branch = (
        self._github_origin.partition(u':'))

    github_user_information = self._github_helper.QueryUser(
        self._fork_username)
    if not github_user_information:
      print((
          u'{0:s} aborted - unable to retrieve github user: {1:s} '
          u'information.').format(
              self._command.title(), self._fork_username))
      return False

    # Prefer the github profile name, then the codereview owner name and
    # finally the github company name for the commit author.
    merge_fullname = github_user_information.get(u'name', None)
    if not merge_fullname:
      merge_fullname = codereview_information.get(u'owner', None)
    if not merge_fullname:
      merge_fullname = github_user_information.get(u'company', None)
    if not merge_fullname:
      print((
          u'{0:s} aborted - unable to determine full name.').format(
              self._command.title()))
      return False

    self._merge_author = u'{0:s} <{1:s}>'.format(
        merge_fullname, merge_email_address)

    return True
1841
1842 def Test(self):
1843 """Tests a review.
1844
1845 Returns:
1846 bool: True if the tests were successful.
1847 """
1848 if self._project_name == u'l2tdocs':
1849 return True
1850
1851 if self._command not in (u'create', u'merge', u'test', u'update'):
1852 return True
1853
1854 # TODO: determine why this alters the behavior of argparse.
1855 # Currently affects this script being used in plaso.
1856 command = u'{0:s} run_tests.py'.format(sys.executable)
1857 exit_code = subprocess.call(command, shell=True)
1858 if exit_code != 0:
1859 print(u'{0:s} aborted - unable to pass tests.'.format(
1860 self._command.title()))
1861
1862 if self._command == u'merge':
1863 self._git_helper.DropUncommittedChanges()
1864 return False
1865
1866 return True
1867
  def Update(self):
    """Updates a review.

    Uploads the changes of the active feature branch to the code review
    tracked by the branch review file.

    Returns:
      bool: True if the update was successful.
    """
    # The codereview issue number is tracked in the review file of the
    # active feature branch.
    review_file = ReviewFile(self._active_branch)
    if not review_file.Exists():
      print(u'Review file missing for branch: {0:s}'.format(
          self._active_branch))
      return False

    codereview_issue_number = review_file.GetCodeReviewIssueNumber()

    # The last commit message is used as the default update description.
    last_commit_message = self._git_helper.GetLastCommitMessage()
    print(u'Automatic generated description of the update:')
    print(last_commit_message)
    print(u'')

    if self._no_confirm:
      user_input = None
    else:
      print(u'Enter a description for the update or hit enter to use the')
      print(u'automatic generated one:')
      user_input = sys.stdin.readline()
      user_input = user_input.strip()

    if not user_input:
      description = last_commit_message
    else:
      description = user_input

    if not self._codereview_helper.UpdateIssue(
        codereview_issue_number, self._diffbase, description):
      print(u'Unable to update code review: {0!s}'.format(
          codereview_issue_number))
      return False

    return True
1907
1908 def UpdateAuthors(self):
1909 """Updates the authors.
1910
1911 Returns:
1912 bool: True if the authors update was successful.
1913 """
1914 if self._project_name == u'l2tdocs':
1915 return True
1916
1917 if not self._project_helper.UpdateAuthorsFile():
1918 print(u'Unable to update authors file.')
1919 return False
1920
1921 return True
1922
1923 def UpdateVersion(self):
1924 """Updates the version.
1925
1926 Returns:
1927 bool: True if the version update was successful.
1928 """
1929 if self._project_name == u'l2tdocs':
1930 return True
1931
1932 if not self._project_helper.UpdateVersionFile():
1933 print(u'Unable to update version file.')
1934 return False
1935
1936 if not self._project_helper.UpdateDpkgChangelogFile():
1937 print(u'Unable to update dpkg changelog file.')
1938 return False
1939
1940 return True
1941
1942
def Main():
  """The main program function.

  Parses the command line arguments, validates the command specific
  arguments and dispatches to the corresponding review helper method.

  Returns:
    bool: True if successful or False if not.
  """
  argument_parser = argparse.ArgumentParser(
      description=u'Script to manage code reviews.')

  # TODO: add option to directly pass code review issue number.

  argument_parser.add_argument(
      u'--allfiles', u'--all-files', u'--all_files', dest=u'all_files',
      action=u'store_true', default=False, help=(
          u'Apply command to all files, currently only affects the lint '
          u'command.'))

  argument_parser.add_argument(
      u'--diffbase', dest=u'diffbase', action=u'store', type=str,
      metavar=u'DIFFBASE', default=u'upstream/master', help=(
          u'The diffbase the default is upstream/master. This options is used '
          u'to indicate to what "base" the code changes are relative to and '
          u'can be used to "chain" code reviews.'))

  argument_parser.add_argument(
      u'--nobrowser', u'--no-browser', u'--no_browser', dest=u'no_browser',
      action=u'store_true', default=False, help=(
          u'Disable the functionality to use the webbrowser to get the OAuth '
          u'token should be disabled.'))

  argument_parser.add_argument(
      u'--noconfirm', u'--no-confirm', u'--no_confirm', dest=u'no_confirm',
      action=u'store_true', default=False, help=(
          u'Do not ask for confirmation apply defaults.\n'
          u'WARNING: only use this when you are familiar with the defaults.'))

  argument_parser.add_argument(
      u'--offline', dest=u'offline', action=u'store_true', default=False, help=(
          u'The review script is running offline and any online check is '
          u'skipped.'))

  # One sub command parser per supported command.
  commands_parser = argument_parser.add_subparsers(dest=u'command')

  close_command_parser = commands_parser.add_parser(u'close')

  # TODO: add this to help output.
  close_command_parser.add_argument(
      u'branch', action=u'store', metavar=u'BRANCH', default=None,
      help=u'name of the corresponding feature branch.')

  commands_parser.add_parser(u'create')

  merge_command_parser = commands_parser.add_parser(u'merge')

  # TODO: add this to help output.
  merge_command_parser.add_argument(
      u'codereview_issue_number', action=u'store',
      metavar=u'CODEREVIEW_ISSUE_NUMBER', default=None,
      help=u'the codereview issue number to be merged.')

  # TODO: add this to help output.
  merge_command_parser.add_argument(
      u'github_origin', action=u'store',
      metavar=u'GITHUB_ORIGIN', default=None,
      help=u'the github origin to merged e.g. username:feature.')

  commands_parser.add_parser(u'lint')

  open_command_parser = commands_parser.add_parser(u'open')

  # TODO: add this to help output.
  open_command_parser.add_argument(
      u'codereview_issue_number', action=u'store',
      metavar=u'CODEREVIEW_ISSUE_NUMBER', default=None,
      help=u'the codereview issue number to be opened.')

  # TODO: add this to help output.
  open_command_parser.add_argument(
      u'branch', action=u'store', metavar=u'BRANCH', default=None,
      help=u'name of the corresponding feature branch.')

  # TODO: add submit option?

  commands_parser.add_parser(u'test')

  # TODO: add dry-run option to run merge without commit.
  # useful to test pending CLs.

  commands_parser.add_parser(u'update')

  commands_parser.add_parser(u'update-authors')
  commands_parser.add_parser(u'update_authors')

  commands_parser.add_parser(u'update-version')
  commands_parser.add_parser(u'update_version')

  options = argument_parser.parse_args()

  codereview_issue_number = None
  feature_branch = None
  github_origin = None

  # Validate the command specific arguments.
  print_help_on_error = False
  if options.command in (u'close', u'open'):
    feature_branch = getattr(options, u'branch', None)
    if not feature_branch:
      print(u'Feature branch value is missing.')
      print_help_on_error = True

    # Support "username:branch" notation.
    if u':' in feature_branch:
      _, _, feature_branch = feature_branch.rpartition(u':')

  if options.command in (u'merge', u'open'):
    codereview_issue_number = getattr(
        options, u'codereview_issue_number', None)
    if not codereview_issue_number:
      print(u'Codereview issue number value is missing.')
      print_help_on_error = True

  if options.command == u'merge':
    github_origin = getattr(options, u'github_origin', None)
    if not github_origin:
      print(u'Github origin value is missing.')
      print_help_on_error = True

  # Only the lint and test commands can run offline.
  if options.offline and options.command not in (u'lint', u'test'):
    print(u'Cannot run: {0:s} in offline mode.'.format(options.command))
    print_help_on_error = True

  if print_help_on_error:
    print(u'')
    argument_parser.print_help()
    print(u'')
    return False

  # Refuse to run without a .netrc file.
  # NOTE(review): presumably the .netrc file provides the credentials the
  # helpers need - confirm against the helper implementations.
  home_path = os.path.expanduser(u'~')
  netrc_path = os.path.join(home_path, u'.netrc')
  if not os.path.exists(netrc_path):
    print(u'{0:s} aborted - unable to find .netrc.'.format(
        options.command.title()))
    return False

  review_helper = ReviewHelper(
      options.command, github_origin, feature_branch,
      options.diffbase, all_files=options.all_files,
      no_browser=options.no_browser, no_confirm=options.no_confirm)

  if not review_helper.InitializeHelpers():
    return False

  if not review_helper.CheckLocalGitState():
    return False

  if not options.offline and not review_helper.CheckRemoteGitState():
    return False

  if options.command == u'merge':
    if not review_helper.PrepareMerge(codereview_issue_number):
      return False

  if not review_helper.Lint():
    return False

  if not review_helper.Test():
    return False

  # Dispatch to the command specific helper method.
  result = False
  if options.command == u'create':
    result = review_helper.Create()

  elif options.command == u'close':
    result = review_helper.Close()

  elif options.command in (u'lint', u'test'):
    result = True

  elif options.command == u'merge':
    result = review_helper.Merge(codereview_issue_number)

  elif options.command == u'open':
    result = review_helper.Open(codereview_issue_number)

  elif options.command == u'update':
    result = review_helper.Update()

  elif options.command in (u'update-authors', u'update_authors'):
    result = review_helper.UpdateAuthors()

  elif options.command in (u'update-version', u'update_version'):
    result = review_helper.UpdateVersion()

  return result
2136
2137
if __name__ == u'__main__':
  # Exit with status 0 on success and 1 on failure.
  sys.exit(0 if Main() else 1)
0 #!/usr/bin/env python
1 # coding: utf-8
2 #
3 # This file originates from the Rietveld project:
4 # https://code.google.com/p/rietveld/
5 #
6 # Copyright 2007 Google Inc.
7 #
8 # Licensed under the Apache License, Version 2.0 (the "License");
9 # you may not use this file except in compliance with the License.
10 # You may obtain a copy of the License at
11 #
12 # http://www.apache.org/licenses/LICENSE-2.0
13 #
14 # Unless required by applicable law or agreed to in writing, software
15 # distributed under the License is distributed on an "AS IS" BASIS,
16 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 # See the License for the specific language governing permissions and
18 # limitations under the License.
19
20 """Tool for uploading diffs from a version control system to the codereview app.
21
22 Usage summary: upload.py [options] [-- diff_options] [path...]
23
24 Diff options are passed to the diff command of the underlying system.
25
26 Supported version control systems:
27 Git
28 Mercurial
29 Subversion
30 Perforce
31 CVS
32
33 It is important for Git/Mercurial users to specify a tree/node/branch to diff
34 against by using the '--rev' option.
35 """
36 # This code is derived from appcfg.py in the App Engine SDK (open source),
37 # and from ASPN recipe #146306.
38
39 import BaseHTTPServer
40 import ConfigParser
41 import cookielib
42 import errno
43 import fnmatch
44 import getpass
45 import logging
46 import marshal
47 import mimetypes
48 import optparse
49 import os
50 import re
51 import socket
52 import subprocess
53 import sys
54 import urllib
55 import urllib2
56 import urlparse
57 import webbrowser
58
59 # The md5 module was deprecated in Python 2.5.
60 try:
61 from hashlib import md5
62 except ImportError:
63 from md5 import md5
64
65 try:
66 import readline
67 except ImportError:
68 pass
69
70 try:
71 import keyring
72 except ImportError:
73 keyring = None
74
# The logging verbosity:
#  0: Errors only.
#  1: Status messages.
#  2: Info logs.
#  3: Debug logs.
verbosity = 1

# The account type used for authentication.
# This line could be changed by the review server (see handler for
# upload.py).
AUTH_ACCOUNT_TYPE = "GOOGLE"

# URL of the default review server. As for AUTH_ACCOUNT_TYPE, this line could be
# changed by the review server (see handler for upload.py).
DEFAULT_REVIEW_SERVER = "codereview.appspot.com"

# Max size of patch or base file.
MAX_UPLOAD_SIZE = 900 * 1024

# Constants for version control names. Used by GuessVCSName.
VCS_GIT = "Git"
VCS_MERCURIAL = "Mercurial"
VCS_SUBVERSION = "Subversion"
VCS_PERFORCE = "Perforce"
VCS_CVS = "CVS"
VCS_UNKNOWN = "Unknown"

# Maps lowercase VCS aliases to their canonical names.
VCS_ABBREVIATIONS = {
    VCS_MERCURIAL.lower(): VCS_MERCURIAL,
    "hg": VCS_MERCURIAL,
    VCS_SUBVERSION.lower(): VCS_SUBVERSION,
    "svn": VCS_SUBVERSION,
    VCS_PERFORCE.lower(): VCS_PERFORCE,
    "p4": VCS_PERFORCE,
    VCS_GIT.lower(): VCS_GIT,
    VCS_CVS.lower(): VCS_CVS,
}

# OAuth 2.0-Related Constants
LOCALHOST_IP = '127.0.0.1'
DEFAULT_OAUTH2_PORT = 8001
ACCESS_TOKEN_PARAM = 'access_token'
ERROR_PARAM = 'error'
OAUTH_DEFAULT_ERROR_MESSAGE = 'OAuth 2.0 error occurred.'
OAUTH_PATH = '/get-access-token'
OAUTH_PATH_PORT_TEMPLATE = OAUTH_PATH + '?port=%(port)d'
# HTML page served to the browser once the access token was received;
# the embedded script closes the browser window.
AUTH_HANDLER_RESPONSE = """\
<html>
  <head>
    <title>Authentication Status</title>
    <script>
    window.onload = function() {
      window.close();
    }
    </script>
  </head>
  <body>
    <p>The authentication flow has completed.</p>
  </body>
</html>
"""
# Borrowed from google-api-python-client
OPEN_LOCAL_MESSAGE_TEMPLATE = """\
Your browser has been opened to visit:

    %s

If your browser is on a different machine then exit and re-run
upload.py with the command-line parameter

  --no_oauth2_webbrowser
"""
NO_OPEN_LOCAL_MESSAGE_TEMPLATE = """\
Go to the following link in your browser:

    %s

and copy the access token.
"""

# The result of parsing Subversion's [auto-props] setting.
svn_auto_props_map = None
157
def GetEmail(prompt):
  """Prompts the user for their email address and returns it.

  The last used email address is saved to a file and offered up as a suggestion
  to the user. If the user presses enter without typing in anything the last
  used email address is used. If the user enters a new address, it is saved
  for next time we prompt.

  Args:
    prompt: The string used to prompt the user for the email address.

  Returns:
    The email address entered by the user, or the saved last used address
    when the user enters nothing.
  """
  last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
  last_email = ""
  if os.path.exists(last_email_file_name):
    try:
      last_email_file = open(last_email_file_name, "r")
      last_email = last_email_file.readline().strip("\n")
      last_email_file.close()
      prompt += " [%s]" % last_email
    # The exception instance was never used; also "except IOError, e:" is
    # Python 2-only syntax.
    except IOError:
      # Reading the cached address is best-effort only.
      pass
  email = raw_input(prompt + ": ").strip()
  if email:
    try:
      last_email_file = open(last_email_file_name, "w")
      last_email_file.write(email)
      last_email_file.close()
    except IOError:
      # Failing to cache the address is not fatal.
      pass
  else:
    email = last_email
  return email
188
189
def StatusUpdate(msg):
  """Print a status message to stdout.

  If 'verbosity' is greater than 0, print the message.

  Args:
    msg: The string to print.
  """
  if verbosity <= 0:
    return
  sys.stdout.write(msg + "\n")
200
201
def ErrorExit(msg):
  """Print an error message to stderr and exit with status 1."""
  sys.stderr.write(msg + "\n")
  sys.exit(1)
206
207
class ClientLoginError(urllib2.HTTPError):
  """Raised to indicate there was an error authenticating with ClientLogin."""

  def __init__(self, url, code, msg, headers, args):
    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
    self.args = args
    self.info = args.get("Info")
    # reason is a property on python 2.7 but a member variable on <=2.6.
    # self.args is modified so it cannot be used as-is so save the value in
    # self._reason.
    self._reason = args["Error"]

  @property
  def reason(self):
    """The ClientLogin error reason string."""
    return self._reason
223
224
class AbstractRpcServer(object):
  """Provides a common interface for a simple RPC server."""

  def __init__(self, host, auth_function, host_override=None,
               extra_headers=None, save_cookies=False,
               account_type=AUTH_ACCOUNT_TYPE):
    """Creates a new AbstractRpcServer.

    Args:
      host: The host to send requests to.
      auth_function: A function that takes no arguments and returns an
        (email, password) tuple when called. Will be called if authentication
        is required.
      host_override: The host header to send to the server (defaults to host).
      extra_headers: A dict of extra headers to append to every request.
      save_cookies: If True, save the authentication cookies to local disk.
        If False, use an in-memory cookiejar instead. Subclasses must
        implement this functionality. Defaults to False.
      account_type: Account type used for authentication. Defaults to
        AUTH_ACCOUNT_TYPE.
    """
    self.host = host
    # Make sure the host has an URL scheme.
    if (not self.host.startswith("http://") and
        not self.host.startswith("https://")):
      self.host = "http://" + self.host
    self.host_override = host_override
    self.auth_function = auth_function
    self.authenticated = False
    self.extra_headers = extra_headers or {}
    self.save_cookies = save_cookies
    self.account_type = account_type
    self.opener = self._GetOpener()
    if self.host_override:
      logging.info("Server: %s; Host: %s", self.host, self.host_override)
    else:
      logging.info("Server: %s", self.host)

  def _GetOpener(self):
    """Returns an OpenerDirector for making HTTP requests.

    Returns:
      A urllib2.OpenerDirector object.
    """
    raise NotImplementedError()

  def _CreateRequest(self, url, data=None):
    """Creates a new urllib request.

    Args:
      url: The URL to request.
      data: The optional request payload.

    Returns:
      A urllib2.Request with the host override and extra headers applied.
    """
    logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
    req = urllib2.Request(url, data=data, headers={"Accept": "text/plain"})
    if self.host_override:
      req.add_header("Host", self.host_override)
    for key, value in self.extra_headers.iteritems():
      req.add_header(key, value)
    return req

  def _GetAuthToken(self, email, password):
    """Uses ClientLogin to authenticate the user, returning an auth token.

    Args:
      email: The user's email address
      password: The user's password

    Raises:
      ClientLoginError: If there was an error authenticating with ClientLogin.
      HTTPError: If there was some other form of HTTP error.

    Returns:
      The authentication token returned by ClientLogin.
    """
    account_type = self.account_type
    if self.host.endswith(".google.com"):
      # Needed for use inside Google.
      account_type = "HOSTED"
    req = self._CreateRequest(
        url="https://www.google.com/accounts/ClientLogin",
        data=urllib.urlencode({
            "Email": email,
            "Passwd": password,
            "service": "ah",
            "source": "rietveld-codereview-upload",
            "accountType": account_type,
        }),
    )
    try:
      response = self.opener.open(req)
      response_body = response.read()
      # The ClientLogin response consists of key=value pairs, one per line.
      response_dict = dict(x.split("=")
                           for x in response_body.split("\n") if x)
      return response_dict["Auth"]
    except urllib2.HTTPError, e:
      if e.code == 403:
        body = e.read()
        response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
        raise ClientLoginError(req.get_full_url(), e.code, e.msg,
                               e.headers, response_dict)
      else:
        raise

  def _GetAuthCookie(self, auth_token):
    """Fetches authentication cookies for an authentication token.

    Args:
      auth_token: The authentication token returned by ClientLogin.

    Raises:
      HTTPError: If there was an error fetching the authentication cookies.
    """
    # This is a dummy value to allow us to identify when we're successful.
    continue_location = "http://localhost/"
    args = {"continue": continue_location, "auth": auth_token}
    req = self._CreateRequest("%s/_ah/login?%s" %
                              (self.host, urllib.urlencode(args)))
    try:
      response = self.opener.open(req)
    except urllib2.HTTPError, e:
      response = e
    # On success the server redirects (302) to the dummy continue location.
    if (response.code != 302 or
        response.info()["location"] != continue_location):
      raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
                              response.headers, response.fp)
    self.authenticated = True

  def _Authenticate(self):
    """Authenticates the user.

    The authentication process works as follows:
     1) We get a username and password from the user
     2) We use ClientLogin to obtain an AUTH token for the user
        (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
     3) We pass the auth token to /_ah/login on the server to obtain an
        authentication cookie. If login was successful, it tries to redirect
        us to the URL we provided.

    If we attempt to access the upload API without first obtaining an
    authentication cookie, it returns a 401 response (or a 302) and
    directs us to authenticate ourselves with ClientLogin.
    """
    # Allow at most 3 attempts to obtain valid credentials.
    for i in range(3):
      credentials = self.auth_function()
      try:
        auth_token = self._GetAuthToken(credentials[0], credentials[1])
      except ClientLoginError, e:
        print >>sys.stderr, ''
        if e.reason == "BadAuthentication":
          if e.info == "InvalidSecondFactor":
            print >>sys.stderr, (
                "Use an application-specific password instead "
                "of your regular account password.\n"
                "See http://www.google.com/"
                "support/accounts/bin/answer.py?answer=185833")
          else:
            print >>sys.stderr, "Invalid username or password."
        elif e.reason == "CaptchaRequired":
          print >>sys.stderr, (
              "Please go to\n"
              "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
              "and verify you are a human. Then try again.\n"
              "If you are using a Google Apps account the URL is:\n"
              "https://www.google.com/a/yourdomain.com/UnlockCaptcha")
        elif e.reason == "NotVerified":
          print >>sys.stderr, "Account not verified."
        elif e.reason == "TermsNotAgreed":
          print >>sys.stderr, "User has not agreed to TOS."
        elif e.reason == "AccountDeleted":
          print >>sys.stderr, "The user account has been deleted."
        elif e.reason == "AccountDisabled":
          print >>sys.stderr, "The user account has been disabled."
          break
        elif e.reason == "ServiceDisabled":
          print >>sys.stderr, ("The user's access to the service has been "
                               "disabled.")
        elif e.reason == "ServiceUnavailable":
          print >>sys.stderr, "The service is not available; try again later."
        else:
          # Unknown error.
          raise
        print >>sys.stderr, ''
        continue
      self._GetAuthCookie(auth_token)
      return

  def Send(self, request_path, payload=None,
           content_type="application/octet-stream",
           timeout=None,
           extra_headers=None,
           **kwargs):
    """Sends an RPC and returns the response.

    Args:
      request_path: The path to send the request to, eg /api/appversion/create.
      payload: The body of the request, or None to send an empty request.
      content_type: The Content-Type header to use.
      timeout: timeout in seconds; default None i.e. no timeout.
        (Note: for large requests on OS X, the timeout doesn't work right.)
      extra_headers: Dict containing additional HTTP headers that should be
        included in the request (string header names mapped to their values),
        or None to not include any additional headers.
      kwargs: Any keyword arguments are converted into query string parameters.

    Returns:
      The response body, as a string.
    """
    # TODO: Don't require authentication.  Let the server say
    # whether it is necessary.
    if not self.authenticated:
      self._Authenticate()

    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
      tries = 0
      # Retry on authentication (401/302) and permanent redirect (301)
      # errors, for at most 3 tries.
      while True:
        tries += 1
        args = dict(kwargs)
        url = "%s%s" % (self.host, request_path)
        if args:
          url += "?" + urllib.urlencode(args)
        req = self._CreateRequest(url=url, data=payload)
        req.add_header("Content-Type", content_type)
        if extra_headers:
          for header, value in extra_headers.items():
            req.add_header(header, value)
        try:
          f = self.opener.open(req)
          response = f.read()
          f.close()
          return response
        except urllib2.HTTPError, e:
          if tries > 3:
            raise
          elif e.code == 401 or e.code == 302:
            self._Authenticate()
          elif e.code == 301:
            # Handle permanent redirect manually.
            url = e.info()["location"]
            url_loc = urlparse.urlparse(url)
            self.host = '%s://%s' % (url_loc[0], url_loc[1])
          elif e.code >= 500:
            ErrorExit(e.read())
          else:
            raise
    finally:
      # Always restore the previous global socket timeout.
      socket.setdefaulttimeout(old_timeout)
468
469
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""

  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    # OAuth 2.0 authentication: attach the access token as a header
    # instead of using cookies.
    # NOTE(review): OAuth2Creds is defined elsewhere in this file - verify.
    if isinstance(self.auth_function, OAuth2Creds):
      access_token = self.auth_function()
      if access_token is not None:
        self.extra_headers['Authorization'] = 'OAuth %s' % (access_token,)
        self.authenticated = True
    else:
      super(HttpRpcServer, self)._Authenticate()
      if self.save_cookies:
        StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
        self.cookie_jar.save()

  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
        # Always chmod the cookie file
        os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
522
523
class CondensedHelpFormatter(optparse.IndentedHelpFormatter):
  """Help formatter that frees more horizontal space.

  Group options are not indented and arguments are collapsed between
  short and long variants, e.g. '-o ARG, --opt=ARG' becomes '-o --opt ARG'.
  """

  def format_heading(self, heading):
    """Formats a group heading without trailing indentation changes."""
    return heading + ":\n"

  def format_option(self, option):
    """Formats an option one indentation level to the left."""
    self.dedent()
    formatted_option = optparse.HelpFormatter.format_option(self, option)
    self.indent()
    return formatted_option

  def format_option_strings(self, option):
    """Formats the option strings, collapsing short and long variants."""
    self.set_long_opt_delimiter(" ")
    option_string = optparse.HelpFormatter.format_option_strings(self, option)
    variants = option_string.split(", ")
    if len(variants) <= 1:
      return option_string

    if option.takes_value():
      # Strip the METAVAR from all but the last variant.
      variants = (
          [variant.split()[0] for variant in variants[:-1]] + variants[-1:])
    return " ".join(variants)
548
549
550 parser = optparse.OptionParser(
551 usage=("%prog [options] [-- diff_options] [path...]\n"
552 "See also: http://code.google.com/p/rietveld/wiki/UploadPyUsage"),
553 add_help_option=False,
554 formatter=CondensedHelpFormatter()
555 )
556 parser.add_option("-h", "--help", action="store_true",
557 help="Show this help message and exit.")
558 parser.add_option("-y", "--assume_yes", action="store_true",
559 dest="assume_yes", default=False,
560 help="Assume that the answer to yes/no questions is 'yes'.")
561 # Logging
562 group = parser.add_option_group("Logging options")
563 group.add_option("-q", "--quiet", action="store_const", const=0,
564 dest="verbose", help="Print errors only.")
565 group.add_option("-v", "--verbose", action="store_const", const=2,
566 dest="verbose", default=1,
567 help="Print info level logs.")
568 group.add_option("--noisy", action="store_const", const=3,
569 dest="verbose", help="Print all logs.")
570 group.add_option("--print_diffs", dest="print_diffs", action="store_true",
571 help="Print full diffs.")
572 # Review server
573 group = parser.add_option_group("Review server options")
574 group.add_option("-s", "--server", action="store", dest="server",
575 default=DEFAULT_REVIEW_SERVER,
576 metavar="SERVER",
577 help=("The server to upload to. The format is host[:port]. "
578 "Defaults to '%default'."))
579 group.add_option("-e", "--email", action="store", dest="email",
580 metavar="EMAIL", default=None,
581 help="The username to use. Will prompt if omitted.")
582 group.add_option("-H", "--host", action="store", dest="host",
583 metavar="HOST", default=None,
584 help="Overrides the Host header sent with all RPCs.")
585 group.add_option("--no_cookies", action="store_false",
586 dest="save_cookies", default=True,
587 help="Do not save authentication cookies to local disk.")
588 group.add_option("--oauth2", action="store_true",
589 dest="use_oauth2", default=False,
590 help="Use OAuth 2.0 instead of a password.")
591 group.add_option("--oauth2_port", action="store", type="int",
592 dest="oauth2_port", default=DEFAULT_OAUTH2_PORT,
593 help=("Port to use to handle OAuth 2.0 redirect. Must be an "
594 "integer in the range 1024-49151, defaults to "
595 "'%default'."))
596 group.add_option("--no_oauth2_webbrowser", action="store_false",
597 dest="open_oauth2_local_webbrowser", default=True,
598 help="Don't open a browser window to get an access token.")
599 group.add_option("--account_type", action="store", dest="account_type",
600 metavar="TYPE", default=AUTH_ACCOUNT_TYPE,
601 choices=["GOOGLE", "HOSTED"],
602 help=("Override the default account type "
603 "(defaults to '%default', "
604 "valid choices are 'GOOGLE' and 'HOSTED')."))
605 # Issue
606 group = parser.add_option_group("Issue options")
607 group.add_option("-t", "--title", action="store", dest="title",
608 help="New issue subject or new patch set title")
609 group.add_option("-m", "--message", action="store", dest="message",
610 default=None,
611 help="New issue description or new patch set message")
612 group.add_option("-F", "--file", action="store", dest="file",
613 default=None, help="Read the message above from file.")
614 group.add_option("-r", "--reviewers", action="store", dest="reviewers",
615 metavar="REVIEWERS", default=None,
616 help="Add reviewers (comma separated email addresses).")
617 group.add_option("--cc", action="store", dest="cc",
618 metavar="CC", default='log2timeline-dev@googlegroups.com',
619 help="Add CC (comma separated email addresses).")
620 group.add_option("--private", action="store_true", dest="private",
621 default=False,
622 help="Make the issue restricted to reviewers and those CCed")
623 # Upload options
624 group = parser.add_option_group("Patch options")
625 group.add_option("-i", "--issue", type="int", action="store",
626 metavar="ISSUE", default=None,
627 help="Issue number to which to add. Defaults to new issue.")
628 group.add_option("--cache", action="store_true", dest="add_cache",
629 default=False, help="Add git cache parameter for new files.")
630 group.add_option("--base_url", action="store", dest="base_url", default=None,
631 help="Base URL path for files (listed as \"Base URL\" when "
632 "viewing issue). If omitted, will be guessed automatically "
633 "for SVN repos and left blank for others.")
634 group.add_option("--download_base", action="store_true",
635 dest="download_base", default=False,
636 help="Base files will be downloaded by the server "
637 "(side-by-side diffs may not work on files with CRs).")
638 group.add_option("--rev", action="store", dest="revision",
639 metavar="REV", default=None,
640 help="Base revision/branch/tree to diff against. Use "
641 "rev1:rev2 range to review already committed changeset.")
642 group.add_option("--send_mail", action="store_true",
643 dest="send_mail", default=False,
644 help="Send notification email to reviewers.")
645 group.add_option("-p", "--send_patch", action="store_true",
646 dest="send_patch", default=False,
647 help="Same as --send_mail, but include diff as an "
648 "attachment, and prepend email subject with 'PATCH:'.")
649 group.add_option("--vcs", action="store", dest="vcs",
650 metavar="VCS", default=None,
651 help=("Version control system (optional, usually upload.py "
652 "already guesses the right VCS)."))
653 group.add_option("--emulate_svn_auto_props", action="store_true",
654 dest="emulate_svn_auto_props", default=False,
655 help=("Emulate Subversion's auto properties feature."))
656 # Git-specific
657 group = parser.add_option_group("Git-specific options")
658 group.add_option("--git_similarity", action="store", dest="git_similarity",
659 metavar="SIM", type="int", default=50,
660 help=("Set the minimum similarity index for detecting renames "
661 "and copies. See `git diff -C`. (default 50)."))
662 group.add_option("--git_no_find_copies", action="store_false", default=True,
663 dest="git_find_copies",
664 help=("Prevents git from looking for copies (default off)."))
665 # Perforce-specific
666 group = parser.add_option_group("Perforce-specific options "
667 "(overrides P4 environment variables)")
668 group.add_option("--p4_port", action="store", dest="p4_port",
669 metavar="P4_PORT", default=None,
670 help=("Perforce server and port (optional)"))
671 group.add_option("--p4_changelist", action="store", dest="p4_changelist",
672 metavar="P4_CHANGELIST", default=None,
673 help=("Perforce changelist id"))
674 group.add_option("--p4_client", action="store", dest="p4_client",
675 metavar="P4_CLIENT", default=None,
676 help=("Perforce client/workspace"))
677 group.add_option("--p4_user", action="store", dest="p4_user",
678 metavar="P4_USER", default=None,
679 help=("Perforce user"))
680
681
682 # OAuth 2.0 Methods and Helpers
class ClientRedirectServer(BaseHTTPServer.HTTPServer):
  """A server for redirects back to localhost from the associated server.

  Waits for a single request and parses the query parameters for an access token
  or an error and then stops serving.
  """
  # Access token parsed from the redirect request by
  # ClientRedirectHandler.SetResponseValue; None until one is received.
  access_token = None
  # Error string parsed from the redirect request; None if no error was passed.
  error = None
691
692
class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """A handler for redirects back to localhost from the associated server.

  Waits for a single request and parses the query parameters into the server's
  access_token or error and then stops serving.
  """

  def SetResponseValue(self):
    """Stores the access token or error from the request on the server.

    Will only do this if exactly one query parameter was passed in to the
    request and that query parameter used 'access_token' or 'error' as the key.
    """
    query_string = urlparse.urlparse(self.path).query
    query_params = urlparse.parse_qs(query_string)

    if len(query_params) == 1:
      # "in" instead of the deprecated dict.has_key(); behavior is identical.
      if ACCESS_TOKEN_PARAM in query_params:
        access_token_list = query_params[ACCESS_TOKEN_PARAM]
        if len(access_token_list) == 1:
          self.server.access_token = access_token_list[0]
      else:
        error_list = query_params.get(ERROR_PARAM, [])
        if len(error_list) == 1:
          self.server.error = error_list[0]

  def do_GET(self):
    """Handle a GET request.

    Parses and saves the query parameters and prints a message that the server
    has completed its lone task (handling a redirect).

    Note that we can't detect if an error occurred.
    """
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    self.SetResponseValue()
    self.wfile.write(AUTH_HANDLER_RESPONSE)

  def log_message(self, format, *args):
    """Do not log messages to stdout while running as command line program."""
    pass
736
737
def OpenOAuth2ConsentPage(server=DEFAULT_REVIEW_SERVER,
                          port=DEFAULT_OAUTH2_PORT):
  """Opens the OAuth 2.0 consent page or prints instructions how to.

  Uses the webbrowser module to open the OAuth server side page in a browser.

  Args:
    server: String containing the review server URL. Defaults to
      DEFAULT_REVIEW_SERVER.
    port: Integer, the port where the localhost server receiving the redirect
      is serving. Defaults to DEFAULT_OAUTH2_PORT.

  Returns:
    A boolean indicating whether the page opened successfully.
  """
  parsed_url = urlparse.urlparse(server)
  scheme = parsed_url[0] or 'https'
  if scheme != 'https':
    ErrorExit('Using OAuth requires a review server with SSL enabled.')
  # When no scheme was given on the command line the server address ends up
  # in parsed_url.path rather than in netloc.
  host = parsed_url[1] or parsed_url[2]
  consent_page = '%s://%s%s' % (
      scheme, host, OAUTH_PATH_PORT_TEMPLATE % {'port': port})
  page_opened = webbrowser.open(consent_page, new=1, autoraise=True)
  if page_opened:
    print(OPEN_LOCAL_MESSAGE_TEMPLATE % (consent_page,))
  return page_opened
766
767
def WaitForAccessToken(port=DEFAULT_OAUTH2_PORT):
  """Spins up a simple HTTP Server to handle a single request.

  Intended to handle a single redirect from the production server after the
  user authenticated via OAuth 2.0 with the server.

  Args:
    port: Integer, the port where the localhost server receiving the redirect
      is serving. Defaults to DEFAULT_OAUTH2_PORT.

  Returns:
    The access token passed to the localhost server, or None if no access token
    was passed.
  """
  redirect_server = ClientRedirectServer((LOCALHOST_IP, port),
                                         ClientRedirectHandler)
  # Serve exactly one request (the OAuth redirect), then hand control back
  # to the caller.
  redirect_server.handle_request()
  if redirect_server.access_token is None:
    ErrorExit(redirect_server.error or OAUTH_DEFAULT_ERROR_MESSAGE)
  return redirect_server.access_token
789
790
def GetAccessToken(server=DEFAULT_REVIEW_SERVER, port=DEFAULT_OAUTH2_PORT,
                   open_local_webbrowser=True):
  """Gets an Access Token for the current user.

  Args:
    server: String containing the review server URL. Defaults to
      DEFAULT_REVIEW_SERVER.
    port: Integer, the port where the localhost server receiving the redirect
      is serving. Defaults to DEFAULT_OAUTH2_PORT.
    open_local_webbrowser: Boolean, defaults to True. If set, opens a page in
      the user's browser.

  Returns:
    A string access token that was sent to the local server. If the serving page
    via WaitForAccessToken does not receive an access token, this method
    returns None.
  """
  access_token = None
  if open_local_webbrowser:
    page_opened = OpenOAuth2ConsentPage(server=server, port=port)
    if page_opened:
      try:
        access_token = WaitForAccessToken(port=port)
      # "except ... as e" works on Python 2.6+ and Python 3; the old comma
      # form is Python 2 only.
      except socket.error as e:
        print('Can\'t start local webserver. Socket Error: %s\n' % (e.strerror,))

  if access_token is None:
    # TODO(dhermes): Offer to add to clipboard using xsel, xclip, pbcopy, etc.
    page = 'https://%s%s' % (server, OAUTH_PATH)
    print(NO_OPEN_LOCAL_MESSAGE_TEMPLATE % (page,))
    access_token = raw_input('Enter access token: ').strip()

  return access_token
824
825
class KeyringCreds(object):
  """Obtains user credentials, using the keyring module as a password cache."""

  def __init__(self, server, host, email):
    self.server = server
    # Explicitly cast host to str to work around bug in old versions of Keyring
    # (versions before 0.10). Even though newer versions of Keyring fix this,
    # some modern linuxes (such as Ubuntu 12.04) still bundle a version with
    # the bug.
    self.host = str(host)
    self.email = email
    # Accounts whose keyring password has already been tried once; a second
    # request for the same account prompts the user instead.
    self.accounts_seen = set()

  def GetUserCredentials(self):
    """Prompts the user for a username and password.

    Only use keyring on the initial call. If the keyring contains the wrong
    password, we want to give the user a chance to enter another one.

    Returns:
      A tuple (email, password).
    """
    # Create a local alias to the email variable to avoid Python's crazy
    # scoping rules.
    global keyring
    email = self.email
    if email is None:
      email = GetEmail("Email (login for uploading to %s)" % self.server)
    password = None
    if keyring and email not in self.accounts_seen:
      try:
        password = keyring.get_password(self.host, email)
      except:
        # Sadly, we have to trap all errors here as
        # gnomekeyring.IOError inherits from object. :/
        # (A bare except is deliberate; "except Exception" would miss it.)
        print("Failed to get password from keyring")
        keyring = None
    if password is not None:
      print("Using password from system keyring.")
      self.accounts_seen.add(email)
    else:
      password = getpass.getpass("Password for %s: " % email)
      if keyring:
        answer = raw_input("Store password in system keyring?(y/N) ").strip()
        if answer == "y":
          keyring.set_password(self.host, email, password)
          self.accounts_seen.add(email)
    return (email, password)
869
870
class OAuth2Creds(object):
  """Callable holding the server/port settings needed by GetAccessToken."""

  def __init__(self, server, port, open_local_webbrowser=True):
    self.server = server
    self.port = port
    self.open_local_webbrowser = open_local_webbrowser

  def __call__(self):
    """Retrieves an OAuth 2.0 access token using the stored settings."""
    return GetAccessToken(
        server=self.server,
        port=self.port,
        open_local_webbrowser=self.open_local_webbrowser)
883
884
def GetRpcServer(server, email=None, host_override=None, save_cookies=True,
                 account_type=AUTH_ACCOUNT_TYPE, use_oauth2=False,
                 oauth2_port=DEFAULT_OAUTH2_PORT,
                 open_oauth2_local_webbrowser=True):
  """Returns an instance of an AbstractRpcServer.

  Args:
    server: String containing the review server URL.
    email: String containing user's email address.
    host_override: If not None, string containing an alternate hostname to use
      in the host header.
    save_cookies: Whether authentication cookies should be saved to disk.
    account_type: Account type for authentication, either 'GOOGLE'
      or 'HOSTED'. Defaults to AUTH_ACCOUNT_TYPE.
    use_oauth2: Boolean indicating whether OAuth 2.0 should be used for
      authentication.
    oauth2_port: Integer, the port where the localhost server receiving the
      redirect is serving. Defaults to DEFAULT_OAUTH2_PORT.
    open_oauth2_local_webbrowser: Boolean, defaults to True. If True and using
      OAuth, this opens a page in the user's browser to obtain a token.

  Returns:
    A new HttpRpcServer, on which RPC calls can be made.
  """
  host = (host_override or server).lower()
  # A local dev_appserver accepts fake authentication, so skip real login.
  if re.match(r'(http://)?localhost([:/]|$)', host):
    if email is None:
      email = "test@example.com"
      logging.info("Using debug user %s. Override with --email" % email)
    dev_server = HttpRpcServer(
        server,
        lambda: (email, "password"),
        host_override=host_override,
        extra_headers={"Cookie":
                       'dev_appserver_login="%s:False"' % email},
        save_cookies=save_cookies,
        account_type=account_type)
    # Don't try to talk to ClientLogin.
    dev_server.authenticated = True
    return dev_server

  if use_oauth2:
    auth_function = OAuth2Creds(
        server, oauth2_port, open_oauth2_local_webbrowser)
  else:
    auth_function = KeyringCreds(server, host, email).GetUserCredentials
  return HttpRpcServer(server, auth_function,
                       host_override=host_override,
                       save_cookies=save_cookies,
                       account_type=account_type)
937
938
def EncodeMultipartFormData(fields, files):
  """Encode form fields for multipart/form-data.

  Args:
    fields: A sequence of (name, value) elements for regular form fields.
    files: A sequence of (name, filename, value) elements for data to be
      uploaded as files.
  Returns:
    (content_type, body) ready for httplib.HTTP instance.

  Source:
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
  """
  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  CRLF = '\r\n'

  def _AsUtf8(value):
    # Body parts must be byte strings; unicode values are encoded as UTF-8.
    if isinstance(value, unicode):
      return value.encode('utf-8')
    return value

  lines = []
  for (name, value) in fields:
    lines.extend([
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"' % name,
        '',
        _AsUtf8(value),
    ])
  for (name, filename, value) in files:
    lines.extend([
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"; filename="%s"' %
        (name, filename),
        'Content-Type: %s' % GetContentType(filename),
        '',
        _AsUtf8(value),
    ])
  lines.append('--' + BOUNDARY + '--')
  lines.append('')
  body = CRLF.join(lines)
  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  return content_type, body
976
977
def GetContentType(filename):
  """Helper to guess the content-type from the filename."""
  guessed_type = mimetypes.guess_type(filename)[0]
  if guessed_type:
    return guessed_type
  return 'application/octet-stream'
981
982
# Use a shell for subcommands on Windows to get a PATH search.
# Passed as the "shell" argument to subprocess.Popen by the RunShell* helpers.
use_shell = sys.platform.startswith("win")
985
def RunShellWithReturnCodeAndStderr(command, print_output=False,
                                    universal_newlines=True,
                                    env=os.environ):
  """Executes a command and returns the output from stdout, stderr and the return code.

  Args:
    command: Command to execute.
    print_output: If True, the output is printed to stdout.
      If False, both stdout and stderr are ignored.
    universal_newlines: Use universal_newlines flag (default: True).
    env: Environment for the child process; defaults to the current process
      environment. A copy is made, so the caller's mapping is not modified.

  Returns:
    Tuple (stdout, stderr, return code)
  """
  logging.info("Running %s", command)
  # Force the C locale so tool output parsed by callers is not localized.
  env = env.copy()
  env['LC_MESSAGES'] = 'C'
  p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                       shell=use_shell, universal_newlines=universal_newlines,
                       env=env)
  if print_output:
    output_array = []
    while True:
      line = p.stdout.readline()
      if not line:
        break
      print(line.strip("\n"))
      output_array.append(line)
    output = "".join(output_array)
  else:
    output = p.stdout.read()
  p.wait()
  # NOTE(review): stderr is only read after the child exits; a child producing
  # enough stderr to fill the pipe buffer could block. Appears acceptable for
  # the short-output VCS commands this script runs -- confirm if reused.
  errout = p.stderr.read()
  if print_output and errout:
    # Equivalent to the Python-2-only "print >>sys.stderr, errout".
    sys.stderr.write(errout + "\n")
  p.stdout.close()
  p.stderr.close()
  return output, errout, p.returncode
1024
def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True,
                           env=os.environ):
  """Executes a command and returns the output from stdout and the return code."""
  stdout_data, _, exit_code = RunShellWithReturnCodeAndStderr(
      command, print_output, universal_newlines, env)
  return stdout_data, exit_code
1032
def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False, env=os.environ):
  """Runs command and returns its stdout output, exiting on failure."""
  output, exit_code = RunShellWithReturnCode(command, print_output,
                                             universal_newlines, env)
  if exit_code:
    ErrorExit("Got error status from %s:\n%s" % (command, output))
  if not silent_ok and not output:
    ErrorExit("No output from %s" % command)
  return output
1042
1043
class VersionControlSystem(object):
  """Abstract base class providing an interface to the VCS."""

  def __init__(self, options):
    """Constructor.

    Args:
      options: Command line options.
    """
    self.options = options

  def GetGUID(self):
    """Return string to distinguish the repository from others, for example to
    query all opened review issues for it"""
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)

  def PostProcessDiff(self, diff):
    """Return the diff with any special post processing this VCS needs, e.g.
    to include an svn-style "Index:"."""
    return diff

  def GenerateDiff(self, args):
    """Return the current diff as a string.

    Args:
      args: Extra arguments to pass to the diff command.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)

  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)

  def CheckForUnknownFiles(self):
    """Show an "are you sure?" prompt if there are unknown files."""
    unknown_files = self.GetUnknownFiles()
    if unknown_files:
      print("The following files are not added to version control:")
      for line in unknown_files:
        print(line)
      prompt = "Are you sure to continue?(y/N) "
      answer = raw_input(prompt).strip()
      if answer != "y":
        ErrorExit("User aborted")

  def GetBaseFile(self, filename):
    """Get the content of the upstream version of a file.

    Returns:
      A tuple (base_content, new_content, is_binary, status)
        base_content: The contents of the base file.
        new_content: For text files, this is empty. For binary files, this is
          the contents of the new file, since the diff output won't contain
          information to reconstruct the current file.
        is_binary: True iff the file is binary.
        status: The status of the file.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)

  def GetBaseFiles(self, diff):
    """Helper that calls GetBase file for each file in the patch.

    Returns:
      A dictionary that maps from filename to GetBaseFile's tuple. Filenames
      are retrieved based on lines that start with "Index:" or
      "Property changes on:".
    """
    files = {}
    for line in diff.splitlines(True):
      if line.startswith('Index:') or line.startswith('Property changes on:'):
        unused, filename = line.split(':', 1)
        # On Windows if a file has property changes its filename uses '\'
        # instead of '/'.
        filename = filename.strip().replace('\\', '/')
        files[filename] = self.GetBaseFile(filename)
    return files

  def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
                      files):
    """Uploads the base files (and if necessary, the current ones as well)."""

    def UploadFile(filename, file_id, content, is_binary, status, is_base):
      """Uploads a file to the server."""
      file_too_large = False
      # Renamed from "type", which shadowed the builtin.
      if is_base:
        file_kind = "base"
      else:
        file_kind = "current"
      if len(content) > MAX_UPLOAD_SIZE:
        print("Not uploading the %s file for %s because it's too large." %
              (file_kind, filename))
        file_too_large = True
        content = ""
      checksum = md5(content).hexdigest()
      if options.verbose > 0 and not file_too_large:
        print("Uploading %s file for %s" % (file_kind, filename))
      url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
      form_fields = [("filename", filename),
                     ("status", status),
                     ("checksum", checksum),
                     ("is_binary", str(is_binary)),
                     ("is_current", str(not is_base)),
                     ]
      if file_too_large:
        form_fields.append(("file_too_large", "1"))
      if options.email:
        form_fields.append(("user", options.email))
      ctype, body = EncodeMultipartFormData(form_fields,
                                            [("data", filename, content)])
      response_body = rpc_server.Send(url, body,
                                      content_type=ctype)
      if not response_body.startswith("OK"):
        StatusUpdate("  --> %s" % response_body)
        sys.exit(1)

    # Map filename -> file id string. An explicit loop with setdefault keeps
    # the first id seen for a filename (matching the previous list-comp-for-
    # side-effects version) instead of silently taking the last one.
    patches = {}
    for file_id_str, filename in patch_list:
      patches.setdefault(filename, file_id_str)
    for filename in patches:
      base_content, new_content, is_binary, status = files[filename]
      file_id_str = patches.get(filename)
      if file_id_str.find("nobase") != -1:
        base_content = None
        file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
      file_id = int(file_id_str)
      if base_content is not None:
        UploadFile(filename, file_id, base_content, is_binary, status, True)
      if new_content is not None:
        UploadFile(filename, file_id, new_content, is_binary, status, False)

  def IsImage(self, filename):
    """Returns true if the filename has an image extension."""
    mimetype = mimetypes.guess_type(filename)[0]
    if not mimetype:
      return False
    return mimetype.startswith("image/") and not mimetype.startswith("image/svg")

  def IsBinaryData(self, data):
    """Returns true if data contains a null byte."""
    # Derived from how Mercurial's heuristic, see
    # http://selenic.com/hg/file/848a6658069e/mercurial/util.py#l229
    return bool(data and "\0" in data)
1192
1193
class SubversionVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Subversion."""

  def __init__(self, options):
    """Constructor.

    Args:
      options: Command line options; options.revision may hold "REV" or a
        "REV1:REV2" range to diff against.
    """
    super(SubversionVCS, self).__init__(options)
    if self.options.revision:
      match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
      if not match:
        ErrorExit("Invalid Subversion revision %s." % self.options.revision)
      self.rev_start = match.group(1)
      self.rev_end = match.group(3)
    else:
      self.rev_start = self.rev_end = None
    # Cache output from "svn list -r REVNO dirname".
    # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
    self.svnls_cache = {}
    # Base URL is required to fetch files deleted in an older revision.
    # Result is cached to not guess it over and over again in GetBaseFile().
    required = self.options.download_base or self.options.revision is not None
    self.svn_base = self._GuessBase(required)

  def GetGUID(self):
    """Returns the repository UUID reported by "svn info"."""
    return self._GetInfo("Repository UUID")

  def GuessBase(self, required):
    """Wrapper for _GuessBase."""
    return self.svn_base

  def _GuessBase(self, required):
    """Returns base URL for current diff.

    Args:
      required: If true, exits if the url can't be guessed, otherwise None is
        returned.
    """
    url = self._GetInfo("URL")
    if url:
      scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
      guess = ""
      # TODO(anatoli) - repository specific hacks should be handled by server
      if netloc == "svn.python.org" and scheme == "svn+ssh":
        path = "projects" + path
        scheme = "http"
        guess = "Python "
      elif netloc.endswith(".googlecode.com"):
        scheme = "http"
        guess = "Google Code "
      path = path + "/"
      base = urlparse.urlunparse((scheme, netloc, path, params,
                                  query, fragment))
      logging.info("Guessed %sbase = %s", guess, base)
      return base
    if required:
      ErrorExit("Can't find URL in output from svn info")
    return None

  def _GetInfo(self, key):
    """Parses 'svn info' for current dir. Returns value for key or None"""
    for line in RunShell(["svn", "info"]).splitlines():
      if line.startswith(key + ": "):
        return line.split(":", 1)[1].strip()

  def _EscapeFilename(self, filename):
    """Escapes filename for SVN commands."""
    # A trailing "@" stops SVN from treating an "@" inside the name as a peg
    # revision separator.
    if "@" in filename and not filename.endswith("@"):
      filename = "%s@" % filename
    return filename

  def GenerateDiff(self, args):
    """Returns the output of "svn diff", optionally limited to the revision
    range given on the command line; exits if no valid patches are found."""
    cmd = ["svn", "diff"]
    if self.options.revision:
      cmd += ["-r", self.options.revision]
    cmd.extend(args)
    data = RunShell(cmd)
    count = 0
    for line in data.splitlines():
      if line.startswith("Index:") or line.startswith("Property changes on:"):
        count += 1
        logging.info(line)
    if not count:
      ErrorExit("No valid patches found in output from svn diff")
    return data

  def _CollapseKeywords(self, content, keyword_str):
    """Collapses SVN keywords."""
    # svn cat translates keywords but svn diff doesn't. As a result of this
    # behavior patching.PatchChunks() fails with a chunk mismatch error.
    # This part was originally written by the Review Board development team
    # who had the same problem (http://reviews.review-board.org/r/276/).
    # Mapping of keywords to known aliases
    svn_keywords = {
        # Standard keywords
        'Date': ['Date', 'LastChangedDate'],
        'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
        'Author': ['Author', 'LastChangedBy'],
        'HeadURL': ['HeadURL', 'URL'],
        'Id': ['Id'],

        # Aliases
        'LastChangedDate': ['LastChangedDate', 'Date'],
        'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
        'LastChangedBy': ['LastChangedBy', 'Author'],
        'URL': ['URL', 'HeadURL'],
    }

    def repl(m):
      # Keep the expanded form's width by padding with spaces so that diff
      # offsets still line up.
      if m.group(2):
        return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
      return "$%s$" % m.group(1)
    keywords = [keyword
                for name in keyword_str.split(" ")
                for keyword in svn_keywords.get(name, [])]
    return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)

  def GetUnknownFiles(self):
    """Returns the "svn status" lines flagged "?" (files unknown to SVN)."""
    status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
    unknown_files = []
    for line in status.split("\n"):
      if line and line[0] == "?":
        unknown_files.append(line)
    return unknown_files

  def ReadFile(self, filename):
    """Returns the contents of a file."""
    file = open(filename, 'rb')
    result = ""
    try:
      result = file.read()
    finally:
      file.close()
    return result

  def GetStatus(self, filename):
    """Returns the status of a file."""
    if not self.options.revision:
      status = RunShell(["svn", "status", "--ignore-externals",
                         self._EscapeFilename(filename)])
      if not status:
        ErrorExit("svn status returned no output for %s" % filename)
      status_lines = status.splitlines()
      # If file is in a cl, the output will begin with
      # "\n--- Changelist 'cl_name':\n". See
      # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
      if (len(status_lines) == 3 and
          not status_lines[0] and
          status_lines[1].startswith("--- Changelist")):
        status = status_lines[2]
      else:
        status = status_lines[0]
    # If we have a revision to diff against we need to run "svn list"
    # for the old and the new revision and compare the results to get
    # the correct status for a file.
    else:
      dirname, relfilename = os.path.split(filename)
      if dirname not in self.svnls_cache:
        cmd = ["svn", "list", "-r", self.rev_start,
               self._EscapeFilename(dirname) or "."]
        out, err, returncode = RunShellWithReturnCodeAndStderr(cmd)
        if returncode:
          # Directory might not yet exist at start revision
          # svn: Unable to find repository location for 'abc' in revision nnn
          if re.match('^svn: Unable to find repository location for .+ in revision \d+', err):
            old_files = ()
          else:
            ErrorExit("Failed to get status for %s:\n%s" % (filename, err))
        else:
          old_files = out.splitlines()
        args = ["svn", "list"]
        if self.rev_end:
          args += ["-r", self.rev_end]
        cmd = args + [self._EscapeFilename(dirname) or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to run command %s" % cmd)
        self.svnls_cache[dirname] = (old_files, out.splitlines())
      old_files, new_files = self.svnls_cache[dirname]
      # In old revision only -> deleted; in both -> modified; else added.
      if relfilename in old_files and relfilename not in new_files:
        status = "D   "
      elif relfilename in old_files and relfilename in new_files:
        status = "M   "
      else:
        status = "A   "
    return status

  def GetBaseFile(self, filename):
    """Returns (base_content, new_content, is_binary, status) for a
    Subversion-controlled file; see VersionControlSystem.GetBaseFile."""
    status = self.GetStatus(filename)
    base_content = None
    new_content = None

    # If a file is copied its status will be "A  +", which signifies
    # "addition-with-history". See "svn st" for more information. We need to
    # upload the original file or else diff parsing will fail if the file was
    # edited.
    if status[0] == "A" and status[3] != "+":
      # We'll need to upload the new content if we're adding a binary file
      # since diff's output won't contain it.
      mimetype = RunShell(["svn", "propget", "svn:mime-type",
                           self._EscapeFilename(filename)], silent_ok=True)
      base_content = ""
      is_binary = bool(mimetype) and not mimetype.startswith("text/")
      if is_binary:
        new_content = self.ReadFile(filename)
    elif (status[0] in ("M", "D", "R") or
          (status[0] == "A" and status[3] == "+") or  # Copied file.
          (status[0] == " " and status[1] == "M")):  # Property change.
      args = []
      if self.options.revision:
        # filename must not be escaped. We already add an at sign here.
        url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
      else:
        # Don't change filename, it's needed later.
        url = filename
        args += ["-r", "BASE"]
      cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
      mimetype, returncode = RunShellWithReturnCode(cmd)
      if returncode:
        # File does not exist in the requested revision.
        # Reset mimetype, it contains an error message.
        mimetype = ""
      else:
        mimetype = mimetype.strip()
      get_base = False
      # this test for binary is exactly the test prescribed by the
      # official SVN docs at
      # http://subversion.apache.org/faq.html#binary-files
      is_binary = (bool(mimetype) and
                   not mimetype.startswith("text/") and
                   mimetype not in ("image/x-xbitmap", "image/x-xpixmap"))
      if status[0] == " ":
        # Empty base content just to force an upload.
        base_content = ""
      elif is_binary:
        get_base = True
        if status[0] == "M":
          if not self.rev_end:
            new_content = self.ReadFile(filename)
          else:
            url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
            new_content = RunShell(["svn", "cat", url],
                                   universal_newlines=True, silent_ok=True)
      else:
        get_base = True

      if get_base:
        if is_binary:
          universal_newlines = False
        else:
          universal_newlines = True
        if self.rev_start:
          # "svn cat -r REV delete_file.txt" doesn't work. cat requires
          # the full URL with "@REV" appended instead of using "-r" option.
          url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          base_content = RunShell(["svn", "cat", url],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        else:
          base_content, ret_code = RunShellWithReturnCode(
              ["svn", "cat", self._EscapeFilename(filename)],
              universal_newlines=universal_newlines)
          if ret_code and status[0] == "R":
            # It's a replaced file without local history (see issue208).
            # The base file needs to be fetched from the server.
            url = "%s/%s" % (self.svn_base, filename)
            base_content = RunShell(["svn", "cat", url],
                                    universal_newlines=universal_newlines,
                                    silent_ok=True)
          elif ret_code:
            ErrorExit("Got error status from 'svn cat %s'" % filename)
        if not is_binary:
          args = []
          if self.rev_start:
            url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          else:
            url = filename
            args += ["-r", "BASE"]
          cmd = ["svn"] + args + ["propget", "svn:keywords", url]
          keywords, returncode = RunShellWithReturnCode(cmd)
          if keywords and not returncode:
            base_content = self._CollapseKeywords(base_content, keywords)
    else:
      StatusUpdate("svn status returned unexpected output: %s" % status)
      sys.exit(1)
    return base_content, new_content, is_binary, status[0:5]
1477
1478
class GitVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Git."""

  def __init__(self, options):
    super(GitVCS, self).__init__(options)
    # Map of filename -> (hash before, hash after) of base file.
    # Hashes for "no such file" are represented as None.
    self.hashes = {}
    # Map of new filename -> old filename for renames.
    self.renames = {}

  def GetGUID(self):
    """Returns the hash of the repository's first root commit as a GUID.

    A root commit has no parents, so its "rev-list --parents" line contains
    no space.
    """
    revlist = RunShell("git rev-list --parents HEAD".split()).splitlines()
    # M-A: Return the 1st root hash, there could be multiple when a
    # subtree is merged. In that case, more analysis would need to
    # be done to figure out which HEAD is the 'most representative'.
    for r in revlist:
      if ' ' not in r:
        return r

  def PostProcessDiff(self, gitdiff):
    """Converts the diff output to include an svn-style "Index:" line as well
    as record the hashes of the files, so we can upload them along with our
    diff."""
    # Special used by git to indicate "no such content".
    NULL_HASH = "0"*40

    def IsFileNew(filename):
      # A file is "new" when its before-hash was recorded as None below.
      return filename in self.hashes and self.hashes[filename][0] is None

    def AddSubversionPropertyChange(filename):
      """Add svn's property change information into the patch if given file is
      new file.

      We use Subversion's auto-props setting to retrieve its property.
      See http://svnbook.red-bean.com/en/1.1/ch07.html#svn-ch-7-sect-1.3.2 for
      Subversion's [auto-props] setting.
      """
      if self.options.emulate_svn_auto_props and IsFileNew(filename):
        svnprops = GetSubversionPropertyChanges(filename)
        if svnprops:
          svndiff.append("\n" + svnprops + "\n")

    svndiff = []
    filecount = 0
    filename = None
    for line in gitdiff.splitlines():
      match = re.match(r"diff --git a/(.*) b/(.*)$", line)
      if match:
        # Add auto property here for previously seen file.
        if filename is not None:
          AddSubversionPropertyChange(filename)
        filecount += 1
        # Intentionally use the "after" filename so we can show renames.
        filename = match.group(2)
        svndiff.append("Index: %s\n" % filename)
        if match.group(1) != match.group(2):
          self.renames[match.group(2)] = match.group(1)
      else:
        # The "index" line in a git diff looks like this (long hashes elided):
        #   index 82c0d44..b2cee3f 100755
        # We want to save the left hash, as that identifies the base file.
        match = re.match(r"index (\w+)\.\.(\w+)", line)
        if match:
          before, after = (match.group(1), match.group(2))
          if before == NULL_HASH:
            before = None
          if after == NULL_HASH:
            after = None
          self.hashes[filename] = (before, after)
      svndiff.append(line + "\n")
    if not filecount:
      ErrorExit("No valid patches found in output from git diff")
    # Add auto property for the last seen file.
    assert filename is not None
    AddSubversionPropertyChange(filename)
    return "".join(svndiff)

  def GenerateDiff(self, extra_args):
    """Returns a git diff for the requested revision(s)/paths.

    Runs two passes: one with only deletes (no rename detection) and one with
    rename/copy detection but no deletes, so reviewers still see the original
    file of a rename.
    """
    extra_args = extra_args[:]
    if self.options.revision:
      if ":" in self.options.revision:
        extra_args = self.options.revision.split(":", 1) + extra_args
      else:
        extra_args = [self.options.revision] + extra_args

    # --no-ext-diff is broken in some versions of Git, so try to work around
    # this by overriding the environment (but there is still a problem if the
    # git config key "diff.external" is used).
    env = os.environ.copy()
    if "GIT_EXTERNAL_DIFF" in env:
      del env["GIT_EXTERNAL_DIFF"]
    # -M/-C will not print the diff for the deleted file when a file is renamed.
    # This is confusing because the original file will not be shown on the
    # review when a file is renamed. So, get a diff with ONLY deletes, then
    # append a diff (with rename detection), without deletes.
    cmd = [
        "git", "diff", "--no-color", "--no-ext-diff", "--full-index",
        "--ignore-submodules",
    ]
    diff = RunShell(
        cmd + ["--no-renames", "--diff-filter=D"] + extra_args,
        env=env, silent_ok=True)
    if self.options.git_find_copies:
      similarity_options = ["--find-copies-harder", "-l100000",
                            "-C%s" % self.options.git_similarity ]
    else:
      similarity_options = ["-M%s" % self.options.git_similarity ]
    diff += RunShell(
        cmd + ["--diff-filter=AMCRT"] + similarity_options + extra_args,
        env=env, silent_ok=True)

    # Added by Kristinn.
    if self.options.add_cache:
      diff += RunShell(cmd + ["--cached"], env=env, silent_ok=True)
    # The CL could be only file deletion or not. So accept silent diff for both
    # commands then check for an empty diff manually.
    if not diff:
      ErrorExit("No output from %s" % (cmd + extra_args))
    return diff

  def GetUnknownFiles(self):
    """Returns the list of untracked, non-ignored files (ls-files --others)."""
    status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
                      silent_ok=True)
    return status.splitlines()

  def GetFileContent(self, file_hash, is_binary):
    """Returns the content of a file identified by its git hash."""
    data, retcode = RunShellWithReturnCode(["git", "show", file_hash],
                                           universal_newlines=not is_binary)
    if retcode:
      ErrorExit("Got error status from 'git show %s'" % file_hash)
    return data

  def GetBaseFile(self, filename):
    """Returns (base_content, new_content, is_binary, status) for filename.

    Status is derived from the before/after hashes recorded by
    PostProcessDiff; renames get the svn-style "A +" status.
    """
    hash_before, hash_after = self.hashes.get(filename, (None,None))
    base_content = None
    new_content = None
    status = None

    if filename in self.renames:
      status = "A +"  # Match svn attribute name for renames.
      if filename not in self.hashes:
        # If a rename doesn't change the content, we never get a hash.
        base_content = RunShell(
            ["git", "show", "HEAD:" + filename], silent_ok=True)
    elif not hash_before:
      status = "A"
      base_content = ""
    elif not hash_after:
      status = "D"
    else:
      status = "M"

    is_image = self.IsImage(filename)
    is_binary = self.IsBinaryData(base_content) or is_image

    # Grab the before/after content if we need it.
    # Grab the base content if we don't have it already.
    if base_content is None and hash_before:
      base_content = self.GetFileContent(hash_before, is_binary)
    # Only include the "after" file if it's an image; otherwise it
    # it is reconstructed from the diff.
    if is_image and hash_after:
      new_content = self.GetFileContent(hash_after, is_binary)

    return (base_content, new_content, is_binary, status)
1646
1647
class CVSVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for CVS."""

  def __init__(self, options):
    super(CVSVCS, self).__init__(options)

  def GetGUID(self):
    """For now we don't know how to get repository ID for CVS"""
    return

  def GetOriginalContent_(self, filename):
    """Returns the pristine repository content of filename.

    Runs "cvs up" first so the working copy matches the repository before
    reading the file back. Line endings are normalized to "\n".
    """
    RunShell(["cvs", "up", filename], silent_ok=True)
    # TODO need detect file content encoding
    content = open(filename).read()
    return content.replace("\r\n", "\n")

  def GetBaseFile(self, filename):
    """Returns (base_content, new_content, is_binary, status) for filename.

    Status is "M" (locally modified), "A" (locally added) or "D" (needs
    checkout, i.e. locally deleted), derived from "cvs status" output.
    """
    base_content = None
    new_content = None
    status = "A"

    output, retcode = RunShellWithReturnCode(["cvs", "status", filename])
    if retcode:
      ErrorExit("Got error status from 'cvs status %s'" % filename)

    # NOTE: str.find returns -1 (which is truthy) on a miss, so each result
    # must be compared against -1 explicitly; the bare "elif output.find(...)"
    # form previously matched every file that was not locally modified.
    if output.find("Status: Locally Modified") != -1:
      status = "M"
      # Move the modified file aside so "cvs up" restores the pristine copy,
      # then put the modified version back.
      temp_filename = "%s.tmp123" % filename
      os.rename(filename, temp_filename)
      base_content = self.GetOriginalContent_(filename)
      os.rename(temp_filename, filename)
    elif output.find("Status: Locally Added") != -1:
      status = "A"
      base_content = ""
    elif output.find("Status: Needs Checkout") != -1:
      status = "D"
      base_content = self.GetOriginalContent_(filename)

    return (base_content, new_content, self.IsBinaryData(base_content), status)

  def GenerateDiff(self, extra_args):
    """Returns the output of "cvs diff -u -N", verifying it contains patches.

    cvs diff exits 0 (no differences) or 1 (differences found); both are
    acceptable here.
    """
    cmd = ["cvs", "diff", "-u", "-N"]
    if self.options.revision:
      cmd += ["-r", self.options.revision]

    cmd.extend(extra_args)
    data, retcode = RunShellWithReturnCode(cmd)
    count = 0
    if retcode in [0, 1]:
      for line in data.splitlines():
        if line.startswith("Index:"):
          count += 1
          logging.info(line)

    if not count:
      ErrorExit("No valid patches found in output from cvs diff")

    return data

  def GetUnknownFiles(self):
    """Returns files unknown to CVS ("?" lines in "cvs diff" output)."""
    data, retcode = RunShellWithReturnCode(["cvs", "diff"])
    if retcode not in [0, 1]:
      ErrorExit("Got error status from 'cvs diff':\n%s" % (data,))
    unknown_files = []
    for line in data.split("\n"):
      if line and line[0] == "?":
        unknown_files.append(line)
    return unknown_files
1716
class MercurialVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Mercurial."""

  def __init__(self, options, repo_dir):
    super(MercurialVCS, self).__init__(options)
    # Absolute path to repository (we can be in a subdir)
    self.repo_dir = os.path.normpath(repo_dir)
    # Compute the subdir
    cwd = os.path.normpath(os.getcwd())
    assert cwd.startswith(self.repo_dir)
    self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
    if self.options.revision:
      self.base_rev = self.options.revision
    else:
      # Default base is the working directory's parent, e.g. "123:abcdef".
      self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()

  def GetGUID(self):
    """Returns the node hash of revision 0 as the repository GUID."""
    # See chapter "Uniquely identifying a repository"
    # http://hgbook.red-bean.com/read/customizing-the-output-of-mercurial.html
    info = RunShell("hg log -r0 --template {node}".split())
    return info.strip()

  def _GetRelPath(self, filename):
    """Get relative path of a file according to the current directory,
    given its logical path in the repo."""
    absname = os.path.join(self.repo_dir, filename)
    return os.path.relpath(absname)

  def GenerateDiff(self, extra_args):
    """Returns an "hg diff --git" for base_rev, rewritten to svn-style form."""
    cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
    data = RunShell(cmd, silent_ok=True)
    svndiff = []
    filecount = 0
    for line in data.splitlines():
      m = re.match("diff --git a/(\S+) b/(\S+)", line)
      if m:
        # Modify line to make it look like as it comes from svn diff.
        # With this modification no changes on the server side are required
        # to make upload.py work with Mercurial repos.
        # NOTE: for proper handling of moved/copied files, we have to use
        # the second filename.
        filename = m.group(2)
        svndiff.append("Index: %s" % filename)
        svndiff.append("=" * 67)
        filecount += 1
        logging.info(line)
      else:
        svndiff.append(line)
    if not filecount:
      ErrorExit("No valid patches found in output from hg diff")
    return "\n".join(svndiff) + "\n"

  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    # NOTE(review): "args" is unused here — looks like leftover code.
    args = []
    status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
                      silent_ok=True)
    unknown_files = []
    for line in status.splitlines():
      st, fn = line.split(" ", 1)
      if st == "?":
        unknown_files.append(fn)
    return unknown_files

  def GetBaseFile(self, filename):
    """Returns (base_content, new_content, is_binary, status) for filename."""
    # "hg status" and "hg cat" both take a path relative to the current subdir,
    # but "hg diff" has given us the path relative to the repo root.
    base_content = ""
    new_content = None
    is_binary = False
    oldrelpath = relpath = self._GetRelPath(filename)
    # "hg status -C" returns two lines for moved/copied files, one otherwise
    out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
    out = out.splitlines()
    # HACK: strip error message about missing file/directory if it isn't in
    # the working copy
    if out[0].startswith('%s: ' % relpath):
      out = out[1:]
    status, _ = out[0].split(' ', 1)
    if len(out) > 1 and status == "A":
      # Moved/copied => considered as modified, use old filename to
      # retrieve base contents
      oldrelpath = out[1].strip()
      status = "M"
    if ":" in self.base_rev:
      # A revision range was given; the base is the start of the range.
      base_rev = self.base_rev.split(":", 1)[0]
    else:
      base_rev = self.base_rev
    if status != "A":
      base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
                              silent_ok=True)
      is_binary = self.IsBinaryData(base_content)
    if status != "R":
      new_content = open(relpath, "rb").read()
      is_binary = is_binary or self.IsBinaryData(new_content)
    if is_binary and base_content:
      # Fetch again without converting newlines
      base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
                              silent_ok=True, universal_newlines=False)
    if not is_binary:
      # Text content is reconstructed from the diff on the server side.
      new_content = None
    return base_content, new_content, is_binary, status
1819
1820
class PerforceVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Perforce."""

  def __init__(self, options):
    """Validates the p4 options, confirms a login session, and derives a
    default title from the changelist description when none was given."""

    def ConfirmLogin():
      # Make sure we have a valid perforce session
      while True:
        data, retcode = self.RunPerforceCommandWithReturnCode(
            ["login", "-s"], marshal_output=True)
        if not data:
          ErrorExit("Error checking perforce login")
        if not retcode and (not "code" in data or data["code"] != "error"):
          break
        print "Enter perforce password: "
        self.RunPerforceCommandWithReturnCode(["login"])

    super(PerforceVCS, self).__init__(options)

    self.p4_changelist = options.p4_changelist
    if not self.p4_changelist:
      ErrorExit("A changelist id is required")
    if (options.revision):
      ErrorExit("--rev is not supported for perforce")

    self.p4_port = options.p4_port
    self.p4_client = options.p4_client
    self.p4_user = options.p4_user

    ConfirmLogin()

    if not options.title:
      description = self.RunPerforceCommand(["describe", self.p4_changelist],
                                            marshal_output=True)
      if description and "desc" in description:
        # Rietveld doesn't support multi-line descriptions
        raw_title = description["desc"].strip()
        lines = raw_title.splitlines()
        if len(lines):
          options.title = lines[0]

  def GetGUID(self):
    """For now we don't know how to get repository ID for Perforce"""
    return

  def RunPerforceCommandWithReturnCode(self, extra_args, marshal_output=False,
                                       universal_newlines=True):
    """Runs "p4 <extra_args>" and returns (data, retcode).

    When marshal_output is set, p4 is invoked with -G and its output is
    unmarshalled into a Python object.
    """
    args = ["p4"]
    if marshal_output:
      # -G makes perforce format its output as marshalled python objects
      args.extend(["-G"])
    if self.p4_port:
      args.extend(["-p", self.p4_port])
    if self.p4_client:
      args.extend(["-c", self.p4_client])
    if self.p4_user:
      args.extend(["-u", self.p4_user])
    args.extend(extra_args)

    data, retcode = RunShellWithReturnCode(
        args, print_output=False, universal_newlines=universal_newlines)
    if marshal_output and data:
      data = marshal.loads(data)
    return data, retcode

  def RunPerforceCommand(self, extra_args, marshal_output=False,
                         universal_newlines=True):
    """Like RunPerforceCommandWithReturnCode, but exits on a nonzero status."""
    # This might be a good place to cache call results, since things like
    # describe or fstat might get called repeatedly.
    data, retcode = self.RunPerforceCommandWithReturnCode(
        extra_args, marshal_output, universal_newlines)
    if retcode:
      ErrorExit("Got error status from %s:\n%s" % (extra_args, data))
    return data

  def GetFileProperties(self, property_key_prefix = "", command = "describe"):
    """Returns a dict of depot filename -> property value for the changelist.

    Pairs each depotFile<N> key in the marshalled "p4 describe" output with
    the <property_key_prefix><N> key (e.g. "action0", "type0").
    """
    description = self.RunPerforceCommand(["describe", self.p4_changelist],
                                          marshal_output=True)

    changed_files = {}
    file_index = 0
    # Try depotFile0, depotFile1, ... until we don't find a match
    while True:
      file_key = "depotFile%d" % file_index
      if file_key in description:
        filename = description[file_key]
        change_type = description[property_key_prefix + str(file_index)]
        changed_files[filename] = change_type
        file_index += 1
      else:
        break
    return changed_files

  def GetChangedFiles(self):
    """Returns a dict of depot filename -> p4 action for the changelist."""
    return self.GetFileProperties("action")

  def GetUnknownFiles(self):
    # Perforce doesn't detect new files, they have to be explicitly added
    return []

  def IsBaseBinary(self, filename):
    """Returns True if the base revision of filename has a binary p4 type."""
    base_filename = self.GetBaseFilename(filename)
    return self.IsBinaryHelper(base_filename, "files")

  def IsPendingBinary(self, filename):
    """Returns True if the pending revision of filename has a binary type."""
    return self.IsBinaryHelper(filename, "describe")

  def IsBinaryHelper(self, filename, command):
    """Returns True unless filename's p4 file type ends with "text"."""
    file_types = self.GetFileProperties("type", command)
    if not filename in file_types:
      ErrorExit("Trying to check binary status of unknown file %s." % filename)
    # This treats symlinks, macintosh resource files, temporary objects, and
    # unicode as binary. See the Perforce docs for more details:
    # http://www.perforce.com/perforce/doc.current/manuals/cmdref/o.ftypes.html
    return not file_types[filename].endswith("text")

  def GetFileContent(self, filename, revision, is_binary):
    """Returns the content of filename at revision via "p4 print"."""
    file_arg = filename
    if revision:
      file_arg += "#" + revision
    # -q suppresses the initial line that displays the filename and revision
    return self.RunPerforceCommand(["print", "-q", file_arg],
                                   universal_newlines=not is_binary)

  def GetBaseFilename(self, filename):
    """Returns the depot path the base content should be read from.

    For moves/branches (and downgraded adds) the base lives at a different
    path, reported by "p4 fstat -Or"; otherwise it is filename itself.
    """
    actionsWithDifferentBases = [
        "move/add", # p4 move
        "branch", # p4 integrate (to a new file), similar to hg "add"
        "add", # p4 integrate (to a new file), after modifying the new file
    ]

    # We only see a different base for "add" if this is a downgraded branch
    # after a file was branched (integrated), then edited.
    if self.GetAction(filename) in actionsWithDifferentBases:
      # -Or shows information about pending integrations/moves
      fstat_result = self.RunPerforceCommand(["fstat", "-Or", filename],
                                             marshal_output=True)

      baseFileKey = "resolveFromFile0" # I think it's safe to use only file0
      if baseFileKey in fstat_result:
        return fstat_result[baseFileKey]

    return filename

  def GetBaseRevision(self, filename):
    """Returns the locally-synced ("have") revision of filename's base file,
    or None when "p4 have" reports no haveRev."""
    base_filename = self.GetBaseFilename(filename)

    have_result = self.RunPerforceCommand(["have", base_filename],
                                          marshal_output=True)
    if "haveRev" in have_result:
      return have_result["haveRev"]

  def GetLocalFilename(self, filename):
    """Returns the local filesystem path for a depot filename ("p4 where"),
    or None when no "path" key is reported."""
    where = self.RunPerforceCommand(["where", filename], marshal_output=True)
    if "path" in where:
      return where["path"]

  def GenerateDiff(self, args):
    """Builds an svn-style unified diff for the whole changelist from p4 data.

    Each changed file gets an svn-like header followed by a body generated
    per its svn-mapped status (M/A/D); "SKIP" actions are omitted.
    """
    class DiffData:
      # Accumulator for the per-file pieces that make up one diff section.
      def __init__(self, perforceVCS, filename, action):
        self.perforceVCS = perforceVCS
        self.filename = filename
        self.action = action
        self.base_filename = perforceVCS.GetBaseFilename(filename)

        self.file_body = None
        self.base_rev = None
        self.prefix = None
        self.working_copy = True
        self.change_summary = None

    def GenerateDiffHeader(diffData):
      # Emits the svn-style "Index:"/"---"/"+++" header lines for one file.
      header = []
      header.append("Index: %s" % diffData.filename)
      header.append("=" * 67)

      if diffData.base_filename != diffData.filename:
        if diffData.action.startswith("move"):
          verb = "rename"
        else:
          verb = "copy"
        header.append("%s from %s" % (verb, diffData.base_filename))
        header.append("%s to %s" % (verb, diffData.filename))

      suffix = "\t(revision %s)" % diffData.base_rev
      header.append("--- " + diffData.base_filename + suffix)
      if diffData.working_copy:
        suffix = "\t(working copy)"
      header.append("+++ " + diffData.filename + suffix)
      if diffData.change_summary:
        header.append(diffData.change_summary)
      return header

    def GenerateMergeDiff(diffData, args):
      # Body for a modified file, from "p4 diff -du".
      # -du generates a unified diff, which is nearly svn format
      diffData.file_body = self.RunPerforceCommand(
          ["diff", "-du", diffData.filename] + args)
      diffData.base_rev = self.GetBaseRevision(diffData.filename)
      diffData.prefix = ""

      # We have to replace p4's file status output (the lines starting
      # with +++ or ---) to match svn's diff format
      lines = diffData.file_body.splitlines()
      first_good_line = 0
      while (first_good_line < len(lines) and
          not lines[first_good_line].startswith("@@")):
        first_good_line += 1
      diffData.file_body = "\n".join(lines[first_good_line:])
      return diffData

    def GenerateAddDiff(diffData):
      # Body for an added file: every local line prefixed with "+".
      fstat = self.RunPerforceCommand(["fstat", diffData.filename],
                                      marshal_output=True)
      if "headRev" in fstat:
        diffData.base_rev = fstat["headRev"] # Re-adding a deleted file
      else:
        diffData.base_rev = "0" # Brand new file
      diffData.working_copy = False
      rel_path = self.GetLocalFilename(diffData.filename)
      diffData.file_body = open(rel_path, 'r').read()
      # Replicate svn's list of changed lines
      line_count = len(diffData.file_body.splitlines())
      diffData.change_summary = "@@ -0,0 +1"
      if line_count > 1:
        diffData.change_summary += ",%d" % line_count
      diffData.change_summary += " @@"
      diffData.prefix = "+"
      return diffData

    def GenerateDeleteDiff(diffData):
      # Body for a deleted file: every base line prefixed with "-".
      diffData.base_rev = self.GetBaseRevision(diffData.filename)
      is_base_binary = self.IsBaseBinary(diffData.filename)
      # For deletes, base_filename == filename
      diffData.file_body = self.GetFileContent(diffData.base_filename,
                                               None,
                                               is_base_binary)
      # Replicate svn's list of changed lines
      line_count = len(diffData.file_body.splitlines())
      diffData.change_summary = "@@ -1"
      if line_count > 1:
        diffData.change_summary += ",%d" % line_count
      diffData.change_summary += " +0,0 @@"
      diffData.prefix = "-"
      return diffData

    changed_files = self.GetChangedFiles()

    svndiff = []
    filecount = 0
    for (filename, action) in changed_files.items():
      svn_status = self.PerforceActionToSvnStatus(action)
      if svn_status == "SKIP":
        continue

      diffData = DiffData(self, filename, action)
      # Is it possible to diff a branched file? Stackoverflow says no:
      # http://stackoverflow.com/questions/1771314/in-perforce-command-line-how-to-diff-a-file-reopened-for-add
      if svn_status == "M":
        diffData = GenerateMergeDiff(diffData, args)
      elif svn_status == "A":
        diffData = GenerateAddDiff(diffData)
      elif svn_status == "D":
        diffData = GenerateDeleteDiff(diffData)
      else:
        ErrorExit("Unknown file action %s (svn action %s)." % \
                  (action, svn_status))

      svndiff += GenerateDiffHeader(diffData)

      for line in diffData.file_body.splitlines():
        svndiff.append(diffData.prefix + line)
      filecount += 1
    if not filecount:
      ErrorExit("No valid patches found in output from p4 diff")
    return "\n".join(svndiff) + "\n"

  def PerforceActionToSvnStatus(self, status):
    """Maps a p4 action name to an svn-style status letter (or "SKIP")."""
    # Mirroring the list at http://permalink.gmane.org/gmane.comp.version-control.mercurial.devel/28717
    # Is there something more official?
    return {
        "add" : "A",
        "branch" : "A",
        "delete" : "D",
        "edit" : "M", # Also includes changing file types.
        "integrate" : "M",
        "move/add" : "M",
        "move/delete": "SKIP",
        "purge" : "D", # How does a file's status become "purge"?
    }[status]

  def GetAction(self, filename):
    """Returns the p4 action recorded for filename in the changelist."""
    changed_files = self.GetChangedFiles()
    if not filename in changed_files:
      ErrorExit("Trying to get base version of unknown file %s." % filename)

    return changed_files[filename]

  def GetBaseFile(self, filename):
    """Returns (base_content, new_content, is_binary, status) for filename."""
    base_filename = self.GetBaseFilename(filename)
    base_content = ""
    new_content = None

    status = self.PerforceActionToSvnStatus(self.GetAction(filename))

    if status != "A":
      revision = self.GetBaseRevision(base_filename)
      if not revision:
        ErrorExit("Couldn't find base revision for file %s" % filename)
      is_base_binary = self.IsBaseBinary(base_filename)
      base_content = self.GetFileContent(base_filename,
                                         revision,
                                         is_base_binary)

    is_binary = self.IsPendingBinary(filename)
    if status != "D" and status != "SKIP":
      relpath = self.GetLocalFilename(filename)
      if is_binary:
        # Text content is reconstructed from the diff; only binaries upload
        # the new file wholesale.
        new_content = open(relpath, "rb").read()

    return base_content, new_content, is_binary, status
2141
2142 # NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
  """Splits a patch into separate pieces for each file.

  Args:
    data: A string containing the output of svn diff.

  Returns:
    A list of 2-tuple (filename, text) where text is the svn diff output
    pertaining to filename.
  """
  patches = []
  current_name = None
  current_lines = []
  for line in data.splitlines(True):
    boundary_name = None
    if line.startswith('Index:'):
      boundary_name = line.split(':', 1)[1].strip()
    elif line.startswith('Property changes on:'):
      # When a file is modified, paths use '/' between directories, however
      # when a property is modified '\' is used on Windows. Make them the
      # same, otherwise the file shows up twice.
      candidate = line.split(':', 1)[1].strip().replace('\\', '/')
      if candidate != current_name:
        # File has property changes but no modifications: start a new diff.
        boundary_name = candidate
    if boundary_name:
      # Flush the previous file's section before starting the next one.
      if current_name and current_lines:
        patches.append((current_name, ''.join(current_lines)))
      current_name = boundary_name
      current_lines = [line]
      continue
    if current_lines is not None:
      current_lines.append(line)
  if current_name and current_lines:
    patches.append((current_name, ''.join(current_lines)))
  return patches
2181
2182
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
  """Uploads a separate patch for each file in the diff output.

  Args:
    issue: The issue id (convertible to int).
    rpc_server: Server proxy providing a Send(url, body, ...) method.
    patchset: The patchset id (convertible to int).
    data: Full diff text, as produced by the VCS's GenerateDiff.
    options: Parsed command-line options; download_base controls whether
      file contents are uploaded separately.

  Returns a list of [patch_key, filename] for each file.
  """
  patches = SplitPatch(data)
  rv = []
  for patch in patches:
    # Skip (rather than fail) oversized per-file patches.
    if len(patch[1]) > MAX_UPLOAD_SIZE:
      print ("Not uploading the patch for " + patch[0] +
             " because the file is too large.")
      continue
    form_fields = [("filename", patch[0])]
    if not options.download_base:
      form_fields.append(("content_upload", "1"))
    files = [("data", "data.diff", patch[1])]
    ctype, body = EncodeMultipartFormData(form_fields, files)
    url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
    print "Uploading patch for " + patch[0]
    response_body = rpc_server.Send(url, body, content_type=ctype)
    # On success the server replies "OK" on the first line and the patch
    # key on the second.
    lines = response_body.splitlines()
    if not lines or lines[0] != "OK":
      StatusUpdate(" --> %s" % response_body)
      sys.exit(1)
    rv.append([lines[1], patch[0]])
  return rv
2209
2210
def GuessVCSName(options):
  """Helper to guess the version control system.

  This examines the current directory, guesses which VersionControlSystem
  we're using, and returns an string indicating which VCS is detected.

  Returns:
    A pair (vcs, output). vcs is a string indicating which VCS was detected
    and is one of VCS_GIT, VCS_MERCURIAL, VCS_SUBVERSION, VCS_PERFORCE,
    VCS_CVS, or VCS_UNKNOWN.
    Since local perforce repositories can't be easily detected, this method
    will only guess VCS_PERFORCE if any perforce options have been specified.
    output is a string containing any interesting output from the vcs
    detection routine, or None if there is nothing interesting.
  """
  # Any explicitly supplied p4* option short-circuits detection to Perforce.
  for attribute, value in options.__dict__.iteritems():
    if attribute.startswith("p4") and value != None:
      return (VCS_PERFORCE, None)

  def RunDetectCommand(vcs_type, command):
    """Helper to detect VCS by executing command.

    Returns:
      A pair (vcs, output) or None. Throws exception on error.
    """
    try:
      out, returncode = RunShellWithReturnCode(command)
      if returncode == 0:
        return (vcs_type, out.strip())
    except OSError, (errcode, message):
      if errcode != errno.ENOENT: # command not found code
        raise

  # Mercurial has a command to get the base directory of a repository
  # Try running it, but don't die if we don't have hg installed.
  # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
  res = RunDetectCommand(VCS_MERCURIAL, ["hg", "root"])
  if res != None:
    return res

  # Subversion from 1.7 has a single centralized .svn folder
  # ( see http://subversion.apache.org/docs/release-notes/1.7.html#wc-ng )
  # That's why we use 'svn info' instead of checking for .svn dir
  res = RunDetectCommand(VCS_SUBVERSION, ["svn", "info"])
  if res != None:
    return res

  # Git has a command to test if you're in a git tree.
  # Try running it, but don't die if we don't have git installed.
  res = RunDetectCommand(VCS_GIT, ["git", "rev-parse",
                                   "--is-inside-work-tree"])
  if res != None:
    return res

  # detect CVS repos use `cvs status && $? == 0` rules
  res = RunDetectCommand(VCS_CVS, ["cvs", "status"])
  if res != None:
    return res

  return (VCS_UNKNOWN, None)
2271
2272
def GuessVCS(options):
  """Helper to guess the version control system.

  This verifies any user-specified VersionControlSystem (by command line
  or environment variable). If the user didn't specify one, this examines
  the current directory, guesses which VersionControlSystem we're using,
  and returns an instance of the appropriate class. Exit with an error
  if we can't figure it out.

  Returns:
    A VersionControlSystem instance. Exits if the VCS can't be guessed.
  """
  # Explicit choice: --vcs flag first, then the CODEREVIEW_VCS variable.
  requested = options.vcs or os.environ.get("CODEREVIEW_VCS")
  if requested:
    canonical = VCS_ABBREVIATIONS.get(requested.lower())
    if canonical is None:
      ErrorExit("Unknown version control system %r specified." % requested)
    vcs, extra_output = canonical, None
  else:
    vcs, extra_output = GuessVCSName(options)

  if vcs == VCS_MERCURIAL:
    # MercurialVCS needs the repository root; detection may have provided it.
    if extra_output is None:
      extra_output = RunShell(["hg", "root"]).strip()
    return MercurialVCS(options, extra_output)
  if vcs == VCS_SUBVERSION:
    return SubversionVCS(options)
  if vcs == VCS_PERFORCE:
    return PerforceVCS(options)
  if vcs == VCS_GIT:
    return GitVCS(options)
  if vcs == VCS_CVS:
    return CVSVCS(options)

  ErrorExit(("Could not guess version control system. "
             "Are you in a working copy directory?"))
2311
2312
def CheckReviewer(reviewer):
  """Validate a reviewer -- either a nickname or an email addres.

  Args:
    reviewer: A nickname or an email address.

  Calls ErrorExit() if it is an invalid email address.
  """
  if "@" not in reviewer:
    # No "@" at all: treat it as a nickname, nothing to validate.
    return  # Assume nickname
  pieces = reviewer.split("@")
  if len(pieces) > 2:
    ErrorExit("Invalid email address: %r" % reviewer)
  assert len(pieces) == 2
  # The domain part must contain at least one dot.
  if "." not in pieces[1]:
    ErrorExit("Invalid email address: %r" % reviewer)
2329
2330
def LoadSubversionAutoProperties():
  """Returns the content of [auto-props] section of Subversion's config file as
  a dictionary.

  Returns:
    A dictionary whose key-value pair corresponds the [auto-props] section's
    key-value pair.
    In following cases, returns empty dictionary:
    - config file doesn't exist, or
    - 'enable-auto-props' is not set to 'true-like-value' in [miscellany].
  """
  # Locate the per-user Subversion config file for this platform.
  if os.name == 'nt':
    config_path = os.environ.get("APPDATA") + "\\Subversion\\config"
  else:
    config_path = os.path.expanduser("~/.subversion/config")
  if not os.path.exists(config_path):
    return {}

  config = ConfigParser.ConfigParser()
  config.read(config_path)
  enabled = (config.has_section("miscellany") and
             config.has_option("miscellany", "enable-auto-props") and
             config.getboolean("miscellany", "enable-auto-props") and
             config.has_section("auto-props"))
  if not enabled:
    return {}

  # One parsed property list per file pattern in [auto-props].
  return dict(
      (pattern,
       ParseSubversionPropertyValues(config.get("auto-props", pattern)))
      for pattern in config.options("auto-props"))
2361
def ParseSubversionPropertyValues(props):
  """Parse the given property value which comes from [auto-props] section and
  returns a list whose element is a (svn_prop_key, svn_prop_value) pair.

  See the following doctest for example.

  >>> ParseSubversionPropertyValues('svn:eol-style=LF')
  [('svn:eol-style', 'LF')]
  >>> ParseSubversionPropertyValues('svn:mime-type=image/jpeg')
  [('svn:mime-type', 'image/jpeg')]
  >>> ParseSubversionPropertyValues('svn:eol-style=LF;svn:executable')
  [('svn:eol-style', 'LF'), ('svn:executable', '*')]
  """
  pairs = []
  for entry in props.split(";"):
    pieces = entry.split("=")
    assert len(pieces) <= 2
    if len(pieces) == 2:
      pairs.append((pieces[0], pieces[1]))
    else:
      # Subversion's convention: a bare key means the value '*'.
      pairs.append((pieces[0], "*"))
  return pairs
2385
2386
def GetSubversionPropertyChanges(filename):
  """Return a Subversion's 'Property changes on ...' string, which is used in
  the patch file.

  Args:
    filename: filename whose property might be set by [auto-props] config.

  Returns:
    A string like 'Property changes on |filename| ...' if given |filename|
    matches any entries in [auto-props] section. None, otherwise.
  """
  global svn_auto_props_map
  if svn_auto_props_map is None:
    # Lazily load and cache the [auto-props] configuration on first use.
    svn_auto_props_map = LoadSubversionAutoProperties()

  # Collect the properties of every pattern that matches this filename.
  matched_props = []
  for pattern, prop_list in svn_auto_props_map.items():
    if fnmatch.fnmatch(filename, pattern):
      matched_props.extend(prop_list)
  if not matched_props:
    return None
  return FormatSubversionPropertyChanges(filename, matched_props)
2409
2410
def FormatSubversionPropertyChanges(filename, props):
  """Returns Subversion's 'Property changes on ...' strings using given filename
  and properties.

  Args:
    filename: filename
    props: A list whose element is a (svn_prop_key, svn_prop_value) pair.

  Returns:
    A string which can be used in the patch file for Subversion.

  See the following doctest for example.

  >>> print FormatSubversionPropertyChanges('foo.cc', [('svn:eol-style', 'LF')])
  Property changes on: foo.cc
  ___________________________________________________________________
  Added: svn:eol-style
   + LF
  <BLANKLINE>
  """
  output_lines = [
      "Property changes on: %s" % filename,
      "___________________________________________________________________"]
  for prop_key, prop_value in props:
    output_lines.append("Added: " + prop_key)
    output_lines.append(" + " + prop_value)
  # Trailing newline matches the format Subversion emits in patches.
  return "\n".join(output_lines) + "\n"
2438
2439
def RealMain(argv, data=None):
  """The real main function.

  Args:
    argv: Command line arguments.
    data: Diff contents. If None (default) the diff is generated by
      the VersionControlSystem implementation returned by GuessVCS().

  Returns:
    A 2-tuple (issue id, patchset id).
    The patchset id is None if the base files are not uploaded by this
    script (applies only to SVN checkouts).
  """
  # Parse command-line options; handle --help here so we can trim the
  # Perforce-only option group from the output unless -v was given.
  options, args = parser.parse_args(argv[1:])
  if options.help:
    if options.verbose < 2:
      # hide Perforce options
      parser.epilog = (
          "Use '--help -v' to show additional Perforce options. "
          "For more help, see "
          "http://code.google.com/p/rietveld/wiki/CodeReviewHelp"
          )
      parser.option_groups.remove(parser.get_option_group('--p4_port'))
    parser.print_help()
    sys.exit(0)

  # Map -v count to logging levels: -vvv => DEBUG, -vv => INFO.
  global verbosity
  verbosity = options.verbose
  if verbosity >= 3:
    logging.getLogger().setLevel(logging.DEBUG)
  elif verbosity >= 2:
    logging.getLogger().setLevel(logging.INFO)

  # Detect which version control system backs the working copy.
  vcs = GuessVCS(options)

  base = options.base_url
  if isinstance(vcs, SubversionVCS):
    # Guessing the base field is only supported for Subversion.
    # Note: Fetching base files may become deprecated in future releases.
    guessed_base = vcs.GuessBase(options.download_base)
    if base:
      if guessed_base and base != guessed_base:
        print "Using base URL \"%s\" from --base_url instead of \"%s\"" % \
            (base, guessed_base)
    else:
      base = guessed_base

  # NOTE(review): options.download_base is already truthy when this branch
  # is taken, so the assignment below looks redundant — confirm intent.
  if not base and options.download_base:
    options.download_base = True
    logging.info("Enabled upload of base file")
  if not options.assume_yes:
    vcs.CheckForUnknownFiles()
  # Generate the diff unless the caller supplied one, then let the VCS
  # backend normalize it.
  if data is None:
    data = vcs.GenerateDiff(args)
  data = vcs.PostProcessDiff(data)
  if options.print_diffs:
    print "Rietveld diff start:*****"
    print data
    print "Rietveld diff end:*****"
  files = vcs.GetBaseFiles(data)
  if verbosity >= 1:
    print "Upload server:", options.server, "(change with -s/--server)"
  # OAuth2 tokens replace cookie-based auth, so don't persist cookies.
  if options.use_oauth2:
    options.save_cookies = False
  rpc_server = GetRpcServer(options.server,
                            options.email,
                            options.host,
                            options.save_cookies,
                            options.account_type,
                            options.use_oauth2,
                            options.oauth2_port,
                            options.open_oauth2_local_webbrowser)
  # Build the multipart form fields for the /upload request.
  form_fields = []

  repo_guid = vcs.GetGUID()
  if repo_guid:
    form_fields.append(("repo_guid", repo_guid))
  if base:
    # Strip any embedded username from the base URL before sending it.
    b = urlparse.urlparse(base)
    username, netloc = urllib.splituser(b.netloc)
    if username:
      logging.info("Removed username from base URL")
    base = urlparse.urlunparse((b.scheme, netloc, b.path, b.params,
                                b.query, b.fragment))
    form_fields.append(("base", base))
  if options.issue:
    form_fields.append(("issue", str(options.issue)))
  if options.email:
    form_fields.append(("user", options.email))
  if options.reviewers:
    for reviewer in options.reviewers.split(','):
      CheckReviewer(reviewer)
    form_fields.append(("reviewers", options.reviewers))
  if options.cc:
    for cc in options.cc.split(','):
      CheckReviewer(cc)
    form_fields.append(("cc", options.cc))

  # Process --message, --title and --file.
  message = options.message or ""
  title = options.title or ""
  if options.file:
    if options.message:
      ErrorExit("Can't specify both message and message file options")
    file = open(options.file, 'r')
    message = file.read()
    file.close()
  if options.issue:
    prompt = "Title describing this patch set: "
  else:
    prompt = "New issue subject: "
  # Title precedence: --title, then the first line of the message, then an
  # interactive prompt on stdin.
  title = (
      title or message.split('\n', 1)[0].strip() or raw_input(prompt).strip())
  if not title and not options.issue:
    ErrorExit("A non-empty title is required for a new issue")
  # For existing issues, it's fine to give a patchset an empty name. Rietveld
  # doesn't accept that so use a whitespace.
  title = title or " "
  # Truncate overlong titles with an ellipsis character.
  if len(title) > 100:
    title = title[:99] + '…'
  if title and not options.issue:
    message = message or title

  form_fields.append(("subject", title))
  # If it's a new issue send message as description. Otherwise a new
  # message is created below on upload_complete.
  if message and not options.issue:
    form_fields.append(("description", message))

  # Send a hash of all the base file so the server can determine if a copy
  # already exists in an earlier patchset.
  base_hashes = ""
  for file, info in files.iteritems():
    if not info[0] is None:
      checksum = md5(info[0]).hexdigest()
      if base_hashes:
        base_hashes += "|"
      base_hashes += checksum + ":" + file
  form_fields.append(("base_hashes", base_hashes))
  if options.private:
    if options.issue:
      print "Warning: Private flag ignored when updating an existing issue."
    else:
      form_fields.append(("private", "1"))
  if options.send_patch:
    options.send_mail = True
  if not options.download_base:
    form_fields.append(("content_upload", "1"))
  # Large patches are uploaded file-by-file in a follow-up request instead
  # of inline with this one.
  if len(data) > MAX_UPLOAD_SIZE:
    print "Patch is large, so uploading file patches separately."
    uploaded_diff_file = []
    form_fields.append(("separate_patches", "1"))
  else:
    uploaded_diff_file = [("data", "data.diff", data)]
  ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
  response_body = rpc_server.Send("/upload", body, content_type=ctype)
  # Parse the server response: first line is the status message, second is
  # the patchset id, remaining lines describe individual patches.
  patchset = None
  if not options.download_base or not uploaded_diff_file:
    lines = response_body.splitlines()
    if len(lines) >= 2:
      msg = lines[0]
      patchset = lines[1].strip()
      patches = [x.split(" ", 1) for x in lines[2:]]
    else:
      msg = response_body
  else:
    msg = response_body
  StatusUpdate(msg)
  if not response_body.startswith("Issue created.") and \
     not response_body.startswith("Issue updated."):
    sys.exit(0)
  # The issue id is the last path component of the reported issue URL.
  issue = msg[msg.rfind("/")+1:]

  if not uploaded_diff_file:
    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
    if not options.download_base:
      patches = result

  if not options.download_base:
    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)

  # Final request tells the server the upload is complete and optionally
  # triggers the notification mail.
  payload = {}  # payload for final request
  if options.send_mail:
    payload["send_mail"] = "yes"
  if options.send_patch:
    payload["attach_patch"] = "yes"
  if options.issue and message:
    payload["message"] = message
  payload = urllib.urlencode(payload)
  rpc_server.Send("/" + issue + "/upload_complete/" + (patchset or ""),
                  payload=payload)
  return issue, patchset
2632
2633
def main():
  """Program entry point: configure logging and run RealMain."""
  log_format = ("%(asctime).19s %(levelname)s %(filename)s:"
                "%(lineno)s %(message)s ")
  try:
    logging.basicConfig(format=log_format)
    # Force the C locale so tool output parsed by this script is stable.
    os.environ['LC_ALL'] = 'C'
    RealMain(sys.argv)
  except KeyboardInterrupt:
    print
    StatusUpdate("Interrupted.")
    sys.exit(1)
2644
2645
# Run main() only when executed as a script, not when imported as a module.
if __name__ == "__main__":
  main()