Update upstream source from tag 'upstream/0.4.23'
Update to upstream version '0.4.23'
with Debian dir 3a3f2f64c7633534252572355f5b058cbdc69c5f
Jochen Sprickerhof
3 years ago
19 | 19 | # same version as in: |
20 | 20 | # - src/catkin_pkg/__init__.py |
21 | 21 | # - stdeb.cfg |
22 | 'version': '0.4.22', | |
22 | 'version': '0.4.23', | |
23 | 23 | 'packages': ['catkin_pkg', 'catkin_pkg.cli'], |
24 | 24 | 'package_dir': {'': 'src'}, |
25 | 25 | 'package_data': {'catkin_pkg': ['templates/*.in']}, |
34 | 34 | # same version as in: |
35 | 35 | # - setup.py |
36 | 36 | # - stdeb.cfg |
37 | __version__ = '0.4.22' | |
37 | __version__ = '0.4.23' |
47 | 47 | FORTHCOMING_LABEL = 'Forthcoming' |
48 | 48 | |
49 | 49 | |
50 | def get_all_changes(vcs_client, skip_merges=False): | |
50 | def get_all_changes(vcs_client, skip_merges=False, only_merges=False): | |
51 | 51 | tags = _get_version_tags(vcs_client) |
52 | 52 | |
53 | 53 | # query all log entries per tag range |
55 | 55 | previous_tag = Tag(None) |
56 | 56 | for tag in sorted_tags(tags): |
57 | 57 | log_entries = vcs_client.get_log_entries( |
58 | from_tag=previous_tag.name, to_tag=tag.name, skip_merges=skip_merges) | |
58 | from_tag=previous_tag.name, to_tag=tag.name, skip_merges=skip_merges, only_merges=only_merges) | |
59 | 59 | tag2log_entries[previous_tag] = log_entries |
60 | 60 | previous_tag = tag |
61 | 61 | log_entries = vcs_client.get_log_entries( |
62 | from_tag=previous_tag.name, to_tag=None, skip_merges=skip_merges) | |
62 | from_tag=previous_tag.name, to_tag=None, skip_merges=skip_merges, only_merges=only_merges) | |
63 | 63 | tag2log_entries[previous_tag] = log_entries |
64 | 64 | return tag2log_entries |
65 | 65 | |
66 | 66 | |
67 | def get_forthcoming_changes(vcs_client, skip_merges=False): | |
67 | def get_forthcoming_changes(vcs_client, skip_merges=False, only_merges=False): | |
68 | 68 | tags = _get_version_tags(vcs_client) |
69 | 69 | latest_tag_name = _get_latest_version_tag_name(vcs_client) |
70 | 70 | |
78 | 78 | # ignore non-forthcoming log entries but keep version to identify injection point of forthcoming |
79 | 79 | tag2log_entries[tag] = None |
80 | 80 | log_entries = vcs_client.get_log_entries( |
81 | from_tag=from_tag.name, to_tag=to_tag.name, skip_merges=skip_merges) | |
81 | from_tag=from_tag.name, to_tag=to_tag.name, skip_merges=skip_merges, only_merges=only_merges) | |
82 | 82 | tag2log_entries[from_tag] = log_entries |
83 | 83 | return tag2log_entries |
84 | 84 |
87 | 87 | def get_latest_tag_name(self): |
88 | 88 | raise NotImplementedError() |
89 | 89 | |
90 | def get_log_entries(self, from_tag, to_tag, skip_merges=False): | |
90 | def get_log_entries(self, from_tag, to_tag, skip_merges=False, only_merges=False): | |
91 | 91 | raise NotImplementedError() |
92 | 92 | |
93 | 93 | def replace_repository_references(self, line): |
178 | 178 | tag_name = result_describe['output'] |
179 | 179 | return tag_name |
180 | 180 | |
181 | def get_log_entries(self, from_tag, to_tag, skip_merges=False): | |
181 | def get_log_entries(self, from_tag, to_tag, skip_merges=False, only_merges=False): | |
182 | 182 | # query all hashes in the range |
183 | 183 | cmd = [self._executable, 'log'] |
184 | 184 | if from_tag or to_tag: |
185 | 185 | cmd.append('%s%s' % ('%s..' % to_tag if to_tag else '', from_tag if from_tag else '')) |
186 | 186 | cmd.append('--format=format:%H') |
187 | if skip_merges and only_merges: | |
188 | raise RuntimeError('Both "skip_merges" and "only_merges" are set to True, which contradicts.') | |
187 | 189 | if skip_merges: |
188 | 190 | cmd.append('--no-merges') |
191 | if only_merges: | |
192 | cmd.append('--merges') | |
189 | 193 | result = self._run_command(cmd) |
190 | 194 | if result['returncode']: |
191 | 195 | raise RuntimeError('Could not fetch commit hashes:\n%s' % result['output']) |
347 | 351 | raise RuntimeError('Could not find latest tagn') |
348 | 352 | return tag_name |
349 | 353 | |
350 | def get_log_entries(self, from_tag, to_tag, skip_merges=False): | |
354 | def get_log_entries(self, from_tag, to_tag, skip_merges=False, only_merges=False): | |
351 | 355 | # query all hashes in the range |
352 | 356 | # ascending chronological order, since that makes it easier to handle empty tag names |
353 | 357 | revrange = '%s:%s' % ((to_tag if to_tag else ''), (from_tag if from_tag else 'tip')) |
39 | 39 | |
40 | 40 | def main(sysargs=None): |
41 | 41 | parser = argparse.ArgumentParser(description='Generate a REP-0132 %s' % CHANGELOG_FILENAME) |
42 | group_merge = parser.add_mutually_exclusive_group() | |
42 | 43 | parser.add_argument( |
43 | 44 | '-a', '--all', action='store_true', default=False, |
44 | 45 | help='Generate changelog for all versions instead of only the forthcoming one (only supported when no changelog file exists yet)') |
46 | group_merge.add_argument( | |
47 | '--only-merges', action='store_true', default=False, | |
48 | help='Only add merge commits to the changelog') | |
45 | 49 | parser.add_argument( |
46 | 50 | '--print-root', action='store_true', default=False, |
47 | 51 | help='Output changelog content to the console as if there would be only one package in the root of the repository') |
48 | 52 | parser.add_argument( |
49 | 53 | '--skip-contributors', action='store_true', default=False, |
50 | 54 | help='Skip adding the list of contributors to the changelog') |
51 | parser.add_argument( | |
55 | group_merge.add_argument( | |
52 | 56 | '--skip-merges', action='store_true', default=False, |
53 | 57 | help='Skip adding merge commits to the changelog') |
54 | 58 | parser.add_argument( |
65 | 69 | # printing status messages to stderr to allow piping the changelog to a file |
66 | 70 | if args.all: |
67 | 71 | print('Querying all tags and commit information...', file=sys.stderr) |
68 | tag2log_entries = get_all_changes(vcs_client, skip_merges=args.skip_merges) | |
72 | tag2log_entries = get_all_changes(vcs_client, skip_merges=args.skip_merges, only_merges=args.only_merges) | |
69 | 73 | print('Generating changelog output with all versions...', file=sys.stderr) |
70 | 74 | else: |
71 | 75 | print('Querying commit information since latest tag...', file=sys.stderr) |
72 | tag2log_entries = get_forthcoming_changes(vcs_client, skip_merges=args.skip_merges) | |
76 | tag2log_entries = get_forthcoming_changes(vcs_client, skip_merges=args.skip_merges, only_merges=args.only_merges) | |
73 | 77 | print('Generating changelog files with forthcoming version...', file=sys.stderr) |
74 | 78 | print('', file=sys.stderr) |
75 | 79 | data = generate_changelog_file('repository-level', tag2log_entries, vcs_client=vcs_client) |
105 | 109 | |
106 | 110 | if args.all: |
107 | 111 | print('Querying all tags and commit information...') |
108 | tag2log_entries = get_all_changes(vcs_client, skip_merges=args.skip_merges) | |
112 | tag2log_entries = get_all_changes(vcs_client, skip_merges=args.skip_merges, only_merges=args.only_merges) | |
109 | 113 | print('Generating changelog files with all versions...') |
110 | 114 | generate_changelogs(base_path, packages, tag2log_entries, logger=logging, vcs_client=vcs_client, skip_contributors=args.skip_contributors) |
111 | 115 | else: |
112 | 116 | print('Querying commit information since latest tag...') |
113 | tag2log_entries = get_forthcoming_changes(vcs_client, skip_merges=args.skip_merges) | |
117 | tag2log_entries = get_forthcoming_changes(vcs_client, skip_merges=args.skip_merges, only_merges=args.only_merges) | |
114 | 118 | # separate packages with/without a changelog file |
115 | 119 | packages_without = {pkg_path: package for pkg_path, package in packages.items() if package.name in missing_changelogs} |
116 | 120 | if packages_without: |
49 | 49 | value = pp.Word(pp.alphanums + '_-').setName('value') |
50 | 50 | value.setParseAction(_Value) |
51 | 51 | |
52 | comparison_term = identifier | value | |
52 | double_quoted_value = pp.QuotedString('"').setName( | |
53 | 'double_quoted_value') | |
54 | double_quoted_value.setParseAction(_Value) | |
55 | single_quoted_value = pp.QuotedString("'").setName( | |
56 | 'single_quoted_value') | |
57 | single_quoted_value.setParseAction(_Value) | |
58 | ||
59 | comparison_term = identifier | value | double_quoted_value | \ | |
60 | single_quoted_value | |
53 | 61 | |
54 | 62 | condition = pp.Group(comparison_term + operator + comparison_term).setName('condition') |
55 | 63 | condition.setParseAction(_Condition) |
337 | 337 | def __eq__(self, other): |
338 | 338 | if not isinstance(other, Dependency): |
339 | 339 | return False |
340 | return all(getattr(self, attr) == getattr(other, attr) for attr in self.__slots__) | |
340 | return all(getattr(self, attr) == getattr(other, attr) for attr in self.__slots__ if attr != 'evaluated_condition') | |
341 | 341 | |
342 | 342 | def __hash__(self): |
343 | 343 | return hash(tuple(getattr(self, slot) for slot in self.__slots__)) |
671 | 671 | depends = _get_dependencies(root, 'depend') |
672 | 672 | for dep in depends: |
673 | 673 | # check for collisions with specific dependencies |
674 | same_build_depends = ['build_depend' for d in pkg.build_depends if d.name == dep.name] | |
675 | same_build_export_depends = ['build_export_depend' for d in pkg.build_export_depends if d.name == dep.name] | |
676 | same_exec_depends = ['exec_depend' for d in pkg.exec_depends if d.name == dep.name] | |
674 | same_build_depends = ['build_depend' for d in pkg.build_depends if d == dep] | |
675 | same_build_export_depends = ['build_export_depend' for d in pkg.build_export_depends if d == dep] | |
676 | same_exec_depends = ['exec_depend' for d in pkg.exec_depends if d == dep] | |
677 | 677 | if same_build_depends or same_build_export_depends or same_exec_depends: |
678 | 678 | errors.append("The generic dependency on '%s' is redundant with: %s" % (dep.name, ', '.join(same_build_depends + same_build_export_depends + same_exec_depends))) |
679 | 679 | # only append non-duplicates |
694 | 694 | |
695 | 695 | if pkg.package_format == 1: |
696 | 696 | for test_depend in pkg.test_depends: |
697 | same_build_depends = ['build_depend' for d in pkg.build_depends if d.name == test_depend.name] | |
698 | same_run_depends = ['run_depend' for d in pkg.run_depends if d.name == test_depend.name] | |
697 | same_build_depends = ['build_depend' for d in pkg.build_depends if d == test_depend] | |
698 | same_run_depends = ['run_depend' for d in pkg.run_depends if d == test_depend] | |
699 | 699 | if same_build_depends or same_run_depends: |
700 | 700 | errors.append('The test dependency on "%s" is redundant with: %s' % (test_depend.name, ', '.join(same_build_depends + same_run_depends))) |
701 | 701 |
2 | 2 | ; catkin-pkg-modules same version as in: |
3 | 3 | ; - setup.py |
4 | 4 | ; - src/catkin_pkg/__init__.py |
5 | Depends: python-argparse, python-catkin-pkg-modules (>= 0.4.22), python-dateutil, python-docutils | |
5 | Depends: python-argparse, python-catkin-pkg-modules (>= 0.4.23), python-dateutil, python-docutils | |
6 | 6 | ; catkin-pkg-modules same version as in: |
7 | 7 | ; - setup.py |
8 | 8 | ; - src/catkin_pkg/__init__.py |
9 | Depends3: python3-catkin-pkg-modules (>= 0.4.22), python3-dateutil, python3-docutils | |
9 | Depends3: python3-catkin-pkg-modules (>= 0.4.23), python3-dateutil, python3-docutils | |
10 | 10 | Conflicts: catkin, python3-catkin-pkg |
11 | 11 | Conflicts3: catkin, python-catkin-pkg |
12 | 12 | Copyright-File: LICENSE |
118 | 118 | self.assertTrue(dep.evaluate_condition({})) |
119 | 119 | |
120 | 120 | dep = Dependency('foo', condition='foo <= bar or bar >= baz') |
121 | self.assertFalse(dep.evaluate_condition({})) | |
122 | ||
123 | dep = Dependency('foo', condition='$foo == ""') | |
124 | self.assertTrue(dep.evaluate_condition({})) | |
125 | self.assertFalse(dep.evaluate_condition({'foo': 'foo'})) | |
126 | ||
127 | dep = Dependency('foo', condition='$foo == "foo \' bar"') | |
128 | self.assertTrue(dep.evaluate_condition({'foo': "foo ' bar"})) | |
129 | self.assertFalse(dep.evaluate_condition({})) | |
130 | ||
131 | dep = Dependency('foo', condition="$foo == ''") | |
132 | self.assertTrue(dep.evaluate_condition({})) | |
133 | self.assertFalse(dep.evaluate_condition({'foo': 'foo'})) | |
134 | ||
135 | dep = Dependency('foo', condition="$foo == 'foo \" bar'") | |
136 | self.assertTrue(dep.evaluate_condition({'foo': 'foo " bar'})) | |
121 | 137 | self.assertFalse(dep.evaluate_condition({})) |
122 | 138 | |
123 | 139 | # Testing for more than 1 conditions |