Codebase list mozc / ba394a5
Imported Upstream version 0.12.422.102 Nobuhiro Iwamatsu 13 years ago
753 changed file(s) with 303 addition(s) and 48720 deletion(s). Raw diff Collapse all Expand all
4848 = "\\\\.\\mailslot\\googlejapaneseinput.character_pad.";
4949 #ifdef GOOGLE_JAPANESE_INPUT_BUILD
5050 const char kEventPathPrefix[] = "Global\\GoogleJapaneseInput.event.";
51 const char kMutexPathPrefix[] = "Global\\GoogleJapaneseInput.mutex.";
5152 #else
5253 const char kEventPathPrefix[] = "Global\\Mozc.event.";
54 const char kMutexPathPrefix[] = "Global\\Mozc.mutex.";
5355 #endif // GOOGLE_JAPANESE_INPUT_BUILD
5456 const char kMozcServerName[] = "GoogleIMEJaConverter.exe";
5557 const char kIMEFile[] = "GoogleIMEJa.ime";
3636 #ifdef OS_WINDOWS
3737 #include <windows.h>
3838 #include <psapi.h> // GetModuleFileNameExW
39 #endif
39 #else
40 // For stat system call
41 #include <sys/types.h>
42 #include <sys/stat.h>
43 #include <unistd.h>
44 #endif // OS_WINDOWS
4045
4146 #ifdef OS_MACOSX
4247 #include <sys/sysctl.h>
175180 : mutex_(new Mutex),
176181 ipc_path_info_(new ipc::IPCPathInfo),
177182 name_(name),
178 server_pid_(0) {}
183 server_pid_(0),
184 last_modified_(-1) {}
179185
180186 IPCPathManager::~IPCPathManager() {}
181187
233239
234240 VLOG(1) << "ServerIPCKey: " << ipc_path_info_->key();
235241
242 last_modified_ = GetIPCFileTimeStamp();
236243 return true;
237244 }
238245
242249 return false;
243250 }
244251
245 if (ipc_path_info_->key().empty() && !LoadPathName()) {
252 if ((ShouldReload() || ipc_path_info_->key().empty()) && !LoadPathName()) {
246253 LOG(ERROR) << "GetPathName failed";
247254 return false;
248255 }
379386 return false;
380387 }
381388
389 bool IPCPathManager::ShouldReload() const {
390 #ifdef OS_WINDOWS
391 // In windows, no reloading mechanism is necessary because IPC files
392 // are automatically removed.
393 return false;
394 #else
395 scoped_lock l(mutex_.get());
396
397 time_t last_modified = GetIPCFileTimeStamp();
398 if (last_modified == last_modified_) {
399 return false;
400 }
401
402 return true;
403 #endif // OS_WINDOWS
404 }
405
406 time_t IPCPathManager::GetIPCFileTimeStamp() const {
407 #ifdef OS_WINDOWS
408 // In windows, we don't need to get the exact file timestamp, so
409 // just returns -1 at this time.
410 return static_cast<time_t>(-1);
411 #else
412 const string filename = GetIPCKeyFileName(name_);
413 struct stat filestat;
414 if (::stat(filename.c_str(), &filestat) == -1) {
415 VLOG(2) << "stat(2) failed. Skipping reload";
416 return static_cast<time_t>(-1);
417 }
418 return filestat.st_mtime;
419 #endif // OS_WINDOWS
420 }
421
382422 bool IPCPathManager::LoadPathName() {
383423 scoped_lock l(mutex_.get());
384424
459499 VLOG(1) << "ClientIPCKey: " << ipc_path_info_->key();
460500 VLOG(1) << "ProtocolVersion: " << ipc_path_info_->protocol_version();
461501
502 last_modified_ = GetIPCFileTimeStamp();
462503 return true;
463504 }
464505 } // namespace mozc
2929 #ifndef MOZC_IPC_IPC_PATH_MANAGER_H_
3030 #define MOZC_IPC_IPC_PATH_MANAGER_H_
3131
32 #ifdef OS_WINDOWS
33 #include <time.h> // for time_t
34 #else
35 #include <sys/time.h> // for time_t
36 #endif // OS_WINDOWS
3237 #include <string>
3338 #include "base/base.h"
3439 #include "base/mutex.h"
40 // For FRIEND_TEST
41 #include "testing/base/public/gunit_prod.h"
3542
3643 namespace mozc {
3744
9299 virtual ~IPCPathManager();
93100
94101 private:
102 FRIEND_TEST(IPCPathManagerTest, ReloadTest);
95103
96104 // Load ipc name from ~/.mozc/.ipc
97105 // Note that this method overwrites the ipc_key_
98106 bool LoadPathName();
107
108 // Returns true if the ipc file is updated after it load.
109 bool ShouldReload() const;
110
111 // Returns the last modified timestamp of the IPC file.
112 time_t GetIPCFileTimeStamp() const;
99113
100114 scoped_ptr<ProcessMutex> path_mutex_; // lock ipc path file
101115 scoped_ptr<Mutex> mutex_; // mutex for methods
103117 string name_;
104118 string server_path_; // cache for server_path
105119 uint32 server_pid_; // cache for pid of server_path
120 time_t last_modified_;
106121 };
107122 } // mozc
108123
2727 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
2828
2929 #include "base/base.h"
30 #include "base/file_stream.h"
31 #include "base/process_mutex.h"
3032 #include "base/util.h"
3133 #include "base/thread.h"
3234 #include "ipc/ipc_path_manager.h"
7173 }
7274 }
7375 };
76 } // anonymous namespace
7477
7578 TEST(IPCPathManagerTest, IPCPathManagerTest) {
7679 mozc::Util::SetUserProfileDirectory(FLAGS_test_tmpdir);
101104 threads[i].Join();
102105 }
103106 }
104 } // namespace
107
108 TEST(IPCPathManagerTest, ReloadTest) {
109 // We have only mock implementations for Windows, so no test should be run.
110 #ifndef OS_WINDOWS
111 mozc::IPCPathManager *manager =
112 mozc::IPCPathManager::GetIPCPathManager("reload_test");
113
114 EXPECT_TRUE(manager->CreateNewPathName());
115 EXPECT_TRUE(manager->SavePathName());
116
117 // Just after the save, there are no need to reload.
118 EXPECT_FALSE(manager->ShouldReload());
119
120 // Modify the saved file explicitly.
121 EXPECT_TRUE(manager->path_mutex_->UnLock());
122 Util::Sleep(1000 /* msec */);
123 string filename = Util::JoinPath(
124 Util::GetUserProfileDirectory(), ".reload_test.ipc");
125 OutputFileStream outf(filename.c_str());
126 outf << "foobar";
127 outf.close();
128
129 EXPECT_TRUE(manager->ShouldReload());
130 #endif // OS_WINDOWS
131 }
105132 } // mozc
3434 #include <windows.h>
3535 #include <string>
3636 #include "base/base.h"
37 #include "base/const.h"
3738 #include "base/mutex.h"
39 #include "base/singleton.h"
3840 #include "base/util.h"
3941 #include "ipc/ipc_path_manager.h"
4042 #include "third_party/mozc/sandbox/security_attributes.h"
6668 reinterpret_cast<FPGetNamedPipeServerProcessId>
6769 (::GetProcAddress(lib, "GetNamedPipeServerProcessId"));
6870 }
71
72 class IPCClientMutex {
73 public:
74 IPCClientMutex() {
75 // Make a kernel mutex object so that multiple ipc connections are
76 // serialized here. In Windows, there is no useful way to serialize
77 // the multiple connections to the single-thread named pipe server.
78 // WaitForNamedPipe doesn't work for this propose as it just lets
79 // clients know that the connection becomes "available" right now.
80 // It doesn't mean that connection is available for the current
81 /// thread. The "available" notification is sent to all waiting ipc
82 // clients at the same time and only one client gets the connection.
83 // This causes redundant and wasteful CreateFile calles.
84 string mutex_name = kMutexPathPrefix;
85 mutex_name += Util::GetUserSidAsString();
86 mutex_name += ".ipc";
87 wstring wmutex_name;
88 Util::UTF8ToWide(mutex_name.c_str(), &wmutex_name);
89
90 LPSECURITY_ATTRIBUTES security_attributes_ptr = NULL;
91 SECURITY_ATTRIBUTES security_attributes;
92 if (!sandbox::MakeSecurityAttributes(&security_attributes)) {
93 LOG(ERROR) << "Cannot make SecurityAttributes";
94 } else {
95 security_attributes_ptr = &security_attributes;
96 }
97
98 // http://msdn.microsoft.com/en-us/library/ms682411(VS.85).aspx:
99 // Two or more processes can call CreateMutex to create the same named
100 // mutex. The first process actually creates the mutex, and subsequent
101 // processes with sufficient access rights simply open a handle to
102 // the existing mutex. This enables multiple processes to get handles
103 // of the same mutex, while relieving the user of the responsibility
104 // of ensuring that the creating process is started first.
105 // When using this technique, you should set the
106 // bInitialOwner flag to FALSE; otherwise, it can be difficult to be
107 // certain which process has initial ownership.
108 ipc_mutex_.reset(::CreateMutex(security_attributes_ptr,
109 FALSE, wmutex_name.c_str()));
110
111 if (ipc_mutex_.get() == NULL) {
112 LOG(ERROR) << "CreateMutex failed: " << ::GetLastError();
113 return;
114 }
115
116 // permit the access from a process runinning with low integrity level
117 if (Util::IsVistaOrLater()) {
118 sandbox::SetMandatoryLabelW(ipc_mutex_.get(),
119 SE_KERNEL_OBJECT, L"NX", L"LW");
120 }
121 }
122
123 virtual ~IPCClientMutex() {}
124
125 HANDLE get() const {
126 return ipc_mutex_.get();
127 }
128
129 private:
130 ScopedHandle ipc_mutex_;
131 };
132
133 // RAII class for calling ReleaseMutex in destructor.
134 class ScopedReleaseMutex {
135 public:
136 ScopedReleaseMutex(HANDLE handle)
137 : handle_(handle) {}
138
139 virtual ~ScopedReleaseMutex() {
140 if (NULL != handle_) {
141 ::ReleaseMutex(handle_);
142 }
143 }
144
145 HANDLE get() const { return handle_; }
146 private:
147 HANDLE handle_;
148 };
69149
70150 uint32 GetServerProcessId(HANDLE handle) {
71151 CallOnce(&g_once, &InitFPGetNamedPipeServerProcessId);
385465 void IPCClient::Init(const string &name, const string &server_path) {
386466 last_ipc_error_ = IPC_NO_CONNECTION;
387467
468 // TODO(taku): ICPClientMutex doesn't take IPC path name into consideration.
469 // Currently, it is not a critical problem, as we only have single
470 // channel (session).
471 ScopedReleaseMutex ipc_mutex(Singleton<IPCClientMutex>::get()->get());
472
473 if (ipc_mutex.get() == NULL) {
474 LOG(ERROR) << "IPC mutex is not available";
475 } else {
476 const int kMutexTimeout = 10 * 1000; // wait at most 10sec.
477 switch (::WaitForSingleObject(ipc_mutex.get(), kMutexTimeout)) {
478 case WAIT_TIMEOUT:
479 // TODO(taku): with suspend/resume, WaitForSingleObject may
480 // return WAIT_TIMEOUT. We have to consider the case
481 // in the future.
482 LOG(ERROR) << "IPC client was not available even after "
483 << kMutexTimeout << " msec.";
484 break;
485 case WAIT_ABANDONED:
486 DLOG(INFO) << "mutex object was removed";
487 break;
488 case WAIT_OBJECT_0:
489 break;
490 default:
491 break;
492 }
493 }
494
388495 IPCPathManager *manager = IPCPathManager::GetIPCPathManager(name);
389496 if (manager == NULL) {
390497 LOG(ERROR) << "IPCPathManager::GetIPCPathManager failed";
440547 }
441548
442549 // wait for 10 second until server is ready
443 // TODO(taku): control the timout via flag.
550 // TODO(taku): control the timeout via flag.
444551 #ifdef _DEBUG
445552 const int kNamedPipeTimeout = 100000; // 100 sec
446553 #else
+0
-5
mozc_build_tools/gyp/AUTHORS less more
0 # Names should be added to this file like so:
1 # Name or Organization <email address>
2
3 Google Inc.
4 Steven Knight <knight@baldmt.com>
+0
-8
mozc_build_tools/gyp/DEPS less more
0 # DEPS file for gclient use in buildbot execution of gyp tests.
1 #
2 # (You don't need to use gclient for normal GYP development work.)
3
4 deps = {
5 "scons":
6 "svn://chrome-svn.corp.google.com/chrome/trunk/src/third_party/scons",
7 }
+0
-27
mozc_build_tools/gyp/LICENSE less more
0 Copyright (c) 2009 Google Inc. All rights reserved.
1
2 Redistribution and use in source and binary forms, with or without
3 modification, are permitted provided that the following conditions are
4 met:
5
6 * Redistributions of source code must retain the above copyright
7 notice, this list of conditions and the following disclaimer.
8 * Redistributions in binary form must reproduce the above
9 copyright notice, this list of conditions and the following disclaimer
10 in the documentation and/or other materials provided with the
11 distribution.
12 * Neither the name of Google Inc. nor the names of its
13 contributors may be used to endorse or promote products derived from
14 this software without specific prior written permission.
15
16 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+0
-21
mozc_build_tools/gyp/MANIFEST less more
0 setup.py
1 gyp
2 LICENSE
3 AUTHORS
4 pylib/gyp/MSVSNew.py
5 pylib/gyp/MSVSProject.py
6 pylib/gyp/MSVSToolFile.py
7 pylib/gyp/MSVSUserFile.py
8 pylib/gyp/MSVSVersion.py
9 pylib/gyp/SCons.py
10 pylib/gyp/__init__.py
11 pylib/gyp/common.py
12 pylib/gyp/input.py
13 pylib/gyp/xcodeproj_file.py
14 pylib/gyp/generator/__init__.py
15 pylib/gyp/generator/gypd.py
16 pylib/gyp/generator/gypsh.py
17 pylib/gyp/generator/make.py
18 pylib/gyp/generator/msvs.py
19 pylib/gyp/generator/scons.py
20 pylib/gyp/generator/xcode.py
+0
-53
mozc_build_tools/gyp/PRESUBMIT.py less more
0 # Copyright 2010, Google Inc.
1 # All rights reserved.
2 #
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions are
5 # met:
6 #
7 # * Redistributions of source code must retain the above copyright
8 # notice, this list of conditions and the following disclaimer.
9 # * Redistributions in binary form must reproduce the above
10 # copyright notice, this list of conditions and the following disclaimer
11 # in the documentation and/or other materials provided with the
12 # distribution.
13 # * Neither the name of Google Inc. nor the names of its
14 # contributors may be used to endorse or promote products derived from
15 # this software without specific prior written permission.
16 #
17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
29
30 EXCLUDED_PATHS = ()
31
32
33 def CheckChangeOnUpload(input_api, output_api):
34 report = []
35 black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS
36 sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
37 report.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
38 input_api, output_api, sources))
39 return report
40
41
42 def CheckChangeOnCommit(input_api, output_api):
43 report = []
44 black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS
45 sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
46 report.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
47 input_api, output_api, sources))
48 report.extend(input_api.canned_checks.CheckTreeIsOpen(
49 input_api, output_api,
50 'http://gyp-status.appspot.com/status',
51 'http://gyp-status.appspot.com/current'))
52 return report
+0
-10
mozc_build_tools/gyp/codereview.settings less more
0 # This file is used by gcl to get repository specific information.
1 CODE_REVIEW_SERVER: codereview.chromium.org
2 CC_LIST: gyp-developer@googlegroups.com
3 VIEW_VC: http://code.google.com/p/gyp/source/detail?r=
4 TRY_ON_UPLOAD: True
5 TRYSERVER_PROJECT: gyp
6 TRYSERVER_PATCHLEVEL: 0
7 TRYSERVER_ROOT: trunk
8 TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl
9
+0
-18
mozc_build_tools/gyp/gyp less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 # TODO(mark): sys.path manipulation is some temporary testing stuff.
9 try:
10 import gyp
11 except ImportError, e:
12 import os.path
13 sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
14 import gyp
15
16 if __name__ == '__main__':
17 sys.exit(gyp.main(sys.argv[1:]))
+0
-5
mozc_build_tools/gyp/gyp.bat less more
0 @rem Copyright (c) 2009 Google Inc. All rights reserved.
1 @rem Use of this source code is governed by a BSD-style license that can be
2 @rem found in the LICENSE file.
3
4 @python "%~dp0/gyp" %*
+0
-7
mozc_build_tools/gyp/gyp_dummy.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 int main() {
5 return 0;
6 }
+0
-255
mozc_build_tools/gyp/gyptest.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 __doc__ = """
7 gyptest.py -- test runner for GYP tests.
8 """
9
10 import os
11 import optparse
12 import subprocess
13 import sys
14
15 class CommandRunner:
16 """
17 Executor class for commands, including "commands" implemented by
18 Python functions.
19 """
20 verbose = True
21 active = True
22
23 def __init__(self, dictionary={}):
24 self.subst_dictionary(dictionary)
25
26 def subst_dictionary(self, dictionary):
27 self._subst_dictionary = dictionary
28
29 def subst(self, string, dictionary=None):
30 """
31 Substitutes (via the format operator) the values in the specified
32 dictionary into the specified command.
33
34 The command can be an (action, string) tuple. In all cases, we
35 perform substitution on strings and don't worry if something isn't
36 a string. (It's probably a Python function to be executed.)
37 """
38 if dictionary is None:
39 dictionary = self._subst_dictionary
40 if dictionary:
41 try:
42 string = string % dictionary
43 except TypeError:
44 pass
45 return string
46
47 def display(self, command, stdout=None, stderr=None):
48 if not self.verbose:
49 return
50 if type(command) == type(()):
51 func = command[0]
52 args = command[1:]
53 s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args)))
54 if type(command) == type([]):
55 # TODO: quote arguments containing spaces
56 # TODO: handle meta characters?
57 s = ' '.join(command)
58 else:
59 s = self.subst(command)
60 if not s.endswith('\n'):
61 s += '\n'
62 sys.stdout.write(s)
63 sys.stdout.flush()
64
65 def execute(self, command, stdout=None, stderr=None):
66 """
67 Executes a single command.
68 """
69 if not self.active:
70 return 0
71 if type(command) == type(''):
72 command = self.subst(command)
73 cmdargs = shlex.split(command)
74 if cmdargs[0] == 'cd':
75 command = (os.chdir,) + tuple(cmdargs[1:])
76 if type(command) == type(()):
77 func = command[0]
78 args = command[1:]
79 return func(*args)
80 else:
81 if stdout is sys.stdout:
82 # Same as passing sys.stdout, except python2.4 doesn't fail on it.
83 subout = None
84 else:
85 # Open pipe for anything else so Popen works on python2.4.
86 subout = subprocess.PIPE
87 if stderr is sys.stderr:
88 # Same as passing sys.stderr, except python2.4 doesn't fail on it.
89 suberr = None
90 elif stderr is None:
91 # Merge with stdout if stderr isn't specified.
92 suberr = subprocess.STDOUT
93 else:
94 # Open pipe for anything else so Popen works on python2.4.
95 suberr = subprocess.PIPE
96 p = subprocess.Popen(command,
97 shell=(sys.platform == 'win32'),
98 stdout=subout,
99 stderr=suberr)
100 p.wait()
101 if stdout is None:
102 self.stdout = p.stdout.read()
103 elif stdout is not sys.stdout:
104 stdout.write(p.stdout.read())
105 if stderr not in (None, sys.stderr):
106 stderr.write(p.stderr.read())
107 return p.returncode
108
109 def run(self, command, display=None, stdout=None, stderr=None):
110 """
111 Runs a single command, displaying it first.
112 """
113 if display is None:
114 display = command
115 self.display(display)
116 return self.execute(command, stdout, stderr)
117
118
119 class Unbuffered:
120 def __init__(self, fp):
121 self.fp = fp
122 def write(self, arg):
123 self.fp.write(arg)
124 self.fp.flush()
125 def __getattr__(self, attr):
126 return getattr(self.fp, attr)
127
128 sys.stdout = Unbuffered(sys.stdout)
129 sys.stderr = Unbuffered(sys.stderr)
130
131
132 def find_all_gyptest_files(directory):
133 result = []
134 for root, dirs, files in os.walk(directory):
135 if '.svn' in dirs:
136 dirs.remove('.svn')
137 result.extend([ os.path.join(root, f) for f in files
138 if f.startswith('gyptest') and f.endswith('.py') ])
139 result.sort()
140 return result
141
142
143 def main(argv=None):
144 if argv is None:
145 argv = sys.argv
146
147 usage = "gyptest.py [-ahlnq] [-f formats] [test ...]"
148 parser = optparse.OptionParser(usage=usage)
149 parser.add_option("-a", "--all", action="store_true",
150 help="run all tests")
151 parser.add_option("-C", "--chdir", action="store", default=None,
152 help="chdir to the specified directory")
153 parser.add_option("-f", "--format", action="store", default='',
154 help="run tests with the specified formats")
155 parser.add_option("-l", "--list", action="store_true",
156 help="list available tests and exit")
157 parser.add_option("-n", "--no-exec", action="store_true",
158 help="no execute, just print the command line")
159 parser.add_option("--passed", action="store_true",
160 help="report passed tests")
161 parser.add_option("--path", action="append", default=[],
162 help="additional $PATH directory")
163 parser.add_option("-q", "--quiet", action="store_true",
164 help="quiet, don't print test command lines")
165 opts, args = parser.parse_args(argv[1:])
166
167 if opts.chdir:
168 os.chdir(opts.chdir)
169
170 if opts.path:
171 os.environ['PATH'] += ':' + ':'.join(opts.path)
172
173 if not args:
174 if not opts.all:
175 sys.stderr.write('Specify -a to get all tests.\n')
176 return 1
177 args = ['test']
178
179 tests = []
180 for arg in args:
181 if os.path.isdir(arg):
182 tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
183 else:
184 tests.append(arg)
185
186 if opts.list:
187 for test in tests:
188 print test
189 sys.exit(0)
190
191 CommandRunner.verbose = not opts.quiet
192 CommandRunner.active = not opts.no_exec
193 cr = CommandRunner()
194
195 os.environ['PYTHONPATH'] = os.path.abspath('test/lib')
196 if not opts.quiet:
197 sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH'])
198
199 passed = []
200 failed = []
201 no_result = []
202
203 if opts.format:
204 format_list = opts.format.split(',')
205 else:
206 # TODO: not duplicate this mapping from pylib/gyp/__init__.py
207 format_list = [ {
208 'freebsd7': 'make',
209 'freebsd8': 'make',
210 'cygwin': 'msvs',
211 'win32': 'msvs',
212 'linux2': 'make',
213 'darwin': 'xcode',
214 }[sys.platform] ]
215
216 for format in format_list:
217 os.environ['TESTGYP_FORMAT'] = format
218 if not opts.quiet:
219 sys.stdout.write('TESTGYP_FORMAT=%s\n' % format)
220
221 for test in tests:
222 status = cr.run([sys.executable, test],
223 stdout=sys.stdout,
224 stderr=sys.stderr)
225 if status == 2:
226 no_result.append(test)
227 elif status:
228 failed.append(test)
229 else:
230 passed.append(test)
231
232 if not opts.quiet:
233 def report(description, tests):
234 if tests:
235 if len(tests) == 1:
236 sys.stdout.write("\n%s the following test:\n" % description)
237 else:
238 fmt = "\n%s the following %d tests:\n"
239 sys.stdout.write(fmt % (description, len(tests)))
240 sys.stdout.write("\t" + "\n\t".join(tests) + "\n")
241
242 if opts.passed:
243 report("Passed", passed)
244 report("Failed", failed)
245 report("No result from", no_result)
246
247 if failed:
248 return 1
249 else:
250 return 0
251
252
253 if __name__ == "__main__":
254 sys.exit(main())
+0
-331
mozc_build_tools/gyp/pylib/gyp/MSVSNew.py less more
0 #!/usr/bin/python2.4
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """New implementation of Visual Studio project generation for SCons."""
7
8 import common
9 import os
10 import random
11
12 # hashlib is supplied as of Python 2.5 as the replacement interface for md5
13 # and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
14 # available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
15 # preserving 2.4 compatibility.
16 try:
17 import hashlib
18 _new_md5 = hashlib.md5
19 except ImportError:
20 import md5
21 _new_md5 = md5.new
22
23
24 # Initialize random number generator
25 random.seed()
26
27 # GUIDs for project types
28 ENTRY_TYPE_GUIDS = {
29 'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
30 'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
31 }
32
33 #------------------------------------------------------------------------------
34 # Helper functions
35
36
37 def MakeGuid(name, seed='msvs_new'):
38 """Returns a GUID for the specified target name.
39
40 Args:
41 name: Target name.
42 seed: Seed for MD5 hash.
43 Returns:
44 A GUID-line string calculated from the name and seed.
45
46 This generates something which looks like a GUID, but depends only on the
47 name and seed. This means the same name/seed will always generate the same
48 GUID, so that projects and solutions which refer to each other can explicitly
49 determine the GUID to refer to explicitly. It also means that the GUID will
50 not change when the project for a target is rebuilt.
51 """
52 # Calculate a MD5 signature for the seed and name.
53 d = _new_md5(str(seed) + str(name)).hexdigest().upper()
54 # Convert most of the signature to GUID form (discard the rest)
55 guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
56 + '-' + d[20:32] + '}')
57 return guid
58
59 #------------------------------------------------------------------------------
60
61
62 class MSVSFolder:
63 """Folder in a Visual Studio project or solution."""
64
65 def __init__(self, path, name = None, entries = None,
66 guid = None, items = None):
67 """Initializes the folder.
68
69 Args:
70 path: Full path to the folder.
71 name: Name of the folder.
72 entries: List of folder entries to nest inside this folder. May contain
73 Folder or Project objects. May be None, if the folder is empty.
74 guid: GUID to use for folder, if not None.
75 items: List of solution items to include in the folder project. May be
76 None, if the folder does not directly contain items.
77 """
78 if name:
79 self.name = name
80 else:
81 # Use last layer.
82 self.name = os.path.basename(path)
83
84 self.path = path
85 self.guid = guid
86
87 # Copy passed lists (or set to empty lists)
88 self.entries = list(entries or [])
89 self.items = list(items or [])
90
91 self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']
92
93 def get_guid(self):
94 if self.guid is None:
95 # Use consistent guids for folders (so things don't regenerate).
96 self.guid = MakeGuid(self.path, seed='msvs_folder')
97 return self.guid
98
99
100 #------------------------------------------------------------------------------
101
102
103 class MSVSProject:
104 """Visual Studio project."""
105
106 def __init__(self, path, name = None, dependencies = None, guid = None,
107 config_platform_overrides = None):
108 """Initializes the project.
109
110 Args:
111 path: Relative path to project file.
112 name: Name of project. If None, the name will be the same as the base
113 name of the project file.
114 dependencies: List of other Project objects this project is dependent
115 upon, if not None.
116 guid: GUID to use for project, if not None.
117 config_platform_overrides: optional dict of configuration platforms to
118 used in place of the default for this target.
119 """
120 self.path = path
121 self.guid = guid
122
123 if name:
124 self.name = name
125 else:
126 # Use project filename
127 self.name = os.path.splitext(os.path.basename(path))[0]
128
129 # Copy passed lists (or set to empty lists)
130 self.dependencies = list(dependencies or [])
131
132 self.entry_type_guid = ENTRY_TYPE_GUIDS['project']
133
134 if config_platform_overrides:
135 self.config_platform_overrides = config_platform_overrides
136 else:
137 self.config_platform_overrides = {}
138
139 def get_guid(self):
140 if self.guid is None:
141 # Set GUID from path
142 # TODO(rspangler): This is fragile.
143 # 1. We can't just use the project filename sans path, since there could
144 # be multiple projects with the same base name (for example,
145 # foo/unittest.vcproj and bar/unittest.vcproj).
146 # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
147 # GUID is the same whether it's included from base/base.sln or
148 # foo/bar/baz/baz.sln.
149 # 3. The GUID needs to be the same each time this builder is invoked, so
150 # that we don't need to rebuild the solution when the project changes.
151 # 4. We should be able to handle pre-built project files by reading the
152 # GUID from the files.
153 self.guid = MakeGuid(self.name)
154 return self.guid
155
156 #------------------------------------------------------------------------------
157
158
159 class MSVSSolution:
160 """Visual Studio solution."""
161
162 def __init__(self, path, version, entries=None, variants=None,
163 websiteProperties=True):
164 """Initializes the solution.
165
166 Args:
167 path: Path to solution file.
168 version: Format version to emit.
169 entries: List of entries in solution. May contain Folder or Project
170 objects. May be None, if the folder is empty.
171 variants: List of build variant strings. If none, a default list will
172 be used.
173 websiteProperties: Flag to decide if the website properties section
174 is generated.
175 """
176 self.path = path
177 self.websiteProperties = websiteProperties
178 self.version = version
179
180 # Copy passed lists (or set to empty lists)
181 self.entries = list(entries or [])
182
183 if variants:
184 # Copy passed list
185 self.variants = variants[:]
186 else:
187 # Use default
188 self.variants = ['Debug|Win32', 'Release|Win32']
189 # TODO(rspangler): Need to be able to handle a mapping of solution config
190 # to project config. Should we be able to handle variants being a dict,
191 # or add a separate variant_map variable? If it's a dict, we can't
192 # guarantee the order of variants since dict keys aren't ordered.
193
194
195 # TODO(rspangler): Automatically write to disk for now; should delay until
196 # node-evaluation time.
197 self.Write()
198
199
  def Write(self, writer=common.WriteOnDiff):
    """Writes the solution file to disk.

    Args:
      writer: Factory returning a file-like object for self.path; defaults
          to common.WriteOnDiff so an unchanged solution is not rewritten.

    Duplicate entries in the folder tree are silently skipped; the
    historical IndexError on duplicates is commented out below.
    """
    # Walk the entry tree and collect all the folders and projects.
    all_entries = []
    entries_to_check = self.entries[:]
    while entries_to_check:
      # Pop from the beginning of the list to preserve the user's order.
      e = entries_to_check.pop(0)

      # A project or folder can only appear once in the solution's folder tree.
      # This also protects from cycles.
      if e in all_entries:
        #raise IndexError('Entry "%s" appears more than once in solution' %
        #                 e.name)
        continue

      all_entries.append(e)

      # If this is a folder, check its entries too.
      if isinstance(e, MSVSFolder):
        entries_to_check += e.entries

    # Sort by name then guid (so things are in order on vs2008).
    # NOTE: Python 2 cmp-style comparator, passed positionally to sorted().
    def NameThenGuid(a, b):
      if a.name < b.name: return -1
      if a.name > b.name: return 1
      if a.get_guid() < b.get_guid(): return -1
      if a.get_guid() > b.get_guid(): return 1
      return 0

    all_entries = sorted(all_entries, NameThenGuid)

    # Open file and print header
    f = writer(self.path)
    f.write('Microsoft Visual Studio Solution File, '
            'Format Version %s\r\n' % self.version.SolutionVersion())
    f.write('# %s\r\n' % self.version.Description())

    # Project entries
    for e in all_entries:
      f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
          e.entry_type_guid,          # Entry type GUID
          e.name,                     # Folder name
          e.path.replace('/', '\\'),  # Folder name (again)
          e.get_guid(),               # Entry GUID
      ))

      # TODO(rspangler): Need a way to configure this stuff
      if self.websiteProperties:
        f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
                '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
                '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
                '\tEndProjectSection\r\n')

      # Folders list their loose items in a SolutionItems section.
      if isinstance(e, MSVSFolder):
        if e.items:
          f.write('\tProjectSection(SolutionItems) = preProject\r\n')
          for i in e.items:
            f.write('\t\t%s = %s\r\n' % (i, i))
          f.write('\tEndProjectSection\r\n')

      # Projects list their build-order dependencies by GUID.
      if isinstance(e, MSVSProject):
        if e.dependencies:
          f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
          for d in e.dependencies:
            f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
          f.write('\tEndProjectSection\r\n')

      f.write('EndProject\r\n')

    # Global section
    f.write('Global\r\n')

    # Configurations (variants)
    f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
    for v in self.variants:
      f.write('\t\t%s = %s\r\n' % (v, v))
    f.write('\tEndGlobalSection\r\n')

    # Sort config guids for easier diffing of solution changes.
    config_guids = []
    config_guids_overrides = {}
    for e in all_entries:
      if isinstance(e, MSVSProject):
        config_guids.append(e.get_guid())
        config_guids_overrides[e.get_guid()] = e.config_platform_overrides
    config_guids.sort()

    f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
    for g in config_guids:
      for v in self.variants:
        nv = config_guids_overrides[g].get(v, v)
        # Pick which project configuration to build for this solution
        # configuration.
        f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
            g,   # Project GUID
            v,   # Solution build configuration
            nv,  # Project build config for that solution config
        ))

        # Enable project in this solution configuration.
        f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
            g,   # Project GUID
            v,   # Solution build configuration
            nv,  # Project build config for that solution config
        ))
    f.write('\tEndGlobalSection\r\n')

    # TODO(rspangler): Should be able to configure this stuff too (though I've
    # never seen this be any different)
    f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
    f.write('\t\tHideSolutionNode = FALSE\r\n')
    f.write('\tEndGlobalSection\r\n')

    # Folder mappings
    # TODO(rspangler): Should omit this section if there are no folders
    f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
    for e in all_entries:
      if not isinstance(e, MSVSFolder):
        continue  # Does not apply to projects, only folders
      for subentry in e.entries:
        f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
    f.write('\tEndGlobalSection\r\n')

    f.write('EndGlobal\r\n')

    f.close()
+0
-244
mozc_build_tools/gyp/pylib/gyp/MSVSProject.py less more
0 #!/usr/bin/python2.4
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Visual Studio project reader/writer."""
7
8 import common
9 import xml.dom
10 import xml.dom.minidom
11 import MSVSNew
12
13 #------------------------------------------------------------------------------
14
15
class Tool(object):
  """Represents one Visual Studio tool entry (a <Tool> XML node)."""

  def __init__(self, name, attrs=None):
    """Initializes the tool.

    Args:
      name: Tool name.
      attrs: Dict of tool attributes; may be None for no attributes.
    """
    self.name = name
    if attrs:
      self.attrs = attrs
    else:
      self.attrs = {}

  def CreateElement(self, doc):
    """Creates an element for the tool.

    Args:
      doc: xml.dom.Document object to use for node creation.

    Returns:
      A new xml.dom.Element for the tool, carrying 'Name' plus every
      entry from self.attrs as an XML attribute.
    """
    element = doc.createElement('Tool')
    element.setAttribute('Name', self.name)
    for attr_name, attr_value in self.attrs.items():
      element.setAttribute(attr_name, attr_value)
    return element
43
44
class Filter(object):
  """Visual Studio filter - that is, a virtual folder grouping files."""

  def __init__(self, name, contents=None):
    """Initializes the virtual folder.

    Args:
      name: Filter (folder) name.
      contents: List of filenames and/or Filter objects contained; may be
          None, in which case the filter starts out empty.  The sequence
          is copied, so later mutation by the caller has no effect here.
    """
    self.name = name
    if contents is None:
      contents = []
    self.contents = list(contents)
57
58
59 #------------------------------------------------------------------------------
60
61
class Writer(object):
  """Visual Studio XML project (.vcproj) writer."""

  def __init__(self, project_path, version):
    """Initializes the project.

    Args:
      project_path: Path to the project file.
      version: Format version to emit; its ProjectVersion() supplies the
          root element's 'Version' attribute.
    """
    self.project_path = project_path
    self.doc = None
    self.version = version

  def Create(self, name, guid=None, platforms=None):
    """Creates the project document.

    Args:
      name: Name of the project.
      guid: GUID to use for project, if not None; otherwise one is derived
          deterministically from the project path via MSVSNew.MakeGuid.
      platforms: List of platform names; defaults to ['Win32'].
    """
    self.name = name
    self.guid = guid or MSVSNew.MakeGuid(self.project_path)

    # Default to Win32 for platforms.
    if not platforms:
      platforms = ['Win32']

    # Create XML doc
    xml_impl = xml.dom.getDOMImplementation()
    self.doc = xml_impl.createDocument(None, 'VisualStudioProject', None)

    # Add attributes to root element
    self.n_root = self.doc.documentElement
    self.n_root.setAttribute('ProjectType', 'Visual C++')
    self.n_root.setAttribute('Version', self.version.ProjectVersion())
    self.n_root.setAttribute('Name', self.name)
    self.n_root.setAttribute('ProjectGUID', self.guid)
    self.n_root.setAttribute('RootNamespace', self.name)
    self.n_root.setAttribute('Keyword', 'Win32Proj')

    # Add platform list
    n_platform = self.doc.createElement('Platforms')
    self.n_root.appendChild(n_platform)
    for platform in platforms:
      n = self.doc.createElement('Platform')
      n.setAttribute('Name', platform)
      n_platform.appendChild(n)

    # Add tool files section
    self.n_tool_files = self.doc.createElement('ToolFiles')
    self.n_root.appendChild(self.n_tool_files)

    # Add configurations section
    self.n_configs = self.doc.createElement('Configurations')
    self.n_root.appendChild(self.n_configs)

    # Add empty References section
    self.n_root.appendChild(self.doc.createElement('References'))

    # Add files section
    self.n_files = self.doc.createElement('Files')
    self.n_root.appendChild(self.n_files)
    # Keep a dict keyed on filename to speed up access.
    self.n_files_dict = dict()

    # Add empty Globals section
    self.n_root.appendChild(self.doc.createElement('Globals'))

  def AddToolFile(self, path):
    """Adds a tool file to the project.

    Args:
      path: Relative path from project to tool file.
    """
    n_tool = self.doc.createElement('ToolFile')
    n_tool.setAttribute('RelativePath', path)
    self.n_tool_files.appendChild(n_tool)

  def _AddConfigToNode(self, parent, config_type, config_name, attrs=None,
                       tools=None):
    """Adds a configuration to the parent node.

    Args:
      parent: Destination node.
      config_type: Type of configuration node (element tag name).
      config_name: Configuration name.
      attrs: Dict of configuration attributes; may be None.
      tools: List of tools (strings or Tool objects); may be None.
    """
    # Handle defaults
    if not attrs:
      attrs = {}
    if not tools:
      tools = []

    # Add configuration node and its attributes
    n_config = self.doc.createElement(config_type)
    n_config.setAttribute('Name', config_name)
    for k, v in attrs.items():
      n_config.setAttribute(k, v)
    parent.appendChild(n_config)

    # Add tool nodes and their attributes
    if tools:
      for t in tools:
        if isinstance(t, Tool):
          n_config.appendChild(t.CreateElement(self.doc))
        else:
          # Bare string: wrap in a Tool with no extra attributes.
          n_config.appendChild(Tool(t).CreateElement(self.doc))

  def AddConfig(self, name, attrs=None, tools=None):
    """Adds a configuration to the project.

    Args:
      name: Configuration name.
      attrs: Dict of configuration attributes; may be None.
      tools: List of tools (strings or Tool objects); may be None.
    """
    self._AddConfigToNode(self.n_configs, 'Configuration', name, attrs, tools)

  def _AddFilesToNode(self, parent, files):
    """Adds files and/or filters to the parent node.

    Args:
      parent: Destination node
      files: A list of Filter objects and/or relative paths to files.

    Will call itself recursively, if the files list contains Filter objects.
    """
    for f in files:
      if isinstance(f, Filter):
        node = self.doc.createElement('Filter')
        node.setAttribute('Name', f.name)
        self._AddFilesToNode(node, f.contents)
      else:
        node = self.doc.createElement('File')
        node.setAttribute('RelativePath', f)
        # Remember the node so AddFileConfig can find it by path.
        self.n_files_dict[f] = node
      parent.appendChild(node)

  def AddFiles(self, files):
    """Adds files to the project.

    Args:
      files: A list of Filter objects and/or relative paths to files.

    This makes a copy of the file/filter tree at the time of this call.  If you
    later add files to a Filter object which was passed into a previous call
    to AddFiles(), it will not be reflected in this project.
    """
    self._AddFilesToNode(self.n_files, files)
    # TODO(rspangler) This also doesn't handle adding files to an existing
    # filter.  That is, it doesn't merge the trees.

  def AddFileConfig(self, path, config, attrs=None, tools=None):
    """Adds a configuration to a file.

    Args:
      path: Relative path to the file.
      config: Name of configuration to add.
      attrs: Dict of configuration attributes; may be None.
      tools: List of tools (strings or Tool objects); may be None.

    Raises:
      ValueError: Relative path does not match any file added via AddFiles().
    """
    # Find the file node with the right relative path
    parent = self.n_files_dict.get(path)
    if not parent:
      raise ValueError('AddFileConfig: file "%s" not in project.' % path)

    # Add the config to the file node
    self._AddConfigToNode(parent, 'FileConfiguration', config, attrs, tools)

  def Write(self, writer=common.WriteOnDiff):
    """Writes the project file (Windows-1252, CRLF, 2-space indent)."""
    f = writer(self.project_path)
    self.doc.writexml(f, encoding='Windows-1252', addindent='  ', newl='\r\n')
    f.close()
242
243 #------------------------------------------------------------------------------
+0
-79
mozc_build_tools/gyp/pylib/gyp/MSVSToolFile.py less more
0 #!/usr/bin/python2.4
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Visual Studio project reader/writer."""
7
8 import common
9 import xml.dom
10 import xml.dom.minidom
11
12
13 #------------------------------------------------------------------------------
14
15
class Writer(object):
  """Visual Studio XML tool (.rules) file writer."""

  def __init__(self, tool_file_path):
    """Initializes the tool file.

    Args:
      tool_file_path: Path to the tool file.
    """
    self.tool_file_path = tool_file_path
    self.doc = None

  def Create(self, name):
    """Creates the tool file document.

    Args:
      name: Name of the tool file.
    """
    self.name = name

    # Build the XML document with a VisualStudioToolFile root.
    dom_impl = xml.dom.getDOMImplementation()
    self.doc = dom_impl.createDocument(None, 'VisualStudioToolFile', None)

    root = self.doc.documentElement
    root.setAttribute('Version', '8.00')
    root.setAttribute('Name', self.name)
    self.n_root = root

    # All custom build rules hang off a single Rules node.
    rules_node = self.doc.createElement('Rules')
    root.appendChild(rules_node)
    self.n_rules = rules_node

  def AddCustomBuildRule(self, name, cmd, description,
                         additional_dependencies,
                         outputs, extensions):
    """Adds a rule to the tool file.

    Args:
      name: Name of the rule.
      cmd: Command line of the rule.
      description: Description of the rule.
      additional_dependencies: other files which may trigger the rule.
      outputs: outputs of the rule.
      extensions: extensions handled by the rule.
    """
    rule = self.doc.createElement('CustomBuildRule')
    rule.setAttribute('Name', name)
    rule.setAttribute('ExecutionDescription', description)
    rule.setAttribute('CommandLine', cmd)
    rule.setAttribute('Outputs', ';'.join(outputs))
    rule.setAttribute('FileExtensions', ';'.join(extensions))
    rule.setAttribute('AdditionalDependencies',
                      ';'.join(additional_dependencies))
    self.n_rules.appendChild(rule)

  def Write(self, writer=common.WriteOnDiff):
    """Writes the tool file (Windows-1252, CRLF, 2-space indent)."""
    f = writer(self.tool_file_path)
    self.doc.writexml(f, encoding='Windows-1252', addindent='  ', newl='\r\n')
    f.close()
77
78 #------------------------------------------------------------------------------
+0
-182
mozc_build_tools/gyp/pylib/gyp/MSVSUserFile.py less more
0 #!/usr/bin/python2.4
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Visual Studio user preferences file writer."""
7
8 import common
9 import os
10 import re
11 import socket # for gethostname
12 import xml.dom
13 import xml.dom.minidom
14
15
16 #------------------------------------------------------------------------------
17
18 def _FindCommandInPath(command):
19 """If there are no slashes in the command given, this function
20 searches the PATH env to find the given command, and converts it
21 to an absolute path. We have to do this because MSVS is looking
22 for an actual file to launch a debugger on, not just a command
23 line. Note that this happens at GYP time, so anything needing to
24 be built needs to have a full path."""
25 if '/' in command or '\\' in command:
26 # If the command already has path elements (either relative or
27 # absolute), then assume it is constructed properly.
28 return command
29 else:
30 # Search through the path list and find an existing file that
31 # we can access.
32 paths = os.environ.get('PATH','').split(os.pathsep)
33 for path in paths:
34 item = os.path.join(path, command)
35 if os.path.isfile(item) and os.access(item, os.X_OK):
36 return item
37 return command
38
39 def _QuoteWin32CommandLineArgs(args):
40 new_args = []
41 for arg in args:
42 # Replace all double-quotes with double-double-quotes to escape
43 # them for cmd shell, and then quote the whole thing if there
44 # are any.
45 if arg.find('"') != -1:
46 arg = '""'.join(arg.split('"'))
47 arg = '"%s"' % arg
48
49 # Otherwise, if there are any spaces, quote the whole arg.
50 elif re.search(r'[ \t\n]', arg):
51 arg = '"%s"' % arg
52 new_args.append(arg)
53 return new_args
54
class Writer(object):
  """Visual Studio XML user (.user) file writer."""

  def __init__(self, user_file_path, version):
    """Initializes the user file.

    Args:
      user_file_path: Path to the user file.
      version: Format version to emit; its ProjectVersion() supplies the
          root element's 'Version' attribute.
    """
    self.user_file_path = user_file_path
    self.version = version
    self.doc = None

  def Create(self, name):
    """Creates the user file document.

    Args:
      name: Name of the user file.
    """
    self.name = name

    # Create XML doc
    xml_impl = xml.dom.getDOMImplementation()
    self.doc = xml_impl.createDocument(None, 'VisualStudioUserFile', None)

    # Add attributes to root element
    self.n_root = self.doc.documentElement
    self.n_root.setAttribute('Version', self.version.ProjectVersion())
    self.n_root.setAttribute('Name', self.name)

    # Add configurations section
    self.n_configs = self.doc.createElement('Configurations')
    self.n_root.appendChild(self.n_configs)

  def _AddConfigToNode(self, parent, config_type, config_name):
    """Adds a configuration to the parent node.

    Args:
      parent: Destination node.
      config_type: Type of configuration node (element tag name).
      config_name: Configuration name.
    """
    # Add configuration node and its attributes
    n_config = self.doc.createElement(config_type)
    n_config.setAttribute('Name', config_name)
    parent.appendChild(n_config)

  def AddConfig(self, name):
    """Adds a configuration to the project.

    Args:
      name: Configuration name.
    """
    self._AddConfigToNode(self.n_configs, 'Configuration', name)


  def AddDebugSettings(self, config_name, command, environment = {},
                       working_directory=""):
    """Adds a DebugSettings node to the user file for a particular config.

    Args:
      config_name: Name of the configuration the settings attach to; it is
          created if it does not exist yet.
      command: command line to run.  First element in the list is the
          executable.  All elements of the command will be quoted if
          necessary.
      environment: dict of NAME -> value environment variables for the
          debuggee.  Only read here, never mutated, so the mutable default
          is safe.
      working_directory: working directory for the debuggee (optional).
    """
    command = _QuoteWin32CommandLineArgs(command)

    n_cmd = self.doc.createElement('DebugSettings')
    abs_command = _FindCommandInPath(command[0])
    n_cmd.setAttribute('Command', abs_command)
    n_cmd.setAttribute('WorkingDirectory', working_directory)
    n_cmd.setAttribute('CommandArguments', " ".join(command[1:]))
    n_cmd.setAttribute('RemoteMachine', socket.gethostname())

    # NOTE(review): iteritems() is Python 2 only, consistent with this
    # file's python2.4 shebang.
    if environment and isinstance(environment, dict):
      n_cmd.setAttribute('Environment',
                         " ".join(['%s="%s"' % (key, val)
                                   for (key,val) in environment.iteritems()]))
    else:
      n_cmd.setAttribute('Environment', '')

    n_cmd.setAttribute('EnvironmentMerge', 'true')

    # Currently these are all "dummy" values that we're just setting
    # in the default manner that MSVS does it.  We could use some of
    # these to add additional capabilities, I suppose, but they might
    # not have parity with other platforms then.
    n_cmd.setAttribute('Attach', 'false')
    n_cmd.setAttribute('DebuggerType', '3')  # 'auto' debugger
    n_cmd.setAttribute('Remote', '1')
    n_cmd.setAttribute('RemoteCommand', '')
    n_cmd.setAttribute('HttpUrl', '')
    n_cmd.setAttribute('PDBPath', '')
    n_cmd.setAttribute('SQLDebugging', '')
    n_cmd.setAttribute('DebuggerFlavor', '0')
    n_cmd.setAttribute('MPIRunCommand', '')
    n_cmd.setAttribute('MPIRunArguments', '')
    n_cmd.setAttribute('MPIRunWorkingDirectory', '')
    n_cmd.setAttribute('ApplicationCommand', '')
    n_cmd.setAttribute('ApplicationArguments', '')
    n_cmd.setAttribute('ShimCommand', '')
    n_cmd.setAttribute('MPIAcceptMode', '')
    n_cmd.setAttribute('MPIAcceptFilter', '')

    # Find the config, and add it if it doesn't exist.
    found = False
    for config in self.n_configs.childNodes:
      if config.getAttribute("Name") == config_name:
        found = True

    if not found:
      self.AddConfig(config_name)

    # Add the DebugSettings onto the appropriate config.
    for config in self.n_configs.childNodes:
      if config.getAttribute("Name") == config_name:
        config.appendChild(n_cmd)
        break

  def Write(self, writer=common.WriteOnDiff):
    """Writes the user file (Windows-1252, CRLF, 2-space indent)."""
    f = writer(self.user_file_path)
    self.doc.writexml(f, encoding='Windows-1252', addindent='  ', newl='\r\n')
    f.close()
180
181 #------------------------------------------------------------------------------
+0
-151
mozc_build_tools/gyp/pylib/gyp/MSVSVersion.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Handle version information related to Visual Stuio."""
7
8 import os
9 import re
10 import subprocess
11 import sys
12
13
class VisualStudioVersion:
  """Information regarding a version of Visual Studio."""

  def __init__(self, short_name, description,
               solution_version, project_version, flat_sln):
    """Stores the format metadata for one Visual Studio release.

    Args:
      short_name: Abbreviated name, e.g. '2008' or '2005e'.
      description: Human-readable product name.
      solution_version: Format version written into .sln files.
      project_version: Format version written into .vcproj files.
      flat_sln: Whether solutions must be flat (no nested folders).
    """
    self.short_name = short_name
    self.description = description
    self.solution_version = solution_version
    self.project_version = project_version
    self.flat_sln = flat_sln

  def ShortName(self):
    """Returns the abbreviated version name (e.g. '2008e')."""
    return self.short_name

  def Description(self):
    """Returns the full description of the version."""
    return self.description

  def SolutionVersion(self):
    """Returns the version number of the sln files."""
    return self.solution_version

  def ProjectVersion(self):
    """Returns the version number of the vcproj files."""
    return self.project_version

  def FlatSolution(self):
    """Returns whether the solution must be flat."""
    return self.flat_sln
42
43
44 def _RegistryGetValue(key, value):
45 """Use reg.exe to read a paricular key.
46
47 While ideally we might use the win32 module, we would like gyp to be
48 python neutral, so for instance cygwin python lacks this module.
49
50 Arguments:
51 key: The registry key to read from.
52 value: The particular value to read.
53 Return:
54 The contents there, or None for failure.
55 """
56 # Skip if not on Windows.
57 if sys.platform not in ('win32', 'cygwin'):
58 return None
59 # Run reg.exe.
60 cmd = [os.path.join(os.environ.get('WINDIR', ''), 'System32', 'reg.exe'),
61 'query', key, '/v', value]
62 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
63 text = p.communicate()[0]
64 # Require a successful return value.
65 if p.returncode:
66 return None
67 # Extract value.
68 match = re.search(r'REG_\w+[ ]+([^\r]+)\r\n', text)
69 if not match:
70 return None
71 return match.group(1)
72
73
def _CreateVersion(name):
  """Builds the VisualStudioVersion object for a short version name.

  Args:
    name: '2005', '2005e', '2008' or '2008e' (anything with a str()).
  Returns:
    The corresponding VisualStudioVersion; raises KeyError for an
    unrecognized name.
  """
  # (description, solution format, project format, flat solution)
  known = {
      '2008':  ('Visual Studio 2008', '10.00', '9.00', False),
      '2008e': ('Visual Studio 2008', '10.00', '9.00', True),
      '2005':  ('Visual Studio 2005', '9.00', '8.00', False),
      '2005e': ('Visual Studio 2005', '9.00', '8.00', True),
  }
  short_name = str(name)
  description, solution_version, project_version, flat_sln = known[short_name]
  return VisualStudioVersion(short_name, description,
                             solution_version=solution_version,
                             project_version=project_version,
                             flat_sln=flat_sln)
98
99
def _DetectVisualStudioVersions():
  """Collect the list of installed visual studio versions.

  Returns:
    A list of visual studio versions installed in descending order of
    usage preference, based on the registry and a quick check whether
    devenv.exe (full) or vcexpress.exe (express) exists.  Only versions
    8 (2005) and 9 (2008) are considered.
  """
  version_to_year = {'8.0': '2005', '9.0': '2008'}
  detected = []
  # Probe the newest version first so preference order falls out naturally.
  for version in ('9.0', '8.0'):
    # Get the install dir for this version.
    key = r'HKLM\Software\Microsoft\VisualStudio\%s' % version
    path = _RegistryGetValue(key, 'InstallDir')
    if not path:
      continue
    year = version_to_year[version]
    if os.path.exists(os.path.join(path, 'devenv.exe')):
      # Full edition present.
      detected.append(_CreateVersion(year))
    elif os.path.exists(os.path.join(path, 'vcexpress.exe')):
      # Express edition present.
      detected.append(_CreateVersion(year + 'e'))
  return detected
129
130
def SelectVisualStudioVersion(version='auto'):
  """Select which version of Visual Studio projects to generate.

  Arguments:
    version: Hook to allow caller to force a particular version (vs auto).
  Returns:
    An object representing a visual studio project format version.
  """
  if version == 'auto':
    # In auto mode, the GYP_MSVS_VERSION environment variable may override.
    version = os.environ.get('GYP_MSVS_VERSION', 'auto')
  if version != 'auto':
    # Convert the explicit version string into a version object.
    return _CreateVersion(version)
  # Still auto: pick the most preferred version present on this machine.
  detected = _DetectVisualStudioVersions()
  if detected:
    return detected[0]
  # Nothing installed; default to 2005.
  return _CreateVersion('2005')
+0
-200
mozc_build_tools/gyp/pylib/gyp/SCons.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 SCons generator.
8
9 This contains class definitions and supporting functions for generating
10 pieces of SCons files for the different types of GYP targets.
11 """
12
13 import os
14
15
def WriteList(fp, list, prefix='',
              separator=',\n    ',
              preamble=None,
              postamble=None):
  """Writes a separated list of prefixed strings to fp.

  Args:
    fp: File-like object to write to.
    list: Sequence of strings to emit (name kept for interface
        compatibility even though it shadows the builtin).
    prefix: String prepended to every element.
    separator: Placed between elements; a falsy value means a space.
    preamble: Optional text written before the list (None means none).
    postamble: Optional text written after the list (None means none).
  """
  fp.write(preamble or '')
  items = [prefix + element for element in list]
  fp.write((separator or ' ').join(items))
  fp.write(postamble or '')
23
24
class TargetBase(object):
  """
  Base class for a SCons representation of a GYP target.
  """
  is_ignored = False
  target_prefix = ''
  target_suffix = ''

  def __init__(self, spec):
    self.spec = spec

  def full_product_name(self):
    """
    Returns the full name of the product being built:

     * Uses 'product_name' if it's set, else prefix + 'target_name'.
     * Prepends 'product_dir' if set.
     * Appends SCons suffix variables for the target type (or
       product_extension).
    """
    product_extension = self.spec.get('product_extension')
    if product_extension:
      suffix = '.' + product_extension
    else:
      suffix = self.target_suffix
    prefix = self.spec.get('product_prefix', self.target_prefix)
    base_name = self.spec.get('product_name', self.spec['target_name'])
    name = prefix + base_name + suffix
    # Place the product in product_dir when given, else the type's default.
    directory = self.spec.get('product_dir') or self.out_dir
    return os.path.join(directory, name)

  def write_input_files(self, fp):
    """
    Writes the definition of the input files (sources).
    """
    sources = self.spec.get('sources')
    if not sources:
      fp.write('\ninput_files = []\n')
      return
    WriteList(fp, map(repr, sources),
              preamble='\ninput_files = [\n    ',
              postamble=',\n]\n')

  def builder_call(self):
    """
    Returns the actual SCons builder call to build this target.
    """
    return 'env.%s(env.File(%r), input_files)' % (self.builder_name,
                                                  self.full_product_name())

  def write_target(self, fp, src_dir='', pre=''):
    """
    Writes the lines necessary to build this target.
    """
    fp.write('\n' + pre)
    fp.write('_outputs = %s\n' % self.builder_call())
    fp.write('target_files.extend(_outputs)\n')
82
83
class NoneTarget(TargetBase):
  """
  A GYP target type of 'none', implicitly or explicitly.

  Builds nothing: its input files pass straight through as target files.
  """
  def write_target(self, fp, pre=''):
    # NOTE(review): unlike TargetBase.write_target this override has no
    # src_dir parameter -- confirm no caller passes one positionally.
    fp.write('\ntarget_files.extend(input_files)\n')
90
91
class SettingsTarget(TargetBase):
  """
  A GYP target type of 'settings'.
  """
  # Settings targets produce no build output of their own.
  is_ignored = True
97
98
# SCons code template interpolated by CompilableSourcesTargetBase with
# %(src_dir)r (the source directory prefix) and %(name)s (the intermediate
# builder, e.g. StaticObject).  It rewrites each compilable input file into
# an object file under the build directory; non-compilable inputs pass
# through untouched.
compilable_sources_template = """
_result = []
for infile in input_files:
  if env.compilable(infile):
    if (type(infile) == type('')
        and (infile.startswith(%(src_dir)r)
             or not os.path.isabs(env.subst(infile)))):
      # Force files below the build directory by replacing all '..'
      # elements in the path with '__':
      base, ext = os.path.splitext(os.path.normpath(infile))
      base = [d == '..' and '__' or d for d in base.split('/')]
      base = os.path.join(*base)
      object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base
      if not infile.startswith(%(src_dir)r):
        infile = %(src_dir)r + infile
      infile = env.%(name)s(object, infile)[0]
    else:
      infile = env.%(name)s(infile)[0]
  _result.append(infile)
input_files = _result
"""
120
class CompilableSourcesTargetBase(TargetBase):
  """
  An abstract base class for targets that compile their source files.

  We explicitly transform compilable files into object files,
  even though SCons could infer that for us, because we want
  to control where the object file ends up.  (The implicit rules
  in SCons always put the object file next to the source file.)
  """
  # Subclasses must name the SCons builder used for intermediate object
  # files ('StaticObject' or 'SharedObject').
  intermediate_builder_name = None
  def write_target(self, fp, src_dir='', pre=''):
    if self.intermediate_builder_name is None:
      raise NotImplementedError
    # Normalize so string concatenation in the template forms valid paths.
    if src_dir and not src_dir.endswith('/'):
      src_dir += '/'
    variables = {
        'src_dir': src_dir,
        'name': self.intermediate_builder_name,
    }
    # Emit the object-file transformation, then the normal builder call.
    fp.write(compilable_sources_template % variables)
    # NOTE(review): src_dir and pre are not forwarded to the base class
    # write_target -- confirm callers do not rely on 'pre' here.
    super(CompilableSourcesTargetBase, self).write_target(fp)
142
143
class ProgramTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'executable'.
  """
  builder_name = 'GypProgram'
  intermediate_builder_name = 'StaticObject'
  target_prefix = '${PROGPREFIX}'
  target_suffix = '${PROGSUFFIX}'
  # Executables land in the top build directory.
  out_dir = '${TOP_BUILDDIR}'


class StaticLibraryTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'static_library'.
  """
  builder_name = 'GypStaticLibrary'
  intermediate_builder_name = 'StaticObject'
  target_prefix = '${LIBPREFIX}'
  target_suffix = '${LIBSUFFIX}'
  # Libraries land in the library directory.
  out_dir = '${LIB_DIR}'


class SharedLibraryTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'shared_library'.
  """
  builder_name = 'GypSharedLibrary'
  intermediate_builder_name = 'SharedObject'
  target_prefix = '${SHLIBPREFIX}'
  target_suffix = '${SHLIBSUFFIX}'
  out_dir = '${LIB_DIR}'


class LoadableModuleTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'loadable_module'.
  """
  builder_name = 'GypLoadableModule'
  intermediate_builder_name = 'SharedObject'
  target_prefix = '${SHLIBPREFIX}'
  target_suffix = '${SHLIBSUFFIX}'
  # Modules are loaded at runtime, so they live with the executables.
  out_dir = '${TOP_BUILDDIR}'
186
187
# Maps each GYP 'type' value to the TargetBase subclass that writes the
# corresponding SCons code.  A missing/None type is treated as 'none'.
TargetMap = {
  None : NoneTarget,
  'none' : NoneTarget,
  'settings' : SettingsTarget,
  'executable' : ProgramTarget,
  'static_library' : StaticLibraryTarget,
  'shared_library' : SharedLibraryTarget,
  'loadable_module' : LoadableModuleTarget,
}

def Target(spec):
  # Factory: instantiates the writer class for spec's 'type'; raises
  # KeyError for an unknown type value.
  return TargetMap[spec.get('type')](spec)
+0
-453
mozc_build_tools/gyp/pylib/gyp/__init__.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import copy
7 import gyp.input
8 import optparse
9 import os.path
10 import re
11 import shlex
12 import sys
13
# Default debug modes for GYP.  Maps mode name -> truthy value; DebugOutput()
# below prints only for modes present in this dict.
debug = {}

# List of "official" debug modes, but you can use anything you like.
DEBUG_GENERAL = 'general'
DEBUG_VARIABLES = 'variables'
DEBUG_INCLUDES = 'includes'
21
def DebugOutput(mode, message):
  """Prints message, prefixed by the upper-cased mode, when that debug
  mode is enabled in gyp.debug.

  Args:
    mode: Debug mode name (e.g. DEBUG_GENERAL); any string key works.
    message: Text to print.
  """
  # Membership test on the dict itself; the old `.keys()` built a list
  # for no benefit.  Parenthesized print keeps py2 output identical while
  # also being valid py3.
  if mode in gyp.debug:
    print("%s: %s" % (mode.upper(), message))
25
def FindBuildFiles():
  """Returns the list of '.gyp' files in the current working directory."""
  extension = '.gyp'
  build_files = []
  # endswith() replaces the old slice comparison, and 'filename' no longer
  # shadows the 'file' builtin.
  for filename in os.listdir(os.getcwd()):
    if filename.endswith(extension):
      build_files.append(filename)
  return build_files
34
35
def Load(build_files, format, default_variables=None,
         includes=None, depth='.', params=None, check=False,
         circular_check=True):
  """
  Loads one or more specified build files.
  default_variables and includes will be copied before use.
  Returns the generator for the specified format and the
  data returned by loading the specified build files.
  """
  # None sentinels replace the old mutable default arguments ({} / []).
  # Those defaults were shared across calls, so a generator that mutated
  # e.g. `params` in CalculateVariables would leak state into every later
  # Load() call.  Explicitly passed arguments behave exactly as before.
  if default_variables is None:
    default_variables = {}
  if includes is None:
    includes = []
  if params is None:
    params = {}
  default_variables = copy.copy(default_variables)

  # Default variables provided by this program and its modules should be
  # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
  # avoiding collisions with user and automatic variables.
  default_variables['GENERATOR'] = format

  generator_name = 'gyp.generator.' + format
  # These parameters are passed in order (as opposed to by key)
  # because ActivePython cannot handle key parameters to __import__.
  generator = __import__(generator_name, globals(), locals(), generator_name)
  for (key, val) in generator.generator_default_variables.items():
    default_variables.setdefault(key, val)

  # Give the generator the opportunity to set additional variables based on
  # the params it will receive in the output phase.
  if getattr(generator, 'CalculateVariables', None):
    generator.CalculateVariables(default_variables, params)

  # Fetch the generator specific info that gets fed to input; getattr with
  # defaults keeps the burden on individual generators minimal.
  generator_input_info = {
    'generator_wants_absolute_build_file_paths':
        getattr(generator, 'generator_wants_absolute_build_file_paths', False),
    'generator_handles_variants':
        getattr(generator, 'generator_handles_variants', False),
    'non_configuration_keys':
        getattr(generator, 'generator_additional_non_configuration_keys', []),
    'path_sections':
        getattr(generator, 'generator_additional_path_sections', []),
    'extra_sources_for_rules':
        getattr(generator, 'generator_extra_sources_for_rules', []),
    'generator_supports_multiple_toolsets':
        getattr(generator, 'generator_supports_multiple_toolsets', False),
  }

  # Process the input specific to this generator.
  result = gyp.input.Load(build_files, default_variables, includes[:],
                          depth, generator_input_info, check, circular_check)
  return [generator] + result
85
86 def NameValueListToDict(name_value_list):
87 """
88 Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
89 of the pairs. If a string is simply NAME, then the value in the dictionary
90 is set to True. If VALUE can be converted to an integer, it is.
91 """
92 result = { }
93 for item in name_value_list:
94 tokens = item.split('=', 1)
95 if len(tokens) == 2:
96 # If we can make it an int, use that, otherwise, use the string.
97 try:
98 token_value = int(tokens[1])
99 except ValueError:
100 token_value = tokens[1]
101 # Set the variable to the supplied value.
102 result[tokens[0]] = token_value
103 else:
104 # No value supplied, treat it as a boolean and set it.
105 result[tokens[0]] = True
106 return result
107
108 def ShlexEnv(env_name):
109 flags = os.environ.get(env_name, [])
110 if flags:
111 flags = shlex.split(flags)
112 return flags
113
114 def FormatOpt(opt, value):
115 if opt.startswith('--'):
116 return '%s=%s' % (opt, value)
117 return opt + value
118
119 def RegenerateAppendFlag(flag, values, predicate, env_name, options):
120 """Regenerate a list of command line flags, for an option of action='append'.
121
122 The |env_name|, if given, is checked in the environment and used to generate
123 an initial list of options, then the options that were specified on the
124 command line (given in |values|) are appended. This matches the handling of
125 environment variables and command line flags where command line flags override
126 the environment, while not requiring the environment to be set when the flags
127 are used again.
128 """
129 flags = []
130 if options.use_environment and env_name:
131 for flag_value in ShlexEnv(env_name):
132 flags.append(FormatOpt(flag, predicate(flag_value)))
133 if values:
134 for flag_value in values:
135 flags.append(FormatOpt(flag, predicate(flag_value)))
136 return flags
137
138 def RegenerateFlags(options):
139 """Given a parsed options object, and taking the environment variables into
140 account, returns a list of flags that should regenerate an equivalent options
141 object (even in the absence of the environment variables.)
142
143 Any path options will be normalized relative to depth.
144
145 The format flag is not included, as it is assumed the calling generator will
146 set that as appropriate.
147 """
148 def FixPath(path):
149 path = gyp.common.FixIfRelativePath(path, options.depth)
150 if not path:
151 return os.path.curdir
152 return path
153
154 def Noop(value):
155 return value
156
157 # We always want to ignore the environment when regenerating, to avoid
158 # duplicate or changed flags in the environment at the time of regeneration.
159 flags = ['--ignore-environment']
160 for name, metadata in options._regeneration_metadata.iteritems():
161 opt = metadata['opt']
162 value = getattr(options, name)
163 value_predicate = metadata['type'] == 'path' and FixPath or Noop
164 action = metadata['action']
165 env_name = metadata['env_name']
166 if action == 'append':
167 flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
168 env_name, options))
169 elif action in ('store', None): # None is a synonym for 'store'.
170 if value:
171 flags.append(FormatOpt(opt, value_predicate(value)))
172 elif options.use_environment and env_name and os.environ.get(env_name):
173 flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
174 elif action in ('store_true', 'store_false'):
175 if ((action == 'store_true' and value) or
176 (action == 'store_false' and not value)):
177 flags.append(opt)
178 elif options.use_environment and env_name:
179 print >>sys.stderr, ('Warning: environment regeneration unimplemented '
180 'for %s flag %r env_name %r' % (action, opt,
181 env_name))
182 else:
183 print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
184 'flag %r' % (action, opt))
185
186 return flags
187
188 class RegeneratableOptionParser(optparse.OptionParser):
189 def __init__(self):
190 self.__regeneratable_options = {}
191 optparse.OptionParser.__init__(self)
192
193 def add_option(self, *args, **kw):
194 """Add an option to the parser.
195
196 This accepts the same arguments as OptionParser.add_option, plus the
197 following:
198 regenerate: can be set to False to prevent this option from being included
199 in regeneration.
200 env_name: name of environment variable that additional values for this
201 option come from.
202 type: adds type='path', to tell the regenerator that the values of
203 this option need to be made relative to options.depth
204 """
205 env_name = kw.pop('env_name', None)
206 if 'dest' in kw and kw.pop('regenerate', True):
207 dest = kw['dest']
208
209 # The path type is needed for regenerating, for optparse we can just treat
210 # it as a string.
211 type = kw.get('type')
212 if type == 'path':
213 kw['type'] = 'string'
214
215 self.__regeneratable_options[dest] = {
216 'action': kw.get('action'),
217 'type': type,
218 'env_name': env_name,
219 'opt': args[0],
220 }
221
222 optparse.OptionParser.add_option(self, *args, **kw)
223
224 def parse_args(self, *args):
225 values, args = optparse.OptionParser.parse_args(self, *args)
226 values._regeneration_metadata = self.__regeneratable_options
227 return values, args
228
229 def main(args):
230 my_name = os.path.basename(sys.argv[0])
231
232 parser = RegeneratableOptionParser()
233 usage = 'usage: %s [options ...] [build_file ...]'
234 parser.set_usage(usage.replace('%s', '%prog'))
235 parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
236 env_name='GYP_DEFINES',
237 help='sets variable VAR to value VAL')
238 parser.add_option('-f', '--format', dest='formats', action='append',
239 env_name='GYP_GENERATORS', regenerate=False,
240 help='output formats to generate')
241 parser.add_option('--msvs-version', dest='msvs_version',
242 regenerate=False,
243 help='Deprecated; use -G msvs_version=MSVS_VERSION instead')
244 parser.add_option('-I', '--include', dest='includes', action='append',
245 metavar='INCLUDE', type='path',
246 help='files to include in all loaded .gyp files')
247 parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
248 help='set DEPTH gyp variable to a relative path to PATH')
249 parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
250 action='append', default=[], help='turn on a debugging '
251 'mode for debugging GYP. Supported modes are "variables" '
252 'and "general"')
253 parser.add_option('-S', '--suffix', dest='suffix', default='',
254 help='suffix to add to generated files')
255 parser.add_option('-G', dest='generator_flags', action='append', default=[],
256 metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
257 help='sets generator flag FLAG to VAL')
258 parser.add_option('--generator-output', dest='generator_output',
259 action='store', default=None, metavar='DIR', type='path',
260 env_name='GYP_GENERATOR_OUTPUT',
261 help='puts generated build files under DIR')
262 parser.add_option('--ignore-environment', dest='use_environment',
263 action='store_false', default=True, regenerate=False,
264 help='do not read options from environment variables')
265 parser.add_option('--check', dest='check', action='store_true',
266 help='check format of gyp files')
267 # --no-circular-check disables the check for circular relationships between
268 # .gyp files. These relationships should not exist, but they've only been
269 # observed to be harmful with the Xcode generator. Chromium's .gyp files
270 # currently have some circular relationships on non-Mac platforms, so this
271 # option allows the strict behavior to be used on Macs and the lenient
272 # behavior to be used elsewhere.
273 # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
274 parser.add_option('--no-circular-check', dest='circular_check',
275 action='store_false', default=True, regenerate=False,
276 help="don't check for circular relationships between files")
277
278 # We read a few things from ~/.gyp, so set up a var for that.
279 home_vars = ['HOME']
280 if sys.platform in ('cygwin', 'win32'):
281 home_vars.append('USERPROFILE')
282 home = None
283 for home_var in home_vars:
284 home = os.getenv(home_var)
285 if home != None:
286 break
287 home_dot_gyp = None
288 if home != None:
289 home_dot_gyp = os.path.join(home, '.gyp')
290 if not os.path.exists(home_dot_gyp):
291 home_dot_gyp = None
292
293 # TODO(thomasvl): add support for ~/.gyp/defaults
294
295 (options, build_files_arg) = parser.parse_args(args)
296 build_files = build_files_arg
297
298 if not options.formats:
299 # If no format was given on the command line, then check the env variable.
300 generate_formats = []
301 if options.use_environment:
302 generate_formats = os.environ.get('GYP_GENERATORS', [])
303 if generate_formats:
304 generate_formats = re.split('[\s,]', generate_formats)
305 if generate_formats:
306 options.formats = generate_formats
307 else:
308 # Nothing in the variable, default based on platform.
309 options.formats = [ {'darwin': 'xcode',
310 'win32': 'msvs',
311 'cygwin': 'msvs',
312 'freebsd7': 'make',
313 'freebsd8': 'make',
314 'linux2': 'make',
315 'openbsd4': 'make',
316 'sunos5': 'make',}[sys.platform] ]
317
318 if not options.generator_output and options.use_environment:
319 g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
320 if g_o:
321 options.generator_output = g_o
322
323 for mode in options.debug:
324 gyp.debug[mode] = 1
325
326 # Do an extra check to avoid work when we're not debugging.
327 if DEBUG_GENERAL in gyp.debug.keys():
328 DebugOutput(DEBUG_GENERAL, 'running with these options:')
329 for (option, value) in options.__dict__.items():
330 if option[0] == '_':
331 continue
332 if isinstance(value, basestring):
333 DebugOutput(DEBUG_GENERAL, " %s: '%s'" % (option, value))
334 else:
335 DebugOutput(DEBUG_GENERAL, " %s: %s" % (option, str(value)))
336
337 if not build_files:
338 build_files = FindBuildFiles()
339 if not build_files:
340 print >>sys.stderr, (usage + '\n\n%s: error: no build_file') % \
341 (my_name, my_name)
342 return 1
343
344 # TODO(mark): Chromium-specific hack!
345 # For Chromium, the gyp "depth" variable should always be a relative path
346 # to Chromium's top-level "src" directory. If no depth variable was set
347 # on the command line, try to find a "src" directory by looking at the
348 # absolute path to each build file's directory. The first "src" component
349 # found will be treated as though it were the path used for --depth.
350 if not options.depth:
351 for build_file in build_files:
352 build_file_dir = os.path.abspath(os.path.dirname(build_file))
353 build_file_dir_components = build_file_dir.split(os.path.sep)
354 components_len = len(build_file_dir_components)
355 for index in xrange(components_len - 1, -1, -1):
356 if build_file_dir_components[index] == 'src':
357 options.depth = os.path.sep.join(build_file_dir_components)
358 break
359 del build_file_dir_components[index]
360
361 # If the inner loop found something, break without advancing to another
362 # build file.
363 if options.depth:
364 break
365
366 if not options.depth:
367 raise Exception, \
368 'Could not automatically locate src directory. This is a ' + \
369 'temporary Chromium feature that will be removed. Use ' + \
370 '--depth as a workaround.'
371
372 # -D on the command line sets variable defaults - D isn't just for define,
373 # it's for default. Perhaps there should be a way to force (-F?) a
374 # variable's value so that it can't be overridden by anything else.
375 cmdline_default_variables = {}
376 defines = []
377 if options.use_environment:
378 defines += ShlexEnv('GYP_DEFINES')
379 if options.defines:
380 defines += options.defines
381 cmdline_default_variables = NameValueListToDict(defines)
382 if DEBUG_GENERAL in gyp.debug.keys():
383 DebugOutput(DEBUG_GENERAL,
384 "cmdline_default_variables: %s" % cmdline_default_variables)
385
386 # Set up includes.
387 includes = []
388
389 # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
390 # .gyp file that's loaded, before anything else is included.
391 if home_dot_gyp != None:
392 default_include = os.path.join(home_dot_gyp, 'include.gypi')
393 if os.path.exists(default_include):
394 includes.append(default_include)
395
396 # Command-line --include files come after the default include.
397 if options.includes:
398 includes.extend(options.includes)
399
400 # Generator flags should be prefixed with the target generator since they
401 # are global across all generator runs.
402 gen_flags = []
403 if options.use_environment:
404 gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
405 if options.generator_flags:
406 gen_flags += options.generator_flags
407 generator_flags = NameValueListToDict(gen_flags)
408 if DEBUG_GENERAL in gyp.debug.keys():
409 DebugOutput(DEBUG_GENERAL, "generator_flags: %s" % generator_flags)
410
411 # TODO: Remove this and the option after we've gotten folks to move to the
412 # generator flag.
413 if options.msvs_version:
414 print >>sys.stderr, \
415 'DEPRECATED: Use generator flag (-G msvs_version=' + \
416 options.msvs_version + ') instead of --msvs-version=' + \
417 options.msvs_version
418 generator_flags['msvs_version'] = options.msvs_version
419
420 # Generate all requested formats (use a set in case we got one format request
421 # twice)
422 for format in set(options.formats):
423 params = {'options': options,
424 'build_files': build_files,
425 'generator_flags': generator_flags,
426 'cwd': os.getcwd(),
427 'build_files_arg': build_files_arg,
428 'gyp_binary': sys.argv[0],
429 'home_dot_gyp': home_dot_gyp}
430
431 # Start with the default variables from the command line.
432 [generator, flat_list, targets, data] = Load(build_files, format,
433 cmdline_default_variables,
434 includes, options.depth,
435 params, options.check,
436 options.circular_check)
437
438 # TODO(mark): Pass |data| for now because the generator needs a list of
439 # build files that came in. In the future, maybe it should just accept
440 # a list, and not the whole data dict.
441 # NOTE: flat_list is the flattened dependency graph specifying the order
442 # that targets may be built. Build systems that operate serially or that
443 # need to have dependencies defined before dependents reference them should
444 # generate targets in the order specified in flat_list.
445 generator.GenerateOutput(flat_list, targets, data, params)
446
447 # Done
448 return 0
449
450
451 if __name__ == '__main__':
452 sys.exit(main(sys.argv[1:]))
+0
-343
mozc_build_tools/gyp/pylib/gyp/common.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import errno
7 import filecmp
8 import os.path
9 import re
10 import tempfile
11 import sys
12
13 def ExceptionAppend(e, msg):
14 """Append a message to the given exception's message."""
15 if not e.args:
16 e.args = (msg,)
17 elif len(e.args) == 1:
18 e.args = (str(e.args[0]) + ' ' + msg,)
19 else:
20 e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
21
22
23 def ParseQualifiedTarget(target):
24 # Splits a qualified target into a build file, target name and toolset.
25
26 # NOTE: rsplit is used to disambiguate the Windows drive letter separator.
27 target_split = target.rsplit(':', 1)
28 if len(target_split) == 2:
29 [build_file, target] = target_split
30 else:
31 build_file = None
32
33 target_split = target.rsplit('#', 1)
34 if len(target_split) == 2:
35 [target, toolset] = target_split
36 else:
37 toolset = None
38
39 return [build_file, target, toolset]
40
41
42 def ResolveTarget(build_file, target, toolset):
43 # This function resolves a target into a canonical form:
44 # - a fully defined build file, either absolute or relative to the current
45 # directory
46 # - a target name
47 # - a toolset
48 #
49 # build_file is the file relative to which 'target' is defined.
50 # target is the qualified target.
51 # toolset is the default toolset for that target.
52 [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
53
54 if parsed_build_file:
55 if build_file:
56 # If a relative path, parsed_build_file is relative to the directory
57 # containing build_file. If build_file is not in the current directory,
58 # parsed_build_file is not a usable path as-is. Resolve it by
59 # interpreting it as relative to build_file. If parsed_build_file is
60 # absolute, it is usable as a path regardless of the current directory,
61 # and os.path.join will return it as-is.
62 build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
63 parsed_build_file))
64 else:
65 build_file = parsed_build_file
66
67 if parsed_toolset:
68 toolset = parsed_toolset
69
70 return [build_file, target, toolset]
71
72
73 def BuildFile(fully_qualified_target):
74 # Extracts the build file from the fully qualified target.
75 return ParseQualifiedTarget(fully_qualified_target)[0]
76
77
78 def QualifiedTarget(build_file, target, toolset):
79 # "Qualified" means the file that a target was defined in and the target
80 # name, separated by a colon, suffixed by a # and the toolset name:
81 # /path/to/file.gyp:target_name#toolset
82 fully_qualified = build_file + ':' + target
83 if toolset:
84 fully_qualified = fully_qualified + '#' + toolset
85 return fully_qualified
86
87
88 def RelativePath(path, relative_to):
89 # Assuming both |path| and |relative_to| are relative to the current
90 # directory, returns a relative path that identifies path relative to
91 # relative_to.
92
93 # Convert to absolute (and therefore normalized paths).
94 path = os.path.abspath(path)
95 relative_to = os.path.abspath(relative_to)
96
97 # Split the paths into components.
98 path_split = path.split(os.path.sep)
99 relative_to_split = relative_to.split(os.path.sep)
100
101 # Determine how much of the prefix the two paths share.
102 prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
103
104 # Put enough ".." components to back up out of relative_to to the common
105 # prefix, and then append the part of path_split after the common prefix.
106 relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
107 path_split[prefix_len:]
108
109 if len(relative_split) == 0:
110 # The paths were the same.
111 return ''
112
113 # Turn it back into a string and we're done.
114 return os.path.join(*relative_split)
115
116
117 def FixIfRelativePath(path, relative_to):
118 # Like RelativePath but returns |path| unchanged if it is absolute.
119 if os.path.isabs(path):
120 return path
121 return RelativePath(path, relative_to)
122
123
124 def UnrelativePath(path, relative_to):
125 # Assuming that |relative_to| is relative to the current directory, and |path|
126 # is a path relative to the dirname of |relative_to|, returns a path that
127 # identifies |path| relative to the current directory.
128 rel_dir = os.path.dirname(relative_to)
129 return os.path.normpath(os.path.join(rel_dir, path))
130
131
132 # re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
133 # http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
134 # and the documentation for various shells.
135
136 # _quote is a pattern that should match any argument that needs to be quoted
137 # with double-quotes by EncodePOSIXShellArgument. It matches the following
138 # characters appearing anywhere in an argument:
139 # \t, \n, space parameter separators
140 # # comments
141 # $ expansions (quoted to always expand within one argument)
142 # % called out by IEEE 1003.1 XCU.2.2
143 # & job control
144 # ' quoting
145 # (, ) subshell execution
146 # *, ?, [ pathname expansion
147 # ; command delimiter
148 # <, >, | redirection
149 # = assignment
150 # {, } brace expansion (bash)
151 # ~ tilde expansion
152 # It also matches the empty string, because "" (or '') is the only way to
153 # represent an empty string literal argument to a POSIX shell.
154 #
155 # This does not match the characters in _escape, because those need to be
156 # backslash-escaped regardless of whether they appear in a double-quoted
157 # string.
158 _quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
159
160 # _escape is a pattern that should match any character that needs to be
161 # escaped with a backslash, whether or not the argument matched the _quote
162 # pattern. _escape is used with re.sub to backslash anything in _escape's
163 # first match group, hence the (parentheses) in the regular expression.
164 #
165 # _escape matches the following characters appearing anywhere in an argument:
166 # " to prevent POSIX shells from interpreting this character for quoting
167 # \ to prevent POSIX shells from interpreting this character for escaping
168 # ` to prevent POSIX shells from interpreting this character for command
169 # substitution
170 # Missing from this list is $, because the desired behavior of
171 # EncodePOSIXShellArgument is to permit parameter (variable) expansion.
172 #
173 # Also missing from this list is !, which bash will interpret as the history
174 # expansion character when history is enabled. bash does not enable history
175 # by default in non-interactive shells, so this is not thought to be a problem.
176 # ! was omitted from this list because bash interprets "\!" as a literal string
177 # including the backslash character (avoiding history expansion but retaining
178 # the backslash), which would not be correct for argument encoding. Handling
179 # this case properly would also be problematic because bash allows the history
180 # character to be changed with the histchars shell variable. Fortunately,
181 # as history is not enabled in non-interactive shells and
182 # EncodePOSIXShellArgument is only expected to encode for non-interactive
183 # shells, there is no room for error here by ignoring !.
184 _escape = re.compile(r'(["\\`])')
185
186 def EncodePOSIXShellArgument(argument):
187 """Encodes |argument| suitably for consumption by POSIX shells.
188
189 argument may be quoted and escaped as necessary to ensure that POSIX shells
190 treat the returned value as a literal representing the argument passed to
191 this function. Parameter (variable) expansions beginning with $ are allowed
192 to remain intact without escaping the $, to allow the argument to contain
193 references to variables to be expanded by the shell.
194 """
195
196 if not isinstance(argument, str):
197 argument = str(argument)
198
199 if _quote.search(argument):
200 quote = '"'
201 else:
202 quote = ''
203
204 encoded = quote + re.sub(_escape, r'\\\1', argument) + quote
205
206 return encoded
207
208
209 def EncodePOSIXShellList(list):
210 """Encodes |list| suitably for consumption by POSIX shells.
211
212 Returns EncodePOSIXShellArgument for each item in list, and joins them
213 together using the space character as an argument separator.
214 """
215
216 encoded_arguments = []
217 for argument in list:
218 encoded_arguments.append(EncodePOSIXShellArgument(argument))
219 return ' '.join(encoded_arguments)
220
221
222 def DeepDependencyTargets(target_dicts, roots):
223 """Returns the recursive list of target dependencies.
224 """
225 dependencies = set()
226 for r in roots:
227 spec = target_dicts[r]
228 r_deps = list(set((spec.get('dependencies', []) +
229 spec.get('dependencies_original', []))))
230 for d in r_deps:
231 if d not in roots:
232 dependencies.add(d)
233 for d in DeepDependencyTargets(target_dicts, r_deps):
234 if d not in roots:
235 dependencies.add(d)
236 return list(dependencies)
237
238
239 def BuildFileTargets(target_list, build_file):
240 """From a target_list, returns the subset from the specified build_file.
241 """
242 return [p for p in target_list if BuildFile(p) == build_file]
243
244
245 def AllTargets(target_list, target_dicts, build_file):
246 """Returns all targets (direct and dependencies) for the specified build_file.
247 """
248 bftargets = BuildFileTargets(target_list, build_file)
249 deptargets = DeepDependencyTargets(target_dicts, bftargets)
250 return bftargets + deptargets
251
252
253 def WriteOnDiff(filename):
254 """Write to a file only if the new contents differ.
255
256 Arguments:
257 filename: name of the file to potentially write to.
258 Returns:
259 A file like object which will write to temporary file and only overwrite
260 the target if it differs (on close).
261 """
262
263 class Writer:
264 """Wrapper around file which only covers the target if it differs."""
265 def __init__(self):
266 # Pick temporary file.
267 tmp_fd, self.tmp_path = tempfile.mkstemp(
268 suffix='.tmp',
269 prefix=os.path.split(filename)[1] + '.gyp.',
270 dir=os.path.split(filename)[0])
271 try:
272 self.tmp_file = os.fdopen(tmp_fd, 'wb')
273 except Exception:
274 # Don't leave turds behind.
275 os.unlink(self.tmp_path)
276 raise
277
278 def __getattr__(self, attrname):
279 # Delegate everything else to self.tmp_file
280 return getattr(self.tmp_file, attrname)
281
282 def close(self):
283 try:
284 # Close tmp file.
285 self.tmp_file.close()
286 # Determine if different.
287 same = False
288 try:
289 same = filecmp.cmp(self.tmp_path, filename, False)
290 except OSError, e:
291 if e.errno != errno.ENOENT:
292 raise
293
294 if same:
295 # The new file is identical to the old one, just get rid of the new
296 # one.
297 os.unlink(self.tmp_path)
298 else:
299 # The new file is different from the old one, or there is no old one.
300 # Rename the new file to the permanent name.
301 #
302 # tempfile.mkstemp uses an overly restrictive mode, resulting in a
303 # file that can only be read by the owner, regardless of the umask.
304 # There's no reason to not respect the umask here, which means that
305 # an extra hoop is required to fetch it and reset the new file's mode.
306 #
307 # No way to get the umask without setting a new one? Set a safe one
308 # and then set it back to the old value.
309 umask = os.umask(077)
310 os.umask(umask)
311 os.chmod(self.tmp_path, 0666 & ~umask)
312 if sys.platform == 'win32' and os.path.exists(filename):
313 # NOTE: on windows (but not cygwin) rename will not replace an
314 # existing file, so it must be preceded with a remove. Sadly there
315 # is no way to make the switch atomic.
316 os.remove(filename)
317 os.rename(self.tmp_path, filename)
318 except Exception:
319 # Don't leave turds behind.
320 os.unlink(self.tmp_path)
321 raise
322
323 return Writer()
324
325
326 # From Alex Martelli,
327 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
328 # ASPN: Python Cookbook: Remove duplicates from a sequence
329 # First comment, dated 2001/10/13.
330 # (Also in the printed Python Cookbook.)
331
332 def uniquer(seq, idfun=None):
333 if idfun is None:
334 def idfun(x): return x
335 seen = {}
336 result = []
337 for item in seq:
338 marker = idfun(item)
339 if marker in seen: continue
340 seen[marker] = 1
341 result.append(item)
342 return result
+0
-0
mozc_build_tools/gyp/pylib/gyp/generator/__init__.py less more
(Empty file)
+0
-88
mozc_build_tools/gyp/pylib/gyp/generator/gypd.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """gypd output module
7
8 This module produces gyp input as its output. Output files are given the
9 .gypd extension to avoid overwriting the .gyp files that they are generated
10 from. Internal references to .gyp files (such as those found in
11 "dependencies" sections) are not adjusted to point to .gypd files instead;
12 unlike other paths, which are relative to the .gyp or .gypd file, such paths
13 are relative to the directory from which gyp was run to create the .gypd file.
14
15 This generator module is intended to be a sample and a debugging aid, hence
16 the "d" for "debug" in .gypd. It is useful to inspect the results of the
17 various merges, expansions, and conditional evaluations performed by gyp
18 and to see a representation of what would be fed to a generator module.
19
20 It's not advisable to rename .gypd files produced by this module to .gyp,
21 because they will have all merges, expansions, and evaluations already
22 performed and the relevant constructs not present in the output; paths to
23 dependencies may be wrong; and various sections that do not belong in .gyp
24 files such as such as "included_files" and "*_excluded" will be present.
25 Output will also be stripped of comments. This is not intended to be a
26 general-purpose gyp pretty-printer; for that, you probably just want to
27 run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
28 comments but won't do all of the other things done to this module's output.
29
30 The specific formatting of the output generated by this module is subject
31 to change.
32 """
33
34
35 import gyp.common
36 import errno
37 import os
38 import pprint
39
40
41 # These variables should just be spit back out as variable references.
42 _generator_identity_variables = [
43 'EXECUTABLE_PREFIX',
44 'EXECUTABLE_SUFFIX',
45 'INTERMEDIATE_DIR',
46 'PRODUCT_DIR',
47 'RULE_INPUT_ROOT',
48 'RULE_INPUT_EXT',
49 'RULE_INPUT_NAME',
50 'RULE_INPUT_PATH',
51 'SHARED_INTERMEDIATE_DIR',
52 ]
53
54 # gypd doesn't define a default value for OS like many other generator
55 # modules. Specify "-D OS=whatever" on the command line to provide a value.
56 generator_default_variables = {
57 }
58
59 # gypd supports multiple toolsets
60 generator_supports_multiple_toolsets = True
61
62 # TODO(mark): This always uses <, which isn't right. The input module should
63 # notify the generator to tell it which phase it is operating in, and this
64 # module should use < for the early phase and then switch to > for the late
65 # phase. Bonus points for carrying @ back into the output too.
66 for v in _generator_identity_variables:
67 generator_default_variables[v] = '<(%s)' % v
68
69
70 def GenerateOutput(target_list, target_dicts, data, params):
71 output_files = {}
72 for qualified_target in target_list:
73 [input_file, target] = \
74 gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
75
76 if input_file[-4:] != '.gyp':
77 continue
78 input_file_stem = input_file[:-4]
79 output_file = input_file_stem + params['options'].suffix + '.gypd'
80
81 if not output_file in output_files:
82 output_files[output_file] = input_file
83
84 for output_file, input_file in output_files.iteritems():
85 output = open(output_file, 'w')
86 pprint.pprint(data[input_file], output)
87 output.close()
+0
-57
mozc_build_tools/gyp/pylib/gyp/generator/gypsh.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """gypsh output module
7
8 gypsh is a GYP shell. It's not really a generator per se. All it does is
9 fire up an interactive Python session with a few local variables set to the
10 variables passed to the generator. Like gypd, it's intended as a debugging
11 aid, to facilitate the exploration of .gyp structures after being processed
12 by the input module.
13
14 The expected usage is "gyp -f gypsh -D OS=desired_os".
15 """
16
17
18 import code
19 import sys
20
21
22 # All of this stuff about generator variables was lovingly ripped from gypd.py.
23 # That module has a much better description of what's going on and why.
24 _generator_identity_variables = [
25 'EXECUTABLE_PREFIX',
26 'EXECUTABLE_SUFFIX',
27 'INTERMEDIATE_DIR',
28 'PRODUCT_DIR',
29 'RULE_INPUT_ROOT',
30 'RULE_INPUT_EXT',
31 'RULE_INPUT_NAME',
32 'RULE_INPUT_PATH',
33 'SHARED_INTERMEDIATE_DIR',
34 ]
35
36 generator_default_variables = {
37 }
38
39 for v in _generator_identity_variables:
40 generator_default_variables[v] = '<(%s)' % v
41
42
43 def GenerateOutput(target_list, target_dicts, data, params):
44 locals = {
45 'target_list': target_list,
46 'target_dicts': target_dicts,
47 'data': data,
48 }
49
50 # Use a banner that looks like the stock Python one and like what
51 # code.interact uses by default, but tack on something to indicate what
52 # locals are available, and identify gypsh.
53 banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
54 (sys.version, sys.platform, repr(sorted(locals.keys())))
55
56 code.interact(banner, local=locals)
+0
-1283
mozc_build_tools/gyp/pylib/gyp/generator/make.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 # Notes:
7 #
8 # This is all roughly based on the Makefile system used by the Linux
9 # kernel, but is a non-recursive make -- we put the entire dependency
10 # graph in front of make and let it figure it out.
11 #
12 # The code below generates a separate .mk file for each target, but
13 # all are sourced by the top-level Makefile. This means that all
14 # variables in .mk-files clobber one another. Be careful to use :=
15 # where appropriate for immediate evaluation, and similarly to watch
16 # that you're not relying on a variable value to last beween different
17 # .mk files.
18 #
19 # TODOs:
20 #
21 # Global settings and utility functions are currently stuffed in the
22 # toplevel Makefile. It may make sense to generate some .mk files on
23 # the side to keep the the files readable.
24
25 import gyp
26 import gyp.common
27 import os.path
28
29 # Debugging-related imports -- remove me once we're solid.
30 import code
31 import pprint
32
33 generator_default_variables = {
34 'EXECUTABLE_PREFIX': '',
35 'EXECUTABLE_SUFFIX': '',
36 'OS': 'linux',
37 'STATIC_LIB_PREFIX': 'lib',
38 'SHARED_LIB_PREFIX': 'lib',
39 'STATIC_LIB_SUFFIX': '.a',
40 'SHARED_LIB_SUFFIX': '.so',
41 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/geni',
42 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
43 'PRODUCT_DIR': '$(builddir)',
44 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
45 'LIB_DIR': '$(obj).$(TOOLSET)',
46 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
47 'RULE_INPUT_PATH': '$(abspath $<)',
48 'RULE_INPUT_EXT': '$(suffix $<)',
49 'RULE_INPUT_NAME': '$(notdir $<)',
50
51 # This appears unused --- ?
52 'CONFIGURATION_NAME': '$(BUILDTYPE)',
53 }
54
55 # Make supports multiple toolsets
56 generator_supports_multiple_toolsets = True
57
58 def ensure_directory_exists(path):
59 dir = os.path.dirname(path)
60 if dir and not os.path.exists(dir):
61 os.makedirs(dir)
62
63 # Header of toplevel Makefile.
64 # This should go into the build tree, but it's easier to keep it here for now.
65 SHARED_HEADER = ("""\
66 # We borrow heavily from the kernel build setup, though we are simpler since
67 # we don't have Kconfig tweaking settings on us.
68
69 # The implicit make rules have it looking for RCS files, among other things.
70 # We instead explicitly write all the rules we care about.
71 # It's even quicker (saves ~200ms) to pass -r on the command line.
72 MAKEFLAGS=-r
73
74 # The V=1 flag on command line makes us verbosely print command lines.
75 ifdef V
76 quiet=
77 else
78 quiet=quiet_
79 endif
80
81 # Specify BUILDTYPE=Release on the command line for a release build.
82 BUILDTYPE ?= __default_configuration__
83
84 # Directory all our build output goes into.
85 # Note that this must be two directories beneath src/ for unit tests to pass,
86 # as they reach into the src/ directory for data with relative paths.
87 builddir ?= $(builddir_name)/$(BUILDTYPE)
88 abs_builddir := $(abspath $(builddir))
89 depsdir := $(builddir)/.deps
90
91 # Object output directory.
92 obj := $(builddir)/obj
93 abs_obj := $(abspath $(obj))
94
95 # We build up a list of every single one of the targets so we can slurp in the
96 # generated dependency rule Makefiles in one pass.
97 all_deps :=
98
99 # C++ apps need to be linked with g++. Not sure what's appropriate.
100 LINK ?= $(CXX)
101
102 CC.target ?= $(CC)
103 CFLAGS.target ?= $(CFLAGS)
104 CXX.target ?= $(CXX)
105 CXXFLAGS.target ?= $(CXXFLAGS)
106 LINK.target ?= $(LINK)
107 LDFLAGS.target ?= $(LDFLAGS)
108 AR.target ?= $(AR)
109 RANLIB.target ?= ranlib
110
111 CC.host ?= gcc
112 CFLAGS.host ?=
113 CXX.host ?= g++
114 CXXFLAGS.host ?=
115 LINK.host ?= g++
116 LDFLAGS.host ?=
117 AR.host ?= ar
118 RANLIB.host ?= ranlib
119
120 # Flags to make gcc output dependency info. Note that you need to be
121 # careful here to use the flags that ccache and distcc can understand.
122 # We write to a dep file on the side first and then rename at the end
123 # so we can't end up with a broken dep file.
124 depfile = $(depsdir)/$@.d
125 DEPFLAGS = -MMD -MF $(depfile).raw
126
127 # We have to fixup the deps output in a few ways.
128 # (1) the file output should mention the proper .o file.
129 # ccache or distcc lose the path to the target, so we convert a rule of
130 # the form:
131 # foobar.o: DEP1 DEP2
132 # into
133 # path/to/foobar.o: DEP1 DEP2
134 # (2) we want missing files not to cause us to fail to build.
135 # We want to rewrite
136 # foobar.o: DEP1 DEP2 \\
137 # DEP3
138 # to
139 # DEP1:
140 # DEP2:
141 # DEP3:
142 # so if the files are missing, they're just considered phony rules.
143 # We have to do some pretty insane escaping to get those backslashes
144 # and dollar signs past make, the shell, and sed at the same time."""
145 r"""
146 define fixup_dep
147 # Fixup path as in (1).
148 sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
149 # Add extra rules as in (2).
150 # We remove slashes and replace spaces with new lines;
151 # remove blank lines;
152 # delete the first line and append a colon to the remaining lines.
153 sed -e 's|\\||' -e 's| |\n|g' $(depfile).raw |\
154 grep -v '^$$' |\
155 sed -e 1d -e 's|$$|:|' \
156 >> $(depfile)
157 rm $(depfile).raw
158 endef
159 """
160 """
161 # Command definitions:
162 # - cmd_foo is the actual command to run;
163 # - quiet_cmd_foo is the brief-output summary of the command.
164
165 quiet_cmd_cc = CC($(TOOLSET)) $@
166 cmd_cc = $(CC.$(TOOLSET)) $(CFLAGS.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) -c -o $@ $<
167
168 quiet_cmd_cxx = CXX($(TOOLSET)) $@
169 cmd_cxx = $(CXX.$(TOOLSET)) $(CXXFLAGS.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) -c -o $@ $<
170
171 quiet_cmd_alink = AR+RANLIB($(TOOLSET)) $@
172 cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) rc $@ $(filter %.o,$^) && $(RANLIB.$(TOOLSET)) $@
173
174 quiet_cmd_touch = TOUCH $@
175 cmd_touch = touch $@
176
177 quiet_cmd_copy = COPY $@
178 # send stderr to /dev/null to ignore messages when linking directories.
179 cmd_copy = ln -f $< $@ 2>/dev/null || cp -af $< $@
180
181 # Due to circular dependencies between libraries :(, we wrap the
182 # special "figure out circular dependencies" flags around the entire
183 # input list during linking.
184 quiet_cmd_link = LINK($(TOOLSET)) $@
185 cmd_link = $(LINK.$(TOOLSET)) $(LDFLAGS.$(TOOLSET)) $(GYP_LDFLAGS) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
186
187 # Shared-object link (for generating .so).
188 # Set SONAME to the library filename so our binaries don't reference the local,
189 # absolute paths used on the link command-line.
190 # TODO: perhaps this can share with the LINK command above?
191 quiet_cmd_solink = SOLINK($(TOOLSET)) $@
192 cmd_solink = $(LINK.$(TOOLSET)) -shared $(LDFLAGS.$(TOOLSET)) $(GYP_LDFLAGS) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
193 """
194 r"""
195 # Define an escape_quotes function to escape single quotes.
196 # This allows us to handle quotes properly as long as we always use
197 # use single quotes and escape_quotes.
198 escape_quotes = $(subst ','\'',$(1))
199 # This comment is here just to include a ' to unconfuse syntax highlighting.
200 # Define an escape_vars function to escape '$' variable syntax.
201 # This allows us to read/write command lines with shell variables (e.g.
202 # $LD_LIBRARY_PATH), without triggering make substitution.
203 escape_vars = $(subst $$,$$$$,$(1))
204 # Helper that expands to a shell command to echo a string exactly as it is in
205 # make. This uses printf instead of echo because printf's behaviour with respect
206 # to escape sequences is more portable than echo's across different shells
207 # (e.g., dash, bash).
208 exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
209 """
210 """
211 # Helper to compare the command we're about to run against the command
212 # we logged the last time we ran the command. Produces an empty
213 # string (false) when the commands match.
214 # Tricky point: Make has no string-equality test function.
215 # The kernel uses the following, but it seems like it would have false
216 # positives, where one string reordered its arguments.
217 # arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
218 # $(filter-out $(cmd_$@), $(cmd_$(1))))
219 # We instead substitute each for the empty string into the other, and
220 # say they're equal if both substitutions produce the empty string.
221 command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$@)),\\
222 $(subst $(cmd_$@),,$(cmd_$(1))))
223
224 # Helper that is non-empty when a prerequisite changes.
225 # Normally make does this implicitly, but we force rules to always run
226 # so we can check their command lines.
227 # $? -- new prerequisites
228 # $| -- order-only dependencies
229 prereq_changed = $(filter-out $|,$?)
230
231 # do_cmd: run a command via the above cmd_foo names, if necessary.
232 # Should always run for a given target to handle command-line changes.
233 # Second argument, if non-zero, makes it do C/C++ dependency munging.
234 define do_cmd
235 $(if $(or $(command_changed),$(prereq_changed)),
236 @$(call exact_echo, $($(quiet)cmd_$(1)))
237 @mkdir -p $(dir $@) $(dir $(depfile))
238 @$(cmd_$(1))
239 @$(call exact_echo,$(call escape_vars,cmd_$@ := $(cmd_$(1)))) > $(depfile)
240 @$(if $(2),$(fixup_dep))
241 )
242 endef
243
244 # Declare "all" target first so it is the default, even though we don't have the
245 # deps yet.
246 .PHONY: all
247 all:
248
249 # make looks for ways to re-generate included makefiles, but in our case, we
250 # don't have a direct way. Explicitly telling make that it has nothing to do
251 # for them makes it go faster.
252 %.d: ;
253
254 # Use FORCE_DO_CMD to force a target to run. Should be coupled with
255 # do_cmd.
256 .PHONY: FORCE_DO_CMD
257 FORCE_DO_CMD:
258
259 """)
260
261 ROOT_HEADER_SUFFIX_RULES = ("""\
262 # Suffix rules, putting all outputs into $(obj).
263 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
264 @$(call do_cmd,cc,1)
265 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
266 @$(call do_cmd,cc)
267 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
268 @$(call do_cmd,cc)
269 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
270 @$(call do_cmd,cxx,1)
271 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
272 @$(call do_cmd,cxx,1)
273 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
274 @$(call do_cmd,cxx,1)
275
276 # Try building from generated source, too.
277 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
278 @$(call do_cmd,cc,1)
279 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
280 @$(call do_cmd,cc)
281 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
282 @$(call do_cmd,cc)
283 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
284 @$(call do_cmd,cxx,1)
285 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
286 @$(call do_cmd,cxx,1)
287 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
288 @$(call do_cmd,cxx,1)
289
290 $(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
291 @$(call do_cmd,cc,1)
292 $(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
293 @$(call do_cmd,cc)
294 $(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
295 @$(call do_cmd,cc)
296 $(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
297 @$(call do_cmd,cxx,1)
298 $(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
299 @$(call do_cmd,cxx,1)
300 $(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
301 @$(call do_cmd,cxx,1)
302 """)
303
304 SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
305 # Suffix rules, putting all outputs into $(obj).
306 """)
307
308 SHARED_HEADER_SUFFIX_RULES_SRCDIR = {
309 '.c': ("""\
310 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
311 @$(call do_cmd,cc,1)
312 """),
313 '.s': ("""\
314 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
315 @$(call do_cmd,cc)
316 """),
317 '.S': ("""\
318 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
319 @$(call do_cmd,cc)
320 """),
321 '.cpp': ("""\
322 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
323 @$(call do_cmd,cxx,1)
324 """),
325 '.cc': ("""\
326 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
327 @$(call do_cmd,cxx,1)
328 """),
329 '.cxx': ("""\
330 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
331 @$(call do_cmd,cxx,1)
332 """),
333 }
334
335 SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
336 # Try building from generated source, too.
337 """)
338
339 SHARED_HEADER_SUFFIX_RULES_OBJDIR1 = {
340 '.c': ("""\
341 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
342 @$(call do_cmd,cc,1)
343 """),
344 '.cc': ("""\
345 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
346 @$(call do_cmd,cxx,1)
347 """),
348 '.cpp': ("""\
349 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
350 @$(call do_cmd,cxx,1)
351 """),
352 }
353
354 SHARED_HEADER_SUFFIX_RULES_OBJDIR2 = {
355 '.c': ("""\
356 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
357 @$(call do_cmd,cc,1)
358 """),
359 '.cc': ("""\
360 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
361 @$(call do_cmd,cxx,1)
362 """),
363 '.cpp': ("""\
364 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
365 @$(call do_cmd,cxx,1)
366 """),
367 }
368
369 SHARED_HEADER_SUFFIX_RULES = (
370 SHARED_HEADER_SUFFIX_RULES_COMMENT1 +
371 ''.join(SHARED_HEADER_SUFFIX_RULES_SRCDIR.values()) +
372 SHARED_HEADER_SUFFIX_RULES_COMMENT2 +
373 ''.join(SHARED_HEADER_SUFFIX_RULES_OBJDIR1.values()) +
374 ''.join(SHARED_HEADER_SUFFIX_RULES_OBJDIR2.values())
375 )
376
377 # This gets added to the very beginning of the Makefile.
378 SHARED_HEADER_SRCDIR = ("""\
379 # The source directory tree.
380 srcdir := %s
381
382 """)
383
384 SHARED_HEADER_BUILDDIR_NAME = ("""\
385 # The name of the builddir.
386 builddir_name ?= %s
387
388 """)
389
390 SHARED_FOOTER = """\
391 # "all" is a concatenation of the "all" targets from all the included
392 # sub-makefiles. This is just here to clarify.
393 all:
394
395 # Add in dependency-tracking rules. $(all_deps) is the list of every single
396 # target in our tree. First, only consider targets that already have been
397 # built, as unbuilt targets will be built regardless of dependency info:
398 all_deps := $(wildcard $(sort $(all_deps)))
399 # Of those, only consider the ones with .d (dependency) info:
400 d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
401 ifneq ($(d_files),)
402 include $(d_files)
403 endif
404 """
405
406 header = """\
407 # This file is generated by gyp; do not edit.
408
409 """
410
411
412 def Compilable(filename):
413 """Return true if the file is compilable (should be in OBJS)."""
414 for res in (filename.endswith(e) for e
415 in ['.c', '.cc', '.cpp', '.cxx', '.s', '.S']):
416 if res:
417 return True
418 return False
419
420
421 def Target(filename):
422 """Translate a compilable filename to its .o target."""
423 return os.path.splitext(filename)[0] + '.o'
424
425
426 def EscapeShellArgument(s):
427 """Quotes an argument so that it will be interpreted literally by a POSIX
428 shell. Taken from
429 http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
430 """
431 return "'" + s.replace("'", "'\\''") + "'"
432
433
434 def EscapeMakeVariableExpansion(s):
435 """Make has its own variable expansion syntax using $. We must escape it for
436 string to be interpreted literally."""
437 return s.replace('$', '$$')
438
439
440 def EscapeCppDefine(s):
441 """Escapes a CPP define so that it will reach the compiler unaltered."""
442 s = EscapeShellArgument(s)
443 s = EscapeMakeVariableExpansion(s)
444 return s
445
446
447 def QuoteIfNecessary(string):
448 """TODO: Should this ideally be replaced with one or more of the above
449 functions?"""
450 if '"' in string:
451 string = '"' + string.replace('"', '\\"') + '"'
452 return string
453
454
455 srcdir_prefix = ''
456 def Sourceify(path):
457 """Convert a path to its source directory form."""
458 if '$(' in path:
459 return path
460 if os.path.isabs(path):
461 return path
462 return srcdir_prefix + path
463
464
465 # Map from qualified target to path to output.
466 target_outputs = {}
467 # Map from qualified target to a list of all linker dependencies,
468 # transitively expanded.
469 # Used in building shared-library-based executables.
470 target_link_deps = {}
471
472
473 class MakefileWriter:
474 """MakefileWriter packages up the writing of one target-specific foobar.mk.
475
476 Its only real entry point is Write(), and is mostly used for namespacing.
477 """
478
479 def Write(self, qualified_target, base_path, output_filename, spec, configs,
480 part_of_all):
481 """The main entry point: writes a .mk file for a single target.
482
483 Arguments:
484 qualified_target: target we're generating
485 base_path: path relative to source root we're building in, used to resolve
486 target-relative paths
487 output_filename: output .mk file name to write
488 spec, configs: gyp info
489 part_of_all: flag indicating this target is part of 'all'
490 """
491 print 'Generating %s' % output_filename
492
493 ensure_directory_exists(output_filename)
494
495 self.fp = open(output_filename, 'w')
496
497 self.fp.write(header)
498
499 self.path = base_path
500 self.target = spec['target_name']
501 self.type = spec['type']
502 self.toolset = spec['toolset']
503
504 deps, link_deps = self.ComputeDeps(spec)
505
506 # Some of the generation below can add extra output, sources, or
507 # link dependencies. All of the out params of the functions that
508 # follow use names like extra_foo.
509 extra_outputs = []
510 extra_sources = []
511 extra_link_deps = []
512
513 self.output = self.ComputeOutput(spec)
514 self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
515 'shared_library')
516 if self.type in self._INSTALLABLE_TARGETS:
517 self.alias = os.path.basename(self.output)
518 else:
519 self.alias = self.output
520
521 self.WriteLn("TOOLSET := " + self.toolset)
522 self.WriteLn("TARGET := " + self.target)
523
524 # Actions must come first, since they can generate more OBJs for use below.
525 if 'actions' in spec:
526 self.WriteActions(spec['actions'], extra_sources, extra_outputs,
527 part_of_all)
528
529 # Rules must be early like actions.
530 if 'rules' in spec:
531 self.WriteRules(spec['rules'], extra_sources, extra_outputs, part_of_all)
532
533 if 'copies' in spec:
534 self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
535
536 all_sources = spec.get('sources', []) + extra_sources
537 if all_sources:
538 self.WriteSources(configs, deps, all_sources,
539 extra_outputs, extra_link_deps, part_of_all)
540 sources = filter(Compilable, all_sources)
541 if sources:
542 self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
543 extensions = set([os.path.splitext(s)[1] for s in sources])
544 for ext in extensions:
545 if ext in SHARED_HEADER_SUFFIX_RULES_SRCDIR:
546 self.WriteLn(SHARED_HEADER_SUFFIX_RULES_SRCDIR[ext])
547 self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
548 for ext in extensions:
549 if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR1:
550 self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR1[ext])
551 for ext in extensions:
552 if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR2:
553 self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR2[ext])
554 self.WriteLn('# End of this set of suffix rules')
555
556
557 self.WriteTarget(spec, configs, deps,
558 extra_link_deps + link_deps, extra_outputs, part_of_all)
559
560 # Update global list of target outputs, used in dependency tracking.
561 target_outputs[qualified_target] = self.alias
562
563 # Update global list of link dependencies.
564 if self.type == 'static_library':
565 target_link_deps[qualified_target] = [self.output]
566 elif self.type == 'shared_library':
567 # Anyone that uses us transitively depend on all of our link
568 # dependencies.
569 target_link_deps[qualified_target] = [self.output] + link_deps
570
571 self.fp.close()
572
573
574 def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
575 """Write a "sub-project" Makefile.
576
577 This is a small, wrapper Makefile that calls the top-level Makefile to build
578 the targets from a single gyp file (i.e. a sub-project).
579
580 Arguments:
581 output_filename: sub-project Makefile name to write
582 makefile_path: path to the top-level Makefile
583 targets: list of "all" targets for this sub-project
584 build_dir: build output directory, relative to the sub-project
585 """
586 print 'Generating %s' % output_filename
587
588 ensure_directory_exists(output_filename)
589 self.fp = open(output_filename, 'w')
590 self.fp.write(header)
591 # For consistency with other builders, put sub-project build output in the
592 # sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
593 self.WriteLn('export builddir_name ?= %s' %
594 os.path.join(os.path.dirname(output_filename), build_dir))
595 self.WriteLn('.PHONY: all')
596 self.WriteLn('all:')
597 if makefile_path:
598 makefile_path = ' -C ' + makefile_path
599 self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
600 self.fp.close()
601
602
603 def WriteActions(self, actions, extra_sources, extra_outputs, part_of_all):
604 """Write Makefile code for any 'actions' from the gyp input.
605
606 extra_sources: a list that will be filled in with newly generated source
607 files, if any
608 extra_outputs: a list that will be filled in with any outputs of these
609 actions (used to make other pieces dependent on these
610 actions)
611 part_of_all: flag indicating this target is part of 'all'
612 """
613 for action in actions:
614 name = self.target + '_' + action['action_name']
615 self.WriteLn('### Rules for action "%s":' % action['action_name'])
616 inputs = action['inputs']
617 outputs = action['outputs']
618
619 # Build up a list of outputs.
620 # Collect the output dirs we'll need.
621 dirs = set()
622 for out in outputs:
623 dir = os.path.split(out)[0]
624 if dir:
625 dirs.add(dir)
626 if int(action.get('process_outputs_as_sources', False)):
627 extra_sources += outputs
628
629 # Write the actual command.
630 command = gyp.common.EncodePOSIXShellList(action['action'])
631 if 'message' in action:
632 self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
633 else:
634 self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name))
635 if len(dirs) > 0:
636 command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
637 # Set LD_LIBRARY_PATH in case the action runs an executable from this
638 # build which links to shared libs from this build.
639 if self.path:
640 cd_action = 'cd %s; ' % Sourceify(self.path)
641 else:
642 cd_action = ''
643 # actions run on the host, so they should in theory only use host
644 # libraries, but until everything is made cross-compile safe, also use
645 # target libraries.
646 # TODO(piman): when everything is cross-compile safe, remove lib.target
647 self.WriteLn('cmd_%s = export LD_LIBRARY_PATH=$(builddir)/lib.host:'
648 '$(builddir)/lib.target:$$LD_LIBRARY_PATH; %s%s'
649 % (name, cd_action, command))
650 self.WriteLn()
651 outputs = map(self.Absolutify, outputs)
652 # The makefile rules are all relative to the top dir, but the gyp actions
653 # are defined relative to their containing dir. This replaces the obj
654 # variable for the action rule with an absolute version so that the output
655 # goes in the right place.
656 # Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
657 # it's superfluous for the "extra outputs", and this avoids accidentally
658 # writing duplicate dummy rules for those outputs.
659 self.WriteMakeRule(outputs[:1], ['obj := $(abs_obj)'])
660 self.WriteMakeRule(outputs[:1], ['builddir := $(abs_builddir)'])
661 self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)),
662 part_of_all=part_of_all, command=name)
663
664 # Stuff the outputs in a variable so we can refer to them later.
665 outputs_variable = 'action_%s_outputs' % name
666 self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs)))
667 extra_outputs.append('$(%s)' % outputs_variable)
668 self.WriteLn()
669
670 self.WriteLn()
671
672
673 def WriteRules(self, rules, extra_sources, extra_outputs, part_of_all):
674 """Write Makefile code for any 'rules' from the gyp input.
675
676 extra_sources: a list that will be filled in with newly generated source
677 files, if any
678 extra_outputs: a list that will be filled in with any outputs of these
679 rules (used to make other pieces dependent on these rules)
680 part_of_all: flag indicating this target is part of 'all'
681 """
682 for rule in rules:
683 name = self.target + '_' + rule['rule_name']
684 count = 0
685 self.WriteLn('### Generated for rule %s:' % name)
686
687 all_outputs = []
688
689 for rule_source in rule['rule_sources']:
690 dirs = set()
691 rule_source_basename = os.path.basename(rule_source)
692 (rule_source_root, rule_source_ext) = \
693 os.path.splitext(rule_source_basename)
694
695 outputs = [self.ExpandInputRoot(out, rule_source_root)
696 for out in rule['outputs']]
697 for out in outputs:
698 dir = os.path.dirname(out)
699 if dir:
700 dirs.add(dir)
701 if int(rule.get('process_outputs_as_sources', False)):
702 extra_sources.append(out)
703 all_outputs += outputs
704 inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
705 rule.get('inputs', [])))
706 actions = ['$(call do_cmd,%s_%d)' % (name, count)]
707
708 if name == 'resources_grit':
709 # HACK: This is ugly. Grit intentionally doesn't touch the
710 # timestamp of its output file when the file doesn't change,
711 # which is fine in hash-based dependency systems like scons
712 # and forge, but not kosher in the make world. After some
713 # discussion, hacking around it here seems like the least
714 # amount of pain.
715 actions += ['@touch --no-create $@']
716
717 # Only write the 'obj' and 'builddir' rules for the "primary" output
718 # (:1); it's superfluous for the "extra outputs", and this avoids
719 # accidentally writing duplicate dummy rules for those outputs.
720 self.WriteMakeRule(outputs[:1], ['obj := $(abs_obj)'])
721 self.WriteMakeRule(outputs[:1], ['builddir := $(abs_builddir)'])
722 self.WriteMakeRule(outputs, inputs + ['FORCE_DO_CMD'], actions)
723 self.WriteLn('all_deps += %s' % ' '.join(outputs))
724
725 action = [self.ExpandInputRoot(ac, rule_source_root)
726 for ac in rule['action']]
727 mkdirs = ''
728 if len(dirs) > 0:
729 mkdirs = 'mkdir -p %s; ' % ' '.join(dirs)
730 if self.path:
731 cd_action = 'cd %s; ' % Sourceify(self.path)
732 else:
733 cd_action = ''
734 # Set LD_LIBRARY_PATH in case the rule runs an executable from this
735 # build which links to shared libs from this build.
736 # rules run on the host, so they should in theory only use host
737 # libraries, but until everything is made cross-compile safe, also use
738 # target libraries.
739 # TODO(piman): when everything is cross-compile safe, remove lib.target
740 self.WriteLn(
741 "cmd_%(name)s_%(count)d = export LD_LIBRARY_PATH="
742 "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
743 "%(cd_action)s%(mkdirs)s%(action)s" % {
744 'action': gyp.common.EncodePOSIXShellList(action),
745 'cd_action': cd_action,
746 'count': count,
747 'mkdirs': mkdirs,
748 'name': name,
749 })
750 self.WriteLn(
751 'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % {
752 'count': count,
753 'name': name,
754 })
755 self.WriteLn()
756 count += 1
757
758 outputs_variable = 'rule_%s_outputs' % name
759 self.WriteList(all_outputs, outputs_variable)
760 extra_outputs.append('$(%s)' % outputs_variable)
761
762 self.WriteLn('### Finished generating for rule: %s' % name)
763 self.WriteLn()
764 self.WriteLn('### Finished generating for all rules')
765 self.WriteLn('')
766
767
768 def WriteCopies(self, copies, extra_outputs, part_of_all):
769 """Write Makefile code for any 'copies' from the gyp input.
770
771 extra_outputs: a list that will be filled in with any outputs of this action
772 (used to make other pieces dependent on this action)
773 part_of_all: flag indicating this target is part of 'all'
774 """
775 self.WriteLn('### Generated for copy rule.')
776
777 variable = self.target + '_copies'
778 outputs = []
779 for copy in copies:
780 for path in copy['files']:
781 path = Sourceify(self.Absolutify(path))
782 filename = os.path.split(path)[1]
783 output = Sourceify(self.Absolutify(os.path.join(copy['destination'],
784 filename)))
785 self.WriteDoCmd([output], [path], 'copy', part_of_all)
786 outputs.append(output)
787 self.WriteLn('%s = %s' % (variable, ' '.join(outputs)))
788 extra_outputs.append('$(%s)' % variable)
789 self.WriteLn()
790
791
792 def WriteSources(self, configs, deps, sources,
793 extra_outputs, extra_link_deps,
794 part_of_all):
795 """Write Makefile code for any 'sources' from the gyp input.
796 These are source files necessary to build the current target.
797
798 configs, deps, sources: input from gyp.
799 extra_outputs: a list of extra outputs this action should be dependent on;
800 used to serialize action/rules before compilation
801 extra_link_deps: a list that will be filled in with any outputs of
802 compilation (to be used in link lines)
803 part_of_all: flag indicating this target is part of 'all'
804 """
805
806 # Write configuration-specific variables for CFLAGS, etc.
807 for configname in sorted(configs.keys()):
808 config = configs[configname]
809 self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D',
810 quoter=EscapeCppDefine)
811 self.WriteLn("# Flags passed to both C and C++ files.");
812 self.WriteList(config.get('cflags'), 'CFLAGS_%s' % configname)
813 self.WriteLn("# Flags passed to only C (and not C++) files.");
814 self.WriteList(config.get('cflags_c'), 'CFLAGS_C_%s' % configname)
815 self.WriteLn("# Flags passed to only C++ (and not C) files.");
816 self.WriteList(config.get('cflags_cc'), 'CFLAGS_CC_%s' % configname)
817 includes = config.get('include_dirs')
818 if includes:
819 includes = map(Sourceify, map(self.Absolutify, includes))
820 self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
821
822 sources = filter(Compilable, sources)
823 objs = map(self.Objectify, map(self.Absolutify, map(Target, sources)))
824 self.WriteList(objs, 'OBJS')
825
826 self.WriteLn('# Add to the list of files we specially track '
827 'dependencies for.')
828 self.WriteLn('all_deps += $(OBJS)')
829 self.WriteLn()
830
831 # Make sure our dependencies are built first.
832 if deps:
833 self.WriteMakeRule(['$(OBJS)'], deps,
834 comment = 'Make sure our dependencies are built '
835 'before any of us.',
836 order_only = True)
837
838 # Make sure the actions and rules run first.
839 # If they generate any extra headers etc., the per-.o file dep tracking
840 # will catch the proper rebuilds, so order only is still ok here.
841 if extra_outputs:
842 self.WriteMakeRule(['$(OBJS)'], extra_outputs,
843 comment = 'Make sure our actions/rules run '
844 'before any of us.',
845 order_only = True)
846
847 if objs:
848 extra_link_deps.append('$(OBJS)')
849 self.WriteLn("""\
850 # CFLAGS et al overrides must be target-local.
851 # See "Target-specific Variable Values" in the GNU Make manual.""")
852 self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)")
853 self.WriteLn("$(OBJS): GYP_CFLAGS := $(CFLAGS_$(BUILDTYPE)) "
854 "$(CFLAGS_C_$(BUILDTYPE)) "
855 "$(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))")
856 self.WriteLn("$(OBJS): GYP_CXXFLAGS := $(CFLAGS_$(BUILDTYPE)) "
857 "$(CFLAGS_CC_$(BUILDTYPE)) "
858 "$(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))")
859
860 self.WriteLn()
861
862
863 def ComputeOutput(self, spec):
864 """Return the 'output' (full output path) of a gyp spec.
865
866 E.g., the loadable module 'foobar' in directory 'baz' will produce
867 '$(obj)/baz/libfoobar.so'
868 """
869 output = None
870 target = spec['target_name']
871 target_prefix = ''
872 target_ext = ''
873 path = os.path.join('$(obj).' + self.toolset, self.path)
874 if self.type == 'static_library':
875 if target[:3] == 'lib':
876 target = target[3:]
877 target_prefix = 'lib'
878 target_ext = '.a'
879 elif self.type in ('loadable_module', 'shared_library'):
880 if target[:3] == 'lib':
881 target = target[3:]
882 target_prefix = 'lib'
883 target_ext = '.so'
884 elif self.type == 'none':
885 target = '%s.stamp' % target
886 elif self.type == 'settings':
887 return None
888 elif self.type == 'executable':
889 path = os.path.join('$(builddir)')
890 else:
891 print ("ERROR: What output file should be generated?",
892 "typ", self.type, "target", target)
893
894 path = spec.get('product_dir', path)
895 target_prefix = spec.get('product_prefix', target_prefix)
896 target = spec.get('product_name', target)
897 product_ext = spec.get('product_extension')
898 if product_ext:
899 target_ext = '.' + product_ext
900
901 return os.path.join(path, target_prefix + target + target_ext)
902
903
904 def ComputeDeps(self, spec):
905 """Compute the dependencies of a gyp spec.
906
907 Returns a tuple (deps, link_deps), where each is a list of
908 filenames that will need to be put in front of make for either
909 building (deps) or linking (link_deps).
910 """
911 deps = []
912 link_deps = []
913 if 'dependencies' in spec:
914 deps.extend([target_outputs[dep] for dep in spec['dependencies']
915 if target_outputs[dep]])
916 for dep in spec['dependencies']:
917 if dep in target_link_deps:
918 link_deps.extend(target_link_deps[dep])
919 deps.extend(link_deps)
920 # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)?
921 # This hack makes it work:
922 # link_deps.extend(spec.get('libraries', []))
923 return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
924
925
926 def WriteTarget(self, spec, configs, deps, link_deps, extra_outputs,
927 part_of_all):
928 """Write Makefile code to produce the final target of the gyp spec.
929
930 spec, configs: input from gyp.
931 deps, link_deps: dependency lists; see ComputeDeps()
932 extra_outputs: any extra outputs that our target should depend on
933 part_of_all: flag indicating this target is part of 'all'
934 """
935
936 self.WriteLn('### Rules for final target.')
937
938 if extra_outputs:
939 self.WriteMakeRule([self.output], extra_outputs,
940 comment = 'Build our special outputs first.',
941 order_only = True)
942 self.WriteMakeRule(extra_outputs, deps,
943 comment=('Preserve order dependency of '
944 'special output on deps.'),
945 order_only = True,
946 multiple_output_trick = False)
947
948 if self.type not in ('settings', 'none'):
949 for configname in sorted(configs.keys()):
950 config = configs[configname]
951 self.WriteList(config.get('ldflags'), 'LDFLAGS_%s' % configname)
952 libraries = spec.get('libraries')
953 if libraries:
954 # Remove duplicate entries
955 libraries = gyp.common.uniquer(libraries)
956 self.WriteList(libraries, 'LIBS')
957 self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' % self.output)
958 self.WriteLn('%s: LIBS := $(LIBS)' % self.output)
959
960 if self.type == 'executable':
961 self.WriteDoCmd([self.output], link_deps, 'link', part_of_all)
962 elif self.type == 'static_library':
963 self.WriteDoCmd([self.output], link_deps, 'alink', part_of_all)
964 elif self.type in ('loadable_module', 'shared_library'):
965 self.WriteDoCmd([self.output], link_deps, 'solink', part_of_all)
966 elif self.type == 'none':
967 # Write a stamp line.
968 self.WriteDoCmd([self.output], deps, 'touch', part_of_all)
969 elif self.type == 'settings':
970 # Only used for passing flags around.
971 pass
972 else:
973 print "WARNING: no output for", self.type, target
974
975 # Add an alias for each target (if there are any outputs).
976 # Installable target aliases are created below.
977 if ((self.output and self.output != self.target) and
978 (self.type not in self._INSTALLABLE_TARGETS)):
979 self.WriteMakeRule([self.target], [self.output],
980 comment='Add target alias', phony = True)
981 if part_of_all:
982 self.WriteMakeRule(['all'], [self.target],
983 comment = 'Add target alias to "all" target.',
984 phony = True)
985
986 # Add special-case rules for our installable targets.
987 # 1) They need to install to the build dir or "product" dir.
988 # 2) They get shortcuts for building (e.g. "make chrome").
989 # 3) They are part of "make all".
990 if self.type in self._INSTALLABLE_TARGETS:
991 if self.type in ('shared_library'):
992 file_desc = 'shared library'
993 # Install all shared libs into a common directory (per toolset) for
994 # convenient access with LD_LIBRARY_PATH.
995 binpath = '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
996 else:
997 file_desc = 'executable'
998 binpath = '$(builddir)/' + self.alias
999 installable_deps = [self.output]
1000 # Point the target alias to the final binary output.
1001 self.WriteMakeRule([self.target], [binpath],
1002 comment='Add target alias', phony = True)
1003 if binpath != self.output:
1004 self.WriteDoCmd([binpath], [self.output], 'copy',
1005 comment = 'Copy this to the %s output path.' %
1006 file_desc, part_of_all=part_of_all)
1007 installable_deps.append(binpath)
1008 if self.output != self.alias and self.alias != self.target:
1009 self.WriteMakeRule([self.alias], installable_deps,
1010 comment = 'Short alias for building this %s.' %
1011 file_desc, phony = True)
1012 if part_of_all:
1013 self.WriteMakeRule(['all'], [binpath],
1014 comment = 'Add %s to "all" target.' % file_desc,
1015 phony = True)
1016
1017
1018 def WriteList(self, list, variable=None, prefix='', quoter=QuoteIfNecessary):
1019 """Write a variable definition that is a list of values.
1020
1021 E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
1022 foo = blaha blahb
1023 but in a pretty-printed style.
1024 """
1025 self.fp.write(variable + " := ")
1026 if list:
1027 list = [quoter(prefix + l) for l in list]
1028 self.fp.write(" \\\n\t".join(list))
1029 self.fp.write("\n\n")
1030
1031
1032 def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None):
1033 """Write a Makefile rule that uses do_cmd.
1034
1035 This makes the outputs dependent on the command line that was run,
1036 as well as support the V= make command line flag.
1037 """
1038 self.WriteMakeRule(outputs, inputs,
1039 actions = ['$(call do_cmd,%s)' % command],
1040 comment = comment,
1041 force = True)
1042 # Add our outputs to the list of targets we read depfiles from.
1043 self.WriteLn('all_deps += %s' % ' '.join(outputs))
1044
1045
  def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
                    order_only=False, force=False, phony=False,
                    multiple_output_trick=True):
    """Write a Makefile rule, with some extra tricks.

    outputs: a list of outputs for the rule (note: this is not directly
        supported by make; see comments below)
    inputs: a list of inputs for the rule
    actions: a list of shell commands to run for the rule
    comment: a comment to put in the Makefile above the rule (also useful
        for making this Python script's code self-documenting)
    order_only: if true, makes the dependency order-only
    force: if true, include FORCE_DO_CMD as an order-only dep
    phony: if true, the rule does not actually generate the named output, the
        output is just a name to run the rule
    multiple_output_trick: if true (the default), perform tricks such as dummy
        rules to avoid problems with multiple outputs.
    """
    if comment:
      self.WriteLn('# ' + comment)
    if phony:
      self.WriteLn('.PHONY: ' + ' '.join(outputs))
    # TODO(evanm): just make order_only a list of deps instead of these hacks.
    if order_only:
      order_insert = '| '
    else:
      order_insert = ''
    if force:
      force_append = ' FORCE_DO_CMD'
    else:
      force_append = ''
    if actions:
      # do_cmd reads $(TOOLSET); pin it per-target so parallel toolsets work.
      self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
    # The rule proper is written against the FIRST output only; extra outputs
    # are chained onto it below.
    self.WriteLn('%s: %s%s%s' % (outputs[0], order_insert, ' '.join(inputs),
                                 force_append))
    if actions:
      for action in actions:
        self.WriteLn('\t%s' % action)
    if multiple_output_trick and len(outputs) > 1:
      # If we have more than one output, a rule like
      #   foo bar: baz
      # means that for *each* output we must run the action, potentially
      # in parallel.  That is not what we're trying to write -- what
      # we want is that we run the action once and it generates all
      # the files.
      # http://www.gnu.org/software/hello/manual/automake/Multiple-Outputs.html
      # discusses this problem and has this solution:
      # 1) Write the naive rule that would produce parallel runs of
      # the action.
      # 2) Make the outputs seralized on each other, so we won't start
      # a parallel run until the first run finishes, at which point
      # we'll have generated all the outputs and we're done.
      self.WriteLn('%s: %s' % (' '.join(outputs[1:]), outputs[0]))
      # Add a dummy command to the "extra outputs" rule, otherwise make seems to
      # think these outputs haven't (couldn't have?) changed, and thus doesn't
      # flag them as changed (i.e. include in '$?') when evaluating dependent
      # rules, which in turn causes do_cmd() to skip running dependent commands.
      self.WriteLn('%s: ;' % (' '.join(outputs[1:])))
    self.WriteLn()
1105
1106
1107 def WriteLn(self, text=''):
1108 self.fp.write(text + '\n')
1109
1110
1111 def Objectify(self, path):
1112 """Convert a path to its output directory form."""
1113 if '$(' in path:
1114 path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset)
1115 return path
1116 return '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
1117
1118 def Absolutify(self, path):
1119 """Convert a subdirectory-relative path into a base-relative path.
1120 Skips over paths that contain variables."""
1121 if '$(' in path:
1122 return path
1123 return os.path.normpath(os.path.join(self.path, path))
1124
1125
1126 def FixupArgPath(self, arg):
1127 if '/' in arg or '.h.' in arg:
1128 return self.Absolutify(arg)
1129 return arg
1130
1131
1132 def ExpandInputRoot(self, template, expansion):
1133 if '%(INPUT_ROOT)s' not in template:
1134 return template
1135 path = template % { 'INPUT_ROOT': expansion }
1136 if not os.path.dirname(path):
1137 # If it's just the file name, turn it into a path so FixupArgPath()
1138 # will know to Absolutify() it.
1139 path = os.path.join('.', path)
1140 return path
1141
1142
def GenerateOutput(target_list, target_dicts, data, params):
  """Generator entry point: write the root Makefile, one .mk per target,
  and one sub-Makefile per gyp file with targets."""
  options = params['options']
  generator_flags = params.get('generator_flags', {})
  builddir_name = generator_flags.get('output_dir', 'out')

  def CalculateMakefilePath(build_file, base_name):
    """Determine where to write a Makefile for a given gyp file."""
    # Paths in gyp files are relative to the .gyp file, but we want
    # paths relative to the source root for the master makefile.  Grab
    # the path of the .gyp file as the base to relativize against.
    # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.depth)
    # We write the file in the base_path directory.
    output_file = os.path.join(options.depth, base_path, base_name)
    if options.generator_output:
      output_file = os.path.join(options.generator_output, output_file)
    return (base_path, output_file)

  # TODO: search for the first non-'Default' target.  This can go
  # away when we add verification that all targets have the
  # necessary configurations.
  default_configuration = None
  toolsets = set([target_dicts[target]['toolset'] for target in target_list])
  for target in target_list:
    spec = target_dicts[target]
    if spec['default_configuration'] != 'Default':
      default_configuration = spec['default_configuration']
      break
  if not default_configuration:
    default_configuration = 'Default'

  srcdir = '.'
  makefile_name = 'Makefile' + options.suffix
  makefile_path = os.path.join(options.depth, makefile_name)
  if options.generator_output:
    # When writing into a separate output tree, source paths must be
    # prefixed so the generated rules can find them.
    global srcdir_prefix
    makefile_path = os.path.join(options.generator_output, makefile_path)
    srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
    srcdir_prefix = '$(srcdir)/'
  ensure_directory_exists(makefile_path)
  root_makefile = open(makefile_path, 'w')
  root_makefile.write(SHARED_HEADER_SRCDIR % srcdir)
  root_makefile.write(SHARED_HEADER_BUILDDIR_NAME % builddir_name)
  root_makefile.write(SHARED_HEADER.replace('__default_configuration__',
                                            default_configuration))
  for toolset in toolsets:
    root_makefile.write('TOOLSET := %s\n' % toolset)
  root_makefile.write(ROOT_HEADER_SUFFIX_RULES)

  # Find the list of targets that derive from the gyp file(s) being built.
  needed_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
      needed_targets.add(target)

  build_files = set()
  include_list = set()
  for qualified_target in target_list:
    build_file, target, toolset = gyp.common.ParseQualifiedTarget(
        qualified_target)
    build_files.add(gyp.common.RelativePath(build_file, options.depth))
    included_files = data[build_file]['included_files']
    for included_file in included_files:
      # The included_files entries are relative to the dir of the build file
      # that included them, so we have to undo that and then make them relative
      # to the root dir.
      relative_include_file = gyp.common.RelativePath(
          gyp.common.UnrelativePath(included_file, build_file), options.depth)
      abs_include_file = os.path.abspath(relative_include_file)
      # If the include file is from the ~/.gyp dir, we should use absolute path
      # so that relocating the src dir doesn't break the path.
      if (params['home_dot_gyp'] and
          abs_include_file.startswith(params['home_dot_gyp'])):
        build_files.add(abs_include_file)
      else:
        build_files.add(relative_include_file)

    (base_path, output_file) = CalculateMakefilePath(build_file,
        target + '.' + toolset + options.suffix + '.mk')

    spec = target_dicts[qualified_target]
    configs = spec['configurations']

    # One MakefileWriter per target writes the per-target .mk file.
    writer = MakefileWriter()
    writer.Write(qualified_target, base_path, output_file, spec, configs,
                 part_of_all=qualified_target in needed_targets)

    # Our root_makefile lives at the source root.  Compute the relative path
    # from there to the output_file for including.
    mkfile_rel_path = gyp.common.RelativePath(output_file,
                                              os.path.dirname(makefile_path))
    include_list.add('include ' + mkfile_rel_path + '\n')

  # Write out per-gyp (sub-project) Makefiles.
  depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd())
  for build_file in build_files:
    # The paths in build_files were relativized above, so undo that before
    # testing against the non-relativized items in target_list and before
    # calculating the Makefile path.
    build_file = os.path.join(depth_rel_path, build_file)
    gyp_targets = [target_dicts[target]['target_name'] for target in target_list
                   if target.startswith(build_file) and
                   target in needed_targets]
    # Only generate Makefiles for gyp files with targets.
    if not gyp_targets:
      continue
    (base_path, output_file) = CalculateMakefilePath(build_file,
        os.path.splitext(os.path.basename(build_file))[0] + '.Makefile')
    makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path),
                                                os.path.dirname(output_file))
    writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets,
                        builddir_name)


  # Write out the sorted list of includes.
  root_makefile.write('\n')
  for include in sorted(include_list):
    root_makefile.write(include)
  root_makefile.write('\n')

  # Write the target to regenerate the Makefile.
  if generator_flags.get('auto_regeneration', True):
    build_files_args = [gyp.common.RelativePath(filename, options.depth)
                        for filename in params['build_files_arg']]
    gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
                                              options.depth)
    if not gyp_binary.startswith(os.sep):
      gyp_binary = os.path.join('.', gyp_binary)
    root_makefile.write("%s: %s\n\t%s\n" % (
        makefile_name,
        ' '.join(map(Sourceify, build_files)),
        gyp.common.EncodePOSIXShellList(
            [gyp_binary, '-fmake'] +
            gyp.RegenerateFlags(options) +
            build_files_args)))

  root_makefile.write(SHARED_FOOTER)

  root_makefile.close()
+0
-1216
mozc_build_tools/gyp/pylib/gyp/generator/msvs.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import ntpath
7 import posixpath
8 import os
9 import re
10 import subprocess
11 import sys
12
13 import gyp.MSVSNew as MSVSNew
14 import gyp.MSVSProject as MSVSProject
15 import gyp.MSVSToolFile as MSVSToolFile
16 import gyp.MSVSUserFile as MSVSUserFile
17 import gyp.MSVSVersion as MSVSVersion
18 import gyp.common
19
20
# Regular expression for validating Visual Studio GUIDs. If the GUID
# contains lowercase hex letters, MSVS will be fine. However,
# IncrediBuild BuildConsole will parse the solution file, but then
# silently skip building the target causing hard to track down errors.
# Note that this only happens with the BuildConsole, and does not occur
# if IncrediBuild is executed from inside Visual Studio. This regex
# validates that the string looks like a GUID with all uppercase hex
# letters.
VALID_MSVS_GUID_CHARS = re.compile('^[A-F0-9\-]+$')


# Mapping from gyp's generator variables to their MSVS equivalents.
# The $(...) entries are Visual Studio macros expanded at build time.
generator_default_variables = {
    'EXECUTABLE_PREFIX': '',
    'EXECUTABLE_SUFFIX': '.exe',
    'STATIC_LIB_PREFIX': '',
    'SHARED_LIB_PREFIX': '',
    'STATIC_LIB_SUFFIX': '.lib',
    'SHARED_LIB_SUFFIX': '.dll',
    'INTERMEDIATE_DIR': '$(IntDir)',
    'SHARED_INTERMEDIATE_DIR': '$(OutDir)/obj/global_intermediate',
    'OS': 'win',
    'PRODUCT_DIR': '$(OutDir)',
    'LIB_DIR': '$(OutDir)/lib',
    'RULE_INPUT_ROOT': '$(InputName)',
    'RULE_INPUT_EXT': '$(InputExt)',
    'RULE_INPUT_NAME': '$(InputFileName)',
    'RULE_INPUT_PATH': '$(InputPath)',
    'CONFIGURATION_NAME': '$(ConfigurationName)',
}


# The msvs specific sections that hold paths
generator_additional_path_sections = [
    'msvs_cygwin_dirs',
    'msvs_props',
]

# msvs-specific keys that live outside the per-configuration dicts.
generator_additional_non_configuration_keys = [
    'msvs_cygwin_dirs',
    'msvs_cygwin_shell',
]
62
63 cached_username = None
64 cached_domain = None
65
66 # TODO(gspencer): Switch the os.environ calls to be
67 # win32api.GetDomainName() and win32api.GetUserName() once the
68 # python version in depot_tools has been updated to work on Vista
69 # 64-bit.
70 def _GetDomainAndUserName():
71 if sys.platform not in ('win32', 'cygwin'):
72 return ('DOMAIN', 'USERNAME')
73 global cached_username
74 global cached_domain
75 if not cached_domain or not cached_username:
76 domain = os.environ.get('USERDOMAIN')
77 username = os.environ.get('USERNAME')
78 if not domain or not username:
79 call = subprocess.Popen(['net', 'config', 'Workstation'],
80 stdout=subprocess.PIPE)
81 config = call.communicate()[0]
82 username_re = re.compile('^User name\s+(\S+)', re.MULTILINE)
83 username_match = username_re.search(config)
84 if username_match:
85 username = username_match.group(1)
86 domain_re = re.compile('^Logon domain\s+(\S+)', re.MULTILINE)
87 domain_match = domain_re.search(config)
88 if domain_match:
89 domain = domain_match.group(1)
90 cached_domain = domain
91 cached_username = username
92 return (cached_domain, cached_username)
93
94 fixpath_prefix = None
95
96 def _FixPath(path):
97 """Convert paths to a form that will make sense in a vcproj file.
98
99 Arguments:
100 path: The path to convert, may contain / etc.
101 Returns:
102 The path with all slashes made into backslashes.
103 """
104 if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$':
105 path = os.path.join(fixpath_prefix, path)
106 path = path.replace('/', '\\')
107 if len(path) > 0 and path[-1] == '\\':
108 path = path[:-1]
109 return path
110
111
112 def _SourceInFolders(sources, prefix=None, excluded=None):
113 """Converts a list split source file paths into a vcproj folder hierarchy.
114
115 Arguments:
116 sources: A list of source file paths split.
117 prefix: A list of source file path layers meant to apply to each of sources.
118 Returns:
119 A hierarchy of filenames and MSVSProject.Filter objects that matches the
120 layout of the source tree.
121 For example:
122 _SourceInFolders([['a', 'bob1.c'], ['b', 'bob2.c']], prefix=['joe'])
123 -->
124 [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
125 MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
126 """
127 if not prefix: prefix = []
128 result = []
129 excluded_result = []
130 folders = dict()
131 # Gather files into the final result, excluded, or folders.
132 for s in sources:
133 if len(s) == 1:
134 filename = '\\'.join(prefix + s)
135 if filename in excluded:
136 excluded_result.append(filename)
137 else:
138 result.append(filename)
139 else:
140 if not folders.get(s[0]):
141 folders[s[0]] = []
142 folders[s[0]].append(s[1:])
143 # Add a folder for excluded files.
144 if excluded_result:
145 excluded_folder = MSVSProject.Filter('_excluded_files',
146 contents=excluded_result)
147 result.append(excluded_folder)
148 # Populate all the folders.
149 for f in folders:
150 contents = _SourceInFolders(folders[f], prefix=prefix + [f],
151 excluded=excluded)
152 contents = MSVSProject.Filter(f, contents=contents)
153 result.append(contents)
154
155 return result
156
157
158 def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
159 if not value: return
160 # TODO(bradnelson): ugly hack, fix this more generally!!!
161 if 'Directories' in setting or 'Dependencies' in setting:
162 if type(value) == str:
163 value = value.replace('/', '\\')
164 else:
165 value = [i.replace('/', '\\') for i in value]
166 if not tools.get(tool_name):
167 tools[tool_name] = dict()
168 tool = tools[tool_name]
169 if tool.get(setting):
170 if only_if_unset: return
171 if type(tool[setting]) == list:
172 tool[setting] += value
173 else:
174 raise TypeError(
175 'Appending "%s" to a non-list setting "%s" for tool "%s" is '
176 'not allowed, previous value: %s' % (
177 value, setting, tool_name, str(tool[setting])))
178 else:
179 tool[setting] = value
180
181
182 def _ConfigPlatform(config_data):
183 return config_data.get('msvs_configuration_platform', 'Win32')
184
185
186 def _ConfigBaseName(config_name, platform_name):
187 if config_name.endswith('_' + platform_name):
188 return config_name[0:-len(platform_name)-1]
189 else:
190 return config_name
191
192
def _ConfigFullName(config_name, config_data):
  """Return the 'Base|Platform' configuration name MSVS expects."""
  platform_name = _ConfigPlatform(config_data)
  base_name = _ConfigBaseName(config_name, platform_name)
  return '%s|%s' % (base_name, platform_name)
196
197
def _PrepareActionRaw(spec, cmd, cygwin_shell, has_input_path, quote_cmd):
  # Build the custom-build command line for `cmd`.  With cygwin_shell, the
  # command is wrapped to run under bash with $(IntDir)/$(OutDir)/$(InputPath)
  # remapped to cygwin paths; otherwise it is converted to a native cmd line.
  if cygwin_shell:
    # Find path to cygwin.
    cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
    # Prepare command.
    direct_cmd = cmd
    direct_cmd = [i.replace('$(IntDir)',
                            '`cygpath -m "${INTDIR}"`') for i in direct_cmd]
    direct_cmd = [i.replace('$(OutDir)',
                            '`cygpath -m "${OUTDIR}"`') for i in direct_cmd]
    if has_input_path:
      direct_cmd = [i.replace('$(InputPath)',
                              '`cygpath -m "${INPUTPATH}"`')
                    for i in direct_cmd]
    # Quote each argument, then escape the quotes for the outer bash -c "...".
    direct_cmd = ['"%s"' % i for i in direct_cmd]
    direct_cmd = [i.replace('"', '\\"') for i in direct_cmd]
    #direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
    direct_cmd = ' '.join(direct_cmd)
    # TODO(quote): regularize quoting path names throughout the module
    cmd = (
        '"$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
        'set CYGWIN=nontsec&& ')
    # Only export env vars the command actually references.
    if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0:
      cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& '
    if direct_cmd.find('INTDIR') >= 0:
      cmd += 'set INTDIR=$(IntDir)&& '
    if direct_cmd.find('OUTDIR') >= 0:
      cmd += 'set OUTDIR=$(OutDir)&& '
    if has_input_path and direct_cmd.find('INPUTPATH') >= 0:
      cmd += 'set INPUTPATH=$(InputPath) && '
    cmd += (
        'bash -c "%(cmd)s"')
    cmd = cmd % {'cygwin_dir': cygwin_dir,
                 'cmd': direct_cmd}
    return cmd
  else:
    # Convert cat --> type to mimic unix.
    if cmd[0] == 'cat':
      cmd = ['type'] + cmd[1:]
    if quote_cmd:
      # Support a mode for using cmd directly.
      # Convert any paths to native form (first element is used directly).
      # TODO(quote): regularize quoting path names throughout the module
      direct_cmd = ([cmd[0].replace('/', '\\')] +
                    ['"%s"' % _FixPath(i) for i in cmd[1:]])
    else:
      direct_cmd = ([cmd[0].replace('/', '\\')] +
                    [_FixPath(i) for i in cmd[1:]])
    # Collapse into a single command.
    return ' '.join(direct_cmd)
248
def _PrepareAction(spec, rule, has_input_path):
  """Build the command line for a rule or action.

  Arguments:
    spec: the target project dict
    rule: the rule/action dict; may override 'msvs_cygwin_shell' and
        'msvs_quote_cmd'
    has_input_path: whether $(InputPath) substitution applies
  Returns:
    The command string for a custom build step.
  """
  # Fix: removed the unused local 'cygwin_dir'; _PrepareActionRaw computes
  # the same value itself when it needs it.

  # Currently this weird argument munging is used to duplicate the way a
  # python script would need to be run as part of the chrome tree.
  # Eventually we should add some sort of rule_default option to set this
  # per project. For now the behavior chrome needs is the default.
  mcs = rule.get('msvs_cygwin_shell')
  if mcs is None:
    mcs = int(spec.get('msvs_cygwin_shell', 1))
  elif isinstance(mcs, str):
    mcs = int(mcs)
  quote_cmd = int(rule.get('msvs_quote_cmd', 1))
  return _PrepareActionRaw(spec, rule['action'], mcs,
                           has_input_path, quote_cmd)
265
266
267 def _PickPrimaryInput(inputs):
268 # Pick second input as the primary one, unless there's only one.
269 # TODO(bradnelson): this is a bit of a hack,
270 # find something more general.
271 if len(inputs) > 1:
272 return inputs[1]
273 else:
274 return inputs[0]
275
def _SetRunAs(user_file, config_name, c_data, command,
              environment=None, working_directory=""):
  """Add a run_as rule to the user file.

  Arguments:
    user_file: The MSVSUserFile to add the command to.
    config_name: The name of the configuration to add it to
    c_data: The dict of the configuration to add it to
    command: The path to the command to execute.
    environment: A dict of environment variables for the command. (optional)
    working_directory: Directory to run the command in. (optional)
  """
  # Fix: docstring previously described a nonexistent 'args' parameter and
  # omitted 'environment'; also replaced the mutable {} default argument
  # with a None sentinel.
  if environment is None:
    environment = {}
  user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
                             command, environment, working_directory)
290
def _AddCustomBuildTool(p, spec, inputs, outputs, description, cmd):
  """Add a custom build tool to execute something.

  Arguments:
    p: the target project
    spec: the target project dict
    inputs: list of inputs
    outputs: list of outputs
    description: description of the action
    cmd: command line to execute
  """
  inputs = [_FixPath(i) for i in inputs]
  outputs = [_FixPath(i) for i in outputs]
  # MSVS attaches custom build steps to a single file; the other inputs
  # become AdditionalDependencies of that step.
  tool = MSVSProject.Tool(
      'VCCustomBuildTool', {
        'Description': description,
        'AdditionalDependencies': ';'.join(inputs),
        'Outputs': ';'.join(outputs),
        'CommandLine': cmd,
      })
  primary_input = _PickPrimaryInput(inputs)
  # Add to the properties of primary input for each config.
  for config_name, c_data in spec['configurations'].iteritems():
    p.AddFileConfig(primary_input,
                    _ConfigFullName(config_name, c_data), tools=[tool])
316
317
318 def _RuleExpandPath(path, input_file):
319 """Given the input file to which a rule applied, string substitute a path.
320
321 Arguments:
322 path: a path to string expand
323 input_file: the file to which the rule applied.
324 Returns:
325 The string substituted path.
326 """
327 path = path.replace('$(InputName)',
328 os.path.splitext(os.path.split(input_file)[1])[0])
329 path = path.replace('$(InputExt)',
330 os.path.splitext(os.path.split(input_file)[1])[1])
331 path = path.replace('$(InputFileName)', os.path.split(input_file)[1])
332 path = path.replace('$(InputPath)', input_file)
333 return path
334
335
336 def _FindRuleTriggerFiles(rule, sources):
337 """Find the list of files which a particular rule applies to.
338
339 Arguments:
340 rule: the rule in question
341 sources: the set of all known source files for this project
342 Returns:
343 The list of sources that trigger a particular rule.
344 """
345 rule_ext = rule['extension']
346 return [s for s in sources if s.endswith('.' + rule_ext)]
347
348
def _RuleInputsAndOutputs(rule, trigger_file):
  """Find the inputs and outputs generated by a rule.

  Arguments:
    rule: the rule in question.
    trigger_file: the source file that triggered the rule; used to expand
        the $(Input...) macros in the rule's raw inputs and outputs.
  Returns:
    The pair of (inputs, outputs) involved in this rule.
  """
  raw_inputs = rule.get('inputs', [])
  raw_outputs = rule.get('outputs', [])
  inputs = set()
  outputs = set()
  # The triggering file is always an input itself.
  inputs.add(trigger_file)
  for i in raw_inputs:
    inputs.add(_RuleExpandPath(i, trigger_file))
  for o in raw_outputs:
    outputs.add(_RuleExpandPath(o, trigger_file))
  return (inputs, outputs)
368
369
def _GenerateNativeRules(p, rules, output_dir, spec, options):
  """Generate a native rules file.

  Arguments:
    p: the target project
    rules: the set of rules to include
    output_dir: the directory in which the project/gyp resides
    spec: the project dict
    options: global generator options
  """
  rules_filename = '%s%s.rules' % (spec['target_name'],
                                   options.suffix)
  rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename))
  rules_file.Create(spec['target_name'])
  # Add each rule.
  for r in rules:
    rule_name = r['rule_name']
    rule_ext = r['extension']
    inputs = [_FixPath(i) for i in r.get('inputs', [])]
    outputs = [_FixPath(i) for i in r.get('outputs', [])]
    # has_input_path=True: the rule command may reference $(InputPath).
    cmd = _PrepareAction(spec, r, has_input_path=True)
    rules_file.AddCustomBuildRule(name=rule_name,
                                  description=r.get('message', rule_name),
                                  extensions=[rule_ext],
                                  additional_dependencies=inputs,
                                  outputs=outputs,
                                  cmd=cmd)
  # Write out rules file.
  rules_file.Write()

  # Add rules file to project.
  p.AddToolFile(rules_filename)
402
403
404 def _Cygwinify(path):
405 path = path.replace('$(OutDir)', '$(OutDirCygwin)')
406 path = path.replace('$(IntDir)', '$(IntDirCygwin)')
407 return path
408
409
def _GenerateExternalRules(p, rules, output_dir, spec,
                           sources, options, actions_to_add):
  """Generate an external makefile to do a set of rules.

  Arguments:
    p: the target project
    rules: the list of rules to include
    output_dir: path containing project and gyp files
    spec: project specification data
    sources: set of sources known (mutated: the makefile name is added)
    options: global generator options
    actions_to_add: deferred list of actions to add in; a build step that
        invokes the generated makefile is appended here
  """
  filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix)
  # Named mk_file (not 'file') so we don't shadow the builtin.
  mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
  # Find cygwin style versions of some paths.
  mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
  mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
  # Gather stuff needed to emit all: target.
  all_inputs = set()
  all_outputs = set()
  all_output_dirs = set()
  first_outputs = []
  for rule in rules:
    trigger_files = _FindRuleTriggerFiles(rule, sources)
    for tf in trigger_files:
      inputs, outputs = _RuleInputsAndOutputs(rule, tf)
      all_inputs.update(inputs)
      all_outputs.update(outputs)
      # Only use one target from each rule as the dependency for
      # 'all' so we don't try to build each rule multiple times.
      first_outputs.append(list(outputs)[0])
      # Get the unique output directories for this rule.
      output_dirs = [os.path.split(i)[0] for i in outputs]
      for od in output_dirs:
        all_output_dirs.add(od)
  first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
  # Write out all: target, including mkdir for each output directory.
  mk_file.write('all: %s\n' % ' '.join(first_outputs_cyg))
  for od in all_output_dirs:
    mk_file.write('\tmkdir -p %s\n' % od)
  mk_file.write('\n')
  # Define how each output is generated.
  for rule in rules:
    trigger_files = _FindRuleTriggerFiles(rule, sources)
    for tf in trigger_files:
      # Get all the inputs and outputs for this rule for this trigger file.
      inputs, outputs = _RuleInputsAndOutputs(rule, tf)
      inputs = [_Cygwinify(i) for i in inputs]
      outputs = [_Cygwinify(i) for i in outputs]
      # Prepare the command line for this rule.
      cmd = [_RuleExpandPath(c, tf) for c in rule['action']]
      cmd = ['"%s"' % i for i in cmd]
      cmd = ' '.join(cmd)
      # Add it to the makefile.
      mk_file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs)))
      mk_file.write('\t%s\n\n' % cmd)
  # Close up the file.
  mk_file.close()

  # Add makefile to list of sources.
  sources.add(filename)
  # Add a build action to call makefile.
  cmd = ['make',
         'OutDir=$(OutDir)',
         'IntDir=$(IntDir)',
         '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
         '-f', filename]
  cmd = _PrepareActionRaw(spec, cmd, True, False, True)
  # TODO(bradnelson): this won't be needed if we have a better way to pick
  # the primary input.
  all_inputs = list(all_inputs)
  all_inputs.insert(1, filename)
  actions_to_add.append({
      'inputs': [_FixPath(i) for i in all_inputs],
      'outputs': [_FixPath(i) for i in all_outputs],
      'description': 'Running %s' % cmd,
      'cmd': cmd,
  })
488
489
490 def _EscapeEnvironmentVariableExpansion(s):
491 """Escapes any % characters so that Windows-style environment variable
492 expansions will leave them alone.
493 See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
494 to understand why we have to do this."""
495 s = s.replace('%', '%%')
496 return s
497
498
499 quote_replacer_regex = re.compile(r'(\\*)"')
500 def _EscapeCommandLineArgument(s):
501 """Escapes a Windows command-line argument, so that the Win32
502 CommandLineToArgv function will turn the escaped result back into the
503 original string. See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
504 ("Parsing C++ Command-Line Arguments") to understand why we have to do
505 this."""
506 def replace(match):
507 # For a literal quote, CommandLineToArgv requires an odd number of
508 # backslashes preceding it, and it produces half as many literal backslashes
509 # (rounded down). So we need to produce 2n+1 backslashes.
510 return 2 * match.group(1) + '\\"'
511 # Escape all quotes so that they are interpreted literally.
512 s = quote_replacer_regex.sub(replace, s)
513 # Now add unescaped quotes so that any whitespace is interpreted literally.
514 s = '"' + s + '"'
515 return s
516
517
518 delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)')
519 def _EscapeVCProjCommandLineArgListItem(s):
520 """The VCProj format stores string lists in a single string using commas and
521 semi-colons as separators, which must be quoted if they are to be
522 interpreted literally. However, command-line arguments may already have
523 quotes, and the VCProj parser is ignorant of the backslash escaping
524 convention used by CommandLineToArgv, so the command-line quotes and the
525 VCProj quotes may not be the same quotes. So to store a general
526 command-line argument in a VCProj list, we need to parse the existing
527 quoting according to VCProj's convention and quote any delimiters that are
528 not already quoted by that convention. The quotes that we add will also be
529 seen by CommandLineToArgv, so if backslashes precede them then we also have
530 to escape those backslashes according to the CommandLineToArgv
531 convention."""
532 def replace(match):
533 # For a non-literal quote, CommandLineToArgv requires an even number of
534 # backslashes preceding it, and it produces half as many literal
535 # backslashes. So we need to produce 2n backslashes.
536 return 2 * match.group(1) + '"' + match.group(2) + '"'
537 list = s.split('"')
538 # The unquoted segments are at the even-numbered indices.
539 for i in range(0, len(list), 2):
540 list[i] = delimiters_replacer_regex.sub(replace, list[i])
541 # Concatenate back into a single string
542 s = '"'.join(list)
543 if len(list) % 2 == 0:
544 # String ends while still quoted according to VCProj's convention. This
545 # means the delimiter and the next list item that follow this one in the
546 # .vcproj file will be misinterpreted as part of this item. There is nothing
547 # we can do about this. Adding an extra quote would correct the problem in
548 # the VCProj but cause the same problem on the final command-line. Moving
549 # the item to the end of the list does works, but that's only possible if
550 # there's only one such item. Let's just warn the user.
551 print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
552 'quotes in ' + s)
553 return s
554
555
def _EscapeCppDefine(s):
  """Escapes a CPP define so that it will reach the compiler unaltered."""
  # Apply the three escaping layers in order: cmd environment expansion,
  # CommandLineToArgv quoting, then VCProj list-delimiter quoting.
  for escape in (_EscapeEnvironmentVariableExpansion,
                 _EscapeCommandLineArgument,
                 _EscapeVCProjCommandLineArgListItem):
    s = escape(s)
  return s
562
563
def _GenerateRules(p, output_dir, options, spec,
                   sources, excluded_sources,
                   actions_to_add):
  """Generate all the rules for a particular project.

  Arguments:
    p: the project to add the rules to
    output_dir: directory to emit rules to
    options: global options passed to the generator
    spec: the specification for this project
    sources: the set of all known source files in this project (mutated)
    excluded_sources: the set of sources excluded from normal processing
        (mutated)
    actions_to_add: deferred list of actions to add in
  """
  rules = spec.get('rules', [])
  # Partition by whether the rule runs through MSVS natively or via an
  # external makefile.
  rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
  rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]

  # Handle rules that use a native rules file.
  if rules_native:
    _GenerateNativeRules(p, rules_native, output_dir, spec, options)

  # Handle external rules (non-native rules).
  if rules_external:
    _GenerateExternalRules(p, rules_external, output_dir, spec,
                           sources, options, actions_to_add)

  # Add outputs generated by each rule (if applicable).
  for rule in rules:
    # Only rules that ask for their outputs to be treated as sources.
    if int(rule.get('process_outputs_as_sources', False)):
      # Add in the outputs from this rule.
      trigger_files = _FindRuleTriggerFiles(rule, sources)
      for tf in trigger_files:
        inputs, outputs = _RuleInputsAndOutputs(rule, tf)
        inputs.remove(tf)
        sources.update(inputs)
        excluded_sources.update(inputs)
        sources.update(outputs)
602
603
def _GenerateProject(vcproj_filename, build_file, spec, options, version):
  """Generates a vcproj file.

  Arguments:
    vcproj_filename: Filename of the vcproj file to generate.
    build_file: Filename of the .gyp file that the vcproj file comes from.
    spec: The target dictionary containing the properties of the target.
    options: Global generator options.
    version: The selected Visual Studio version object.
  Returns:
    The guid of the generated project (either taken from 'msvs_guid' or
    assigned by the project writer) so callers can reference it.
  """
  # Pluck out the default configuration.
  default_config = spec['configurations'][spec['default_configuration']]
  # Decide the guid of the project.
  guid = default_config.get('msvs_guid')
  if guid:
    if VALID_MSVS_GUID_CHARS.match(guid) == None:
      raise ValueError('Invalid MSVS guid: "%s".  Must match regex: "%s".' %
                       (guid, VALID_MSVS_GUID_CHARS.pattern))
    guid = '{%s}' % guid

  # Skip emitting anything if told to with msvs_existing_vcproj option.
  if default_config.get('msvs_existing_vcproj'):
    return guid

  #print 'Generating %s' % vcproj_filename

  vcproj_dir = os.path.dirname(vcproj_filename)
  if vcproj_dir and not os.path.exists(vcproj_dir):
    os.makedirs(vcproj_dir)

  # Gather list of unique platforms.
  platforms = set()
  for configuration in spec['configurations']:
    platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
  platforms = list(platforms)

  p = MSVSProject.Writer(vcproj_filename, version=version)
  p.Create(spec['target_name'], guid=guid, platforms=platforms)

  # Create the user file (holds per-user debug/run settings).
  (domain, username) = _GetDomainAndUserName()
  vcuser_filename = '.'.join([vcproj_filename, domain, username, 'user'])
  user_file = MSVSUserFile.Writer(vcuser_filename, version=version)
  user_file.Create(spec['target_name'])

  # Get directory project file is in.
  gyp_dir = os.path.split(vcproj_filename)[0]

  # Pick target configuration type (MSVS ConfigurationType codes).
  try:
    config_type = {
        'executable': '1',  # .exe
        'shared_library': '2',  # .dll
        'loadable_module': '2',  # .dll
        'static_library': '4',  # .lib
        'none': '10',  # Utility type
        'dummy_executable': '1',  # .exe
        }[spec['type']]
  except KeyError, e:
    if spec.get('type'):
      raise Exception('Target type %s is not a valid target type for '
                      'target %s in %s.' %
                      (spec['type'], spec['target_name'], build_file))
    else:
      raise Exception('Missing type field for target %s in %s.' %
                      (spec['target_name'], build_file))

  for config_name, c in spec['configurations'].iteritems():
    # Process each configuration.
    vsprops_dirs = c.get('msvs_props', [])
    vsprops_dirs = [_FixPath(i) for i in vsprops_dirs]

    # Prepare the list of tools as a dictionary.
    tools = dict()

    # Add in msvs_settings.
    for tool in c.get('msvs_settings', {}):
      settings = c['msvs_settings'][tool]
      for setting in settings:
        _ToolAppend(tools, tool, setting, settings[setting])

    # Add in includes.
    # TODO(bradnelson): include_dirs should really be flexible enough not to
    #                   require this sort of thing.
    include_dirs = (
        c.get('include_dirs', []) +
        c.get('msvs_system_include_dirs', []))
    resource_include_dirs = c.get('resource_include_dirs', include_dirs)
    include_dirs = [_FixPath(i) for i in include_dirs]
    resource_include_dirs = [_FixPath(i) for i in resource_include_dirs]
    _ToolAppend(tools, 'VCCLCompilerTool',
                'AdditionalIncludeDirectories', include_dirs)
    _ToolAppend(tools, 'VCResourceCompilerTool',
                'AdditionalIncludeDirectories', resource_include_dirs)

    # Add in libraries.
    libraries = spec.get('libraries', [])
    # Strip out -l, as it is not used on windows (but is needed so we can pass
    # in libraries that are assumed to be in the default library path).
    libraries = [re.sub('^(\-l)', '', lib) for lib in libraries]
    # Add them.
    _ToolAppend(tools, 'VCLinkerTool',
                'AdditionalDependencies', libraries)

    # Select a name for the output file.
    output_file_map = {
        'executable': ('VCLinkerTool', '$(OutDir)\\', '.exe'),
        'shared_library': ('VCLinkerTool', '$(OutDir)\\', '.dll'),
        'loadable_module': ('VCLinkerTool', '$(OutDir)\\', '.dll'),
        'static_library': ('VCLibrarianTool', '$(OutDir)\\lib\\', '.lib'),
        'dummy_executable': ('VCLinkerTool', '$(IntDir)\\', '.junk'),
    }
    output_file_props = output_file_map.get(spec['type'])
    if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
      vc_tool, out_dir, suffix = output_file_props
      out_dir = spec.get('product_dir', out_dir)
      product_extension = spec.get('product_extension')
      if product_extension:
        suffix = '.' + product_extension
      prefix = spec.get('product_prefix', '')
      product_name = spec.get('product_name', '$(ProjectName)')
      out_file = ntpath.join(out_dir, prefix + product_name + suffix)
      # only_if_unset: explicit msvs_settings OutputFile wins over this.
      _ToolAppend(tools, vc_tool, 'OutputFile', out_file,
                  only_if_unset=True)

    # Add defines.
    defines = []
    for d in c.get('defines', []):
      # A list-form define is a NAME=VALUE pair.
      if type(d) == list:
        fd = '='.join([str(dpart) for dpart in d])
      else:
        fd = str(d)
      fd = _EscapeCppDefine(fd)
      defines.append(fd)

    _ToolAppend(tools, 'VCCLCompilerTool',
                'PreprocessorDefinitions', defines)
    _ToolAppend(tools, 'VCResourceCompilerTool',
                'PreprocessorDefinitions', defines)

    # Change program database directory to prevent collisions.
    _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
                '$(IntDir)\\$(ProjectName)\\vc80.pdb')

    # Add disabled warnings.
    disabled_warnings = [str(i) for i in c.get('msvs_disabled_warnings', [])]
    _ToolAppend(tools, 'VCCLCompilerTool',
                'DisableSpecificWarnings', disabled_warnings)

    # Add Pre-build.
    prebuild = c.get('msvs_prebuild')
    _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)

    # Add Post-build.
    postbuild = c.get('msvs_postbuild')
    _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)

    # Turn on precompiled headers if appropriate.
    header = c.get('msvs_precompiled_header')
    if header:
      header = os.path.split(header)[1]
      _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
      _ToolAppend(tools, 'VCCLCompilerTool',
                  'PrecompiledHeaderThrough', header)
      _ToolAppend(tools, 'VCCLCompilerTool',
                  'ForcedIncludeFiles', header)

    # Loadable modules don't generate import libraries;
    # tell dependent projects to not expect one.
    if spec['type'] == 'loadable_module':
      _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true')

    # Set the module definition file if any.
    if spec['type'] in ['shared_library', 'loadable_module']:
      def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
      if len(def_files) == 1:
        _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile',
                    _FixPath(def_files[0]))
      elif def_files:
        raise ValueError('Multiple module definition files in one target, '
                         'target %s lists multiple .def files: %s' % (
                         spec['target_name'], ' '.join(def_files)))

    # Convert tools to expected form.
    tool_list = []
    for tool, settings in tools.iteritems():
      # Collapse settings with lists.
      settings_fixed = {}
      for setting, value in settings.iteritems():
        if type(value) == list:
          # These two settings are space-delimited in the vcproj; every
          # other list setting is semicolon-delimited.
          if ((tool == 'VCLinkerTool' and
               setting == 'AdditionalDependencies') or
              setting == 'AdditionalOptions'):
            settings_fixed[setting] = ' '.join(value)
          else:
            settings_fixed[setting] = ';'.join(value)
        else:
          settings_fixed[setting] = value
      # Add in this tool.
      tool_list.append(MSVSProject.Tool(tool, settings_fixed))

    # Prepare configuration attributes.
    prepared_attrs = {}
    source_attrs = c.get('msvs_configuration_attributes', {})
    for a in source_attrs:
      prepared_attrs[a] = source_attrs[a]
    # Add props files.
    if vsprops_dirs:
      prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs)
    # Set configuration type.
    prepared_attrs['ConfigurationType'] = config_type
    if not prepared_attrs.has_key('OutputDirectory'):
      prepared_attrs['OutputDirectory'] = '$(SolutionDir)$(ConfigurationName)'
    if not prepared_attrs.has_key('IntermediateDirectory'):
      intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)'
      prepared_attrs['IntermediateDirectory'] = intermediate

    # Add in this configuration.
    p.AddConfig(_ConfigFullName(config_name, c),
                attrs=prepared_attrs, tools=tool_list)

  # Prepare list of sources and excluded sources.
  sources = set(spec.get('sources', []))
  excluded_sources = set()
  # Add in the gyp file.
  gyp_file = os.path.split(build_file)[1]
  sources.add(gyp_file)
  # Add in 'action' inputs and outputs.
  for a in spec.get('actions', []):
    inputs = a.get('inputs')
    if not inputs:
      # This is an action with no inputs.  Make the primary input
      # be the .gyp file itself so Visual Studio has a place to
      # hang the custom build rule.
      inputs = [gyp_file]
      a['inputs'] = inputs
    primary_input = _PickPrimaryInput(inputs)
    inputs = set(inputs)
    sources.update(inputs)
    # Non-primary inputs show up in the project but are excluded from build.
    inputs.remove(primary_input)
    excluded_sources.update(inputs)
    if int(a.get('process_outputs_as_sources', False)):
      outputs = set(a.get('outputs', []))
      sources.update(outputs)
  # Add in 'copies' inputs and outputs.
  for cpy in spec.get('copies', []):
    files = set(cpy.get('files', []))
    sources.update(files)

  # Add rules.
  actions_to_add = []
  _GenerateRules(p, gyp_dir, options, spec,
                 sources, excluded_sources,
                 actions_to_add)

  # Exclude excluded sources coming into the generator.
  excluded_sources.update(set(spec.get('sources_excluded', [])))
  # Add excluded sources into sources for good measure.
  sources.update(excluded_sources)
  # Convert to proper windows form.
  # NOTE: sources goes from being a set to a list here.
  # NOTE: excluded_sources goes from being a set to a list here.
  sources = [_FixPath(i) for i in sources]
  # Convert to proper windows form.
  excluded_sources = [_FixPath(i) for i in excluded_sources]

  # If any non-native rules use 'idl' as an extension exclude idl files.
  # Gather a list here to use later.
  using_idl = False
  for rule in spec.get('rules', []):
    if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
      using_idl = True
      break
  if using_idl:
    excluded_idl = [i for i in sources if i.endswith('.idl')]
  else:
    excluded_idl = []

  # List of precompiled header related keys.
  precomp_keys = [
      'msvs_precompiled_header',
      'msvs_precompiled_source',
  ]

  # Gather a list of precompiled header related sources.
  precompiled_related = []
  for config_name, c in spec['configurations'].iteritems():
    for k in precomp_keys:
      f = c.get(k)
      if f:
        precompiled_related.append(_FixPath(f))

  # Find the excluded ones, minus the precompiled header related ones.
  fully_excluded = [i for i in excluded_sources if i not in precompiled_related]

  # Convert to folders and the right slashes.
  sources = [i.split('\\') for i in sources]
  sources = _SourceInFolders(sources, excluded=fully_excluded)
  # Add in dummy file for type none.
  if spec['type'] == 'dummy_executable':
    # Pull in a dummy main so it can link successfully.
    dummy_relpath = gyp.common.RelativePath(
        options.depth + '\\tools\\gyp\\gyp_dummy.c', gyp_dir)
    sources.append(dummy_relpath)
  # Add in files.
  p.AddFiles(sources)

  # Add deferred actions to add.
  for a in actions_to_add:
    _AddCustomBuildTool(p, spec,
                        inputs=a['inputs'],
                        outputs=a['outputs'],
                        description=a['description'],
                        cmd=a['cmd'])

  # Exclude excluded sources from being built.
  for f in excluded_sources:
    for config_name, c in spec['configurations'].iteritems():
      precomped = [_FixPath(c.get(i, '')) for i in precomp_keys]
      # Don't do this for ones that are precompiled header related.
      if f not in precomped:
        p.AddFileConfig(f, _ConfigFullName(config_name, c),
                        {'ExcludedFromBuild': 'true'})

  # If any non-native rules use 'idl' as an extension exclude idl files.
  # Exclude them now.
  for config_name, c in spec['configurations'].iteritems():
    for f in excluded_idl:
      p.AddFileConfig(f, _ConfigFullName(config_name, c),
                      {'ExcludedFromBuild': 'true'})

  # Add in tool files (rules).
  tool_files = set()
  for config_name, c in spec['configurations'].iteritems():
    for f in c.get('msvs_tool_files', []):
      tool_files.add(f)
  for f in tool_files:
    p.AddToolFile(f)

  # Handle pre-compiled headers source stubs specially.
  for config_name, c in spec['configurations'].iteritems():
    source = c.get('msvs_precompiled_source')
    if source:
      source = _FixPath(source)
      # UsePrecompiledHeader=1 for if using precompiled headers.
      tool = MSVSProject.Tool('VCCLCompilerTool',
                              {'UsePrecompiledHeader': '1'})
      p.AddFileConfig(source, _ConfigFullName(config_name, c),
                      {}, tools=[tool])

  # Add actions.
  actions = spec.get('actions', [])
  for a in actions:
    cmd = _PrepareAction(spec, a, has_input_path=False)
    _AddCustomBuildTool(p, spec,
                        inputs=a.get('inputs', []),
                        outputs=a.get('outputs', []),
                        description=a.get('message', a['action_name']),
                        cmd=cmd)

  # Add run_as and test targets.
  has_run_as = False
  if spec.get('run_as') or int(spec.get('test', 0)):
    has_run_as = True
    run_as = spec.get('run_as', {
        'action' : ['$(TargetPath)', '--gtest_print_time'],
    })
    working_directory = run_as.get('working_directory', '.')
    action = run_as.get('action', [])
    environment = run_as.get('environment', [])
    for config_name, c_data in spec['configurations'].iteritems():
      _SetRunAs(user_file, config_name, c_data,
                action, environment, working_directory)

  # Add copies.
  for cpy in spec.get('copies', []):
    for src in cpy.get('files', []):
      dst = os.path.join(cpy['destination'], os.path.basename(src))
      # _AddCustomBuildTool() will call _FixPath() on the inputs and
      # outputs, so do the same for our generated command line.
      if src.endswith('/'):
        # Directory copy: recursive xcopy of the directory's contents.
        src_bare = src[:-1]
        base_dir = posixpath.split(src_bare)[0]
        outer_dir = posixpath.split(src_bare)[1]
        cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % (
            _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir)
        _AddCustomBuildTool(p, spec,
                            inputs=[src, build_file],
                            outputs=['dummy_copies', dst],
                            description='Copying %s to %s' % (src, dst),
                            cmd=cmd)
      else:
        # Single-file copy; ERRORLEVEL reset keeps mkdir failure benign.
        cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
            _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst))
        _AddCustomBuildTool(p, spec,
                            inputs=[src], outputs=[dst],
                            description='Copying %s to %s' % (src, dst),
                            cmd=cmd)

  # Write it out.
  p.Write()

  # Write out the user file, but only if we need to.
  if has_run_as:
    user_file.Write()

  # Return the guid so we can refer to it elsewhere.
  return p.guid
1010
1011
1012 def _GetPathDict(root, path):
1013 if path == '':
1014 return root
1015 parent, folder = os.path.split(path)
1016 parent_dict = _GetPathDict(root, parent)
1017 if folder not in parent_dict:
1018 parent_dict[folder] = dict()
1019 return parent_dict[folder]
1020
1021
def _DictsToFolders(base_path, bucket, flat):
  """Recursively turn a tree of dicts into MSVSFolder entries (or a flat
  list of leaf entries when flat is true)."""
  children = []
  for folder, contents in bucket.iteritems():
    # Leaves (non-dict contents) pass straight through.
    if type(contents) != dict:
      children.append(contents)
      continue
    subfolder_path = os.path.join(base_path, folder)
    nested = _DictsToFolders(subfolder_path, contents, flat)
    if flat:
      children += nested
    else:
      children.append(MSVSNew.MSVSFolder(subfolder_path,
                                         name='(' + folder + ')',
                                         entries=nested))
  return children
1039
1040
1041 def _CollapseSingles(parent, node):
1042 # Recursively explorer the tree of dicts looking for projects which are
1043 # the sole item in a folder which has the same name as the project. Bring
1044 # such projects up one level.
1045 if (type(node) == dict and
1046 len(node) == 1 and
1047 node.keys()[0] == parent + '.vcproj'):
1048 return node[node.keys()[0]]
1049 if type(node) != dict:
1050 return node
1051 for child in node.keys():
1052 node[child] = _CollapseSingles(child, node[child])
1053 return node
1054
1055
def _GatherSolutionFolders(project_objs, flat):
  """Arrange project objects into a folder hierarchy keyed by gyp path."""
  root = {}
  # Convert into a tree of dicts on path.
  for qualified_target in project_objs.keys():
    gyp_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
    subtree = _GetPathDict(root, os.path.dirname(gyp_file))
    subtree[target + '.vcproj'] = project_objs[qualified_target]
  # Walk down from the top until we hit a folder that has more than one entry.
  # In practice, this strips the top-level "src/" dir from the hierarchy in
  # the solution.
  while len(root) == 1 and type(root[list(root)[0]]) == dict:
    root = root[list(root)[0]]
  # Collapse singles.
  root = _CollapseSingles('', root)
  # Merge buckets until everything is a root entry.
  return _DictsToFolders('', root, flat)
1073
1074
def _ProjectObject(sln, qualified_target, project_objs, projects):
  """Return the MSVSProject object for qualified_target, building it (and
  memoizing it in project_objs) on first request."""
  # Done if this project already has an object.
  cached = project_objs.get(qualified_target)
  if cached:
    return cached
  spec = projects[qualified_target]['spec']
  # Materialize objects for every dependency first (recursively).
  dep_objs = [_ProjectObject(sln, dep, project_objs, projects)
              for dep in spec.get('dependencies', [])]
  # Find relative path to vcproj from sln.
  vcproj_rel_path = _FixPath(gyp.common.RelativePath(
      projects[qualified_target]['vcproj_path'], os.path.split(sln)[0]))
  # Prepare a dict indicating which project configurations are used for which
  # solution configurations for this target.
  config_platform_overrides = {}
  for config_name, c in spec['configurations'].iteritems():
    platform = c.get('msvs_target_platform', _ConfigPlatform(c))
    fixed_config_fullname = '%s|%s' % (
        _ConfigBaseName(config_name, _ConfigPlatform(c)), platform)
    config_platform_overrides[_ConfigFullName(config_name, c)] = \
        fixed_config_fullname
  # Create, memoize and return the object for this project.
  obj = MSVSNew.MSVSProject(
      vcproj_rel_path,
      name=spec['target_name'],
      guid=projects[qualified_target]['guid'],
      dependencies=dep_objs,
      config_platform_overrides=config_platform_overrides)
  project_objs[qualified_target] = obj
  return obj
1108
1109
def CalculateVariables(default_variables, params):
  """Generated variables that require params to be known."""
  generator_flags = params.get('generator_flags', {})

  # Select project file format version (if unset, default to auto detecting).
  msvs_version = MSVSVersion.SelectVisualStudioVersion(
      generator_flags.get('msvs_version', 'auto'))
  # Stash msvs_version for later (so we don't have to probe the system twice).
  params['msvs_version'] = msvs_version

  # Set a variable so conditions can be based on msvs_version.
  default_variables['MSVS_VERSION'] = msvs_version.ShortName()

  # To determine processor word size on Windows, in addition to checking
  # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
  # process), it is also necessary to check PROCESSOR_ARCITEW6432 (which
  # contains the actual word size of the system when running thru WOW64).
  arch = os.environ.get('PROCESSOR_ARCHITECTURE', '')
  arch_wow64 = os.environ.get('PROCESSOR_ARCHITEW6432', '')
  if '64' in arch or '64' in arch_wow64:
    default_variables['MSVS_OS_BITS'] = 64
  else:
    default_variables['MSVS_OS_BITS'] = 32
1134
1135
def GenerateOutput(target_list, target_dicts, data, params):
  """Generate .sln and .vcproj files.

  This is the entry point for this generator.
  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
    data: Dictionary containing per .gyp data.
    params: Dict of global options; 'options' and 'msvs_version' are read.
  """
  global fixpath_prefix

  options = params['options']
  generator_flags = params.get('generator_flags', {})

  # Get the project file format version back out of where we stashed it in
  # GeneratorCalculatedVariables.
  msvs_version = params['msvs_version']

  # Prepare the set of configurations.
  configs = set()
  for qualified_target in target_list:
    build_file = gyp.common.BuildFile(qualified_target)
    spec = target_dicts[qualified_target]
    for config_name, c in spec['configurations'].iteritems():
      configs.add(_ConfigFullName(config_name, c))
  configs = list(configs)

  # Generate each project.
  projects = {}
  for qualified_target in target_list:
    build_file = gyp.common.BuildFile(qualified_target)
    spec = target_dicts[qualified_target]
    if spec['toolset'] != 'target':
      raise Exception(
          'Multiple toolsets not supported in msvs build (target %s)' %
          qualified_target)
    default_config = spec['configurations'][spec['default_configuration']]
    vcproj_filename = default_config.get('msvs_existing_vcproj')
    if not vcproj_filename:
      vcproj_filename = spec['target_name'] + options.suffix + '.vcproj'
    vcproj_path = os.path.join(os.path.split(build_file)[0], vcproj_filename)
    if options.generator_output:
      # Redirect output under generator_output; fixpath_prefix (a module
      # global consumed by _FixPath) maps paths back to the original tree.
      projectDirPath = os.path.dirname(os.path.abspath(vcproj_path))
      vcproj_path = os.path.join(options.generator_output, vcproj_path)
      fixpath_prefix = gyp.common.RelativePath(projectDirPath,
                                               os.path.dirname(vcproj_path))
    projects[qualified_target] = {
        'vcproj_path': vcproj_path,
        'guid': _GenerateProject(vcproj_path, build_file,
                                 spec, options, version=msvs_version),
        'spec': spec,
    }

  fixpath_prefix = None

  for build_file in data.keys():
    # Validate build_file extension
    if build_file[-4:] != '.gyp':
      continue
    sln_path = build_file[:-4] + options.suffix + '.sln'
    if options.generator_output:
      sln_path = os.path.join(options.generator_output, sln_path)
    #print 'Generating %s' % sln_path
    # Get projects in the solution, and their dependents.
    sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
    sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects)
    # Convert projects to Project Objects.
    project_objs = {}
    for p in sln_projects:
      _ProjectObject(sln_path, p, project_objs, projects)
    # Create folder hierarchy.
    root_entries = _GatherSolutionFolders(
        project_objs, flat=msvs_version.FlatSolution())
    # Create solution.
    sln = MSVSNew.MSVSSolution(sln_path,
                               entries=root_entries,
                               variants=configs,
                               websiteProperties=False,
                               version=msvs_version)
    sln.Write()
+0
-1047
mozc_build_tools/gyp/pylib/gyp/generator/scons.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import gyp
7 import gyp.common
8 import gyp.SCons as SCons
9 import os.path
10 import pprint
11 import re
12
13
14 # TODO: remove when we delete the last WriteList() call in this module
15 WriteList = SCons.WriteList
16
17
18 generator_default_variables = {
19 'EXECUTABLE_PREFIX': '',
20 'EXECUTABLE_SUFFIX': '',
21 'STATIC_LIB_PREFIX': '${LIBPREFIX}',
22 'SHARED_LIB_PREFIX': '${SHLIBPREFIX}',
23 'STATIC_LIB_SUFFIX': '${LIBSUFFIX}',
24 'SHARED_LIB_SUFFIX': '${SHLIBSUFFIX}',
25 'INTERMEDIATE_DIR': '${INTERMEDIATE_DIR}',
26 'SHARED_INTERMEDIATE_DIR': '${SHARED_INTERMEDIATE_DIR}',
27 'OS': 'linux',
28 'PRODUCT_DIR': '$TOP_BUILDDIR',
29 'SHARED_LIB_DIR': '$LIB_DIR',
30 'LIB_DIR': '$LIB_DIR',
31 'RULE_INPUT_ROOT': '${SOURCE.filebase}',
32 'RULE_INPUT_EXT': '${SOURCE.suffix}',
33 'RULE_INPUT_NAME': '${SOURCE.file}',
34 'RULE_INPUT_PATH': '${SOURCE.abspath}',
35 'CONFIGURATION_NAME': '${CONFIG_NAME}',
36 }
37
38 # Tell GYP how to process the input for us.
39 generator_handles_variants = True
40 generator_wants_absolute_build_file_paths = True
41
42
43 def FixPath(path, prefix):
44 if not os.path.isabs(path) and not path[0] == '$':
45 path = prefix + path
46 return path
47
48
49 header = """\
50 # This file is generated; do not edit.
51 """
52
53
54 _alias_template = """
55 if GetOption('verbose'):
56 _action = Action([%(action)s])
57 else:
58 _action = Action([%(action)s], %(message)s)
59 _outputs = env.Alias(
60 ['_%(target_name)s_action'],
61 %(inputs)s,
62 _action
63 )
64 env.AlwaysBuild(_outputs)
65 """
66
67 _run_as_template = """
68 if GetOption('verbose'):
69 _action = Action([%(action)s])
70 else:
71 _action = Action([%(action)s], %(message)s)
72 """
73
74 _run_as_template_suffix = """
75 _run_as_target = env.Alias('run_%(target_name)s', target_files, _action)
76 env.Requires(_run_as_target, [
77 Alias('%(target_name)s'),
78 ])
79 env.AlwaysBuild(_run_as_target)
80 """
81
82 _command_template = """
83 if GetOption('verbose'):
84 _action = Action([%(action)s])
85 else:
86 _action = Action([%(action)s], %(message)s)
87 _outputs = env.Command(
88 %(outputs)s,
89 %(inputs)s,
90 _action
91 )
92 """
93
94 # This is copied from the default SCons action, updated to handle symlinks.
95 _copy_action_template = """
96 import shutil
97 import SCons.Action
98
99 def _copy_files_or_dirs_or_symlinks(dest, src):
100 SCons.Node.FS.invalidate_node_memos(dest)
101 if SCons.Util.is_List(src) and os.path.isdir(dest):
102 for file in src:
103 shutil.copy2(file, dest)
104 return 0
105 elif os.path.islink(src):
106 linkto = os.readlink(src)
107 os.symlink(linkto, dest)
108 return 0
109 elif os.path.isfile(src):
110 return shutil.copy2(src, dest)
111 else:
112 return shutil.copytree(src, dest, 1)
113
114 def _copy_files_or_dirs_or_symlinks_str(dest, src):
115 return 'Copying %s to %s ...' % (src, dest)
116
117 GYPCopy = SCons.Action.ActionFactory(_copy_files_or_dirs_or_symlinks,
118 _copy_files_or_dirs_or_symlinks_str,
119 convert=str)
120 """
121
122 _rule_template = """
123 %(name)s_additional_inputs = %(inputs)s
124 %(name)s_outputs = %(outputs)s
125 def %(name)s_emitter(target, source, env):
126 return (%(name)s_outputs, source + %(name)s_additional_inputs)
127 if GetOption('verbose'):
128 %(name)s_action = Action([%(action)s])
129 else:
130 %(name)s_action = Action([%(action)s], %(message)s)
131 env['BUILDERS']['%(name)s'] = Builder(action=%(name)s_action,
132 emitter=%(name)s_emitter)
133
134 _outputs = []
135 _processed_input_files = []
136 for infile in input_files:
137 if (type(infile) == type('')
138 and not os.path.isabs(infile)
139 and not infile[0] == '$'):
140 infile = %(src_dir)r + infile
141 if str(infile).endswith('.%(extension)s'):
142 _generated = env.%(name)s(infile)
143 env.Precious(_generated)
144 _outputs.append(_generated)
145 %(process_outputs_as_sources_line)s
146 else:
147 _processed_input_files.append(infile)
148 prerequisites.extend(_outputs)
149 input_files = _processed_input_files
150 """
151
152 _spawn_hack = """
153 import re
154 import SCons.Platform.posix
155 needs_shell = re.compile('["\\'><!^&]')
156 def gyp_spawn(sh, escape, cmd, args, env):
157 def strip_scons_quotes(arg):
158 if arg[0] == '"' and arg[-1] == '"':
159 return arg[1:-1]
160 return arg
161 stripped_args = [strip_scons_quotes(a) for a in args]
162 if needs_shell.search(' '.join(stripped_args)):
163 return SCons.Platform.posix.exec_spawnvpe([sh, '-c', ' '.join(args)], env)
164 else:
165 return SCons.Platform.posix.exec_spawnvpe(stripped_args, env)
166 """
167
168
169 def EscapeShellArgument(s):
170 """Quotes an argument so that it will be interpreted literally by a POSIX
171 shell. Taken from
172 http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
173 """
174 return "'" + s.replace("'", "'\\''") + "'"
175
176
177 def InvertNaiveSConsQuoting(s):
178 """SCons tries to "help" with quoting by naively putting double-quotes around
179 command-line arguments containing space or tab, which is broken for all
180 but trivial cases, so we undo it. (See quote_spaces() in Subst.py)"""
181 if ' ' in s or '\t' in s:
182 # Then SCons will put double-quotes around this, so add our own quotes
183 # to close its quotes at the beginning and end.
184 s = '"' + s + '"'
185 return s
186
187
188 def EscapeSConsVariableExpansion(s):
189 """SCons has its own variable expansion syntax using $. We must escape it for
190 strings to be interpreted literally. For some reason this requires four
191 dollar signs, not two, even without the shell involved."""
192 return s.replace('$', '$$$$')
193
194
195 def EscapeCppDefine(s):
196 """Escapes a CPP define so that it will reach the compiler unaltered."""
197 s = EscapeShellArgument(s)
198 s = InvertNaiveSConsQuoting(s)
199 s = EscapeSConsVariableExpansion(s)
200 return s
201
202
203 def GenerateConfig(fp, config, indent='', src_dir=''):
204 """
205 Generates SCons dictionary items for a gyp configuration.
206
207 This provides the main translation between the (lower-case) gyp settings
208 keywords and the (upper-case) SCons construction variables.
209 """
210 var_mapping = {
211 'ASFLAGS' : 'asflags',
212 'CCFLAGS' : 'cflags',
213 'CFLAGS' : 'cflags_c',
214 'CXXFLAGS' : 'cflags_cc',
215 'CPPDEFINES' : 'defines',
216 'CPPPATH' : 'include_dirs',
217 # Add the ldflags value to $LINKFLAGS, but not $SHLINKFLAGS.
218 # SCons defines $SHLINKFLAGS to incorporate $LINKFLAGS, so
219 # listing both here would case 'ldflags' to get appended to
220 # both, and then have it show up twice on the command line.
221 'LINKFLAGS' : 'ldflags',
222 }
223 postamble='\n%s],\n' % indent
224 for scons_var in sorted(var_mapping.keys()):
225 gyp_var = var_mapping[scons_var]
226 value = config.get(gyp_var)
227 if value:
228 if gyp_var in ('defines',):
229 value = [EscapeCppDefine(v) for v in value]
230 if gyp_var in ('include_dirs',):
231 if src_dir and not src_dir.endswith('/'):
232 src_dir += '/'
233 result = []
234 for v in value:
235 v = FixPath(v, src_dir)
236 # Force SCons to evaluate the CPPPATH directories at
237 # SConscript-read time, so delayed evaluation of $SRC_DIR
238 # doesn't point it to the --generator-output= directory.
239 result.append('env.Dir(%r)' % v)
240 value = result
241 else:
242 value = map(repr, value)
243 WriteList(fp,
244 value,
245 prefix=indent,
246 preamble='%s%s = [\n ' % (indent, scons_var),
247 postamble=postamble)
248
249
250 def GenerateSConscript(output_filename, spec, build_file, build_file_data):
251 """
252 Generates a SConscript file for a specific target.
253
254 This generates a SConscript file suitable for building any or all of
255 the target's configurations.
256
257 A SConscript file may be called multiple times to generate targets for
258 multiple configurations. Consequently, it needs to be ready to build
259 the target for any requested configuration, and therefore contains
260 information about the settings for all configurations (generated into
261 the SConscript file at gyp configuration time) as well as logic for
262 selecting (at SCons build time) the specific configuration being built.
263
264 The general outline of a generated SConscript file is:
265
266 -- Header
267
268 -- Import 'env'. This contains a $CONFIG_NAME construction
269 variable that specifies what configuration to build
270 (e.g. Debug, Release).
271
272 -- Configurations. This is a dictionary with settings for
273 the different configurations (Debug, Release) under which this
274 target can be built. The values in the dictionary are themselves
275 dictionaries specifying what construction variables should added
276 to the local copy of the imported construction environment
277 (Append), should be removed (FilterOut), and should outright
278 replace the imported values (Replace).
279
280 -- Clone the imported construction environment and update
281 with the proper configuration settings.
282
283 -- Initialize the lists of the targets' input files and prerequisites.
284
285 -- Target-specific actions and rules. These come after the
286 input file and prerequisite initializations because the
287 outputs of the actions and rules may affect the input file
288 list (process_outputs_as_sources) and get added to the list of
289 prerequisites (so that they're guaranteed to be executed before
290 building the target).
291
292 -- Call the Builder for the target itself.
293
294 -- Arrange for any copies to be made into installation directories.
295
296 -- Set up the {name} Alias (phony Node) for the target as the
297 primary handle for building all of the target's pieces.
298
299 -- Use env.Require() to make sure the prerequisites (explicitly
300 specified, but also including the actions and rules) are built
301 before the target itself.
302
303 -- Return the {name} Alias to the calling SConstruct file
304 so it can be added to the list of default targets.
305 """
306 scons_target = SCons.Target(spec)
307
308 gyp_dir = os.path.dirname(output_filename)
309 if not gyp_dir:
310 gyp_dir = '.'
311 gyp_dir = os.path.abspath(gyp_dir)
312
313 output_dir = os.path.dirname(output_filename)
314 src_dir = build_file_data['_DEPTH']
315 src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
316 subdir = gyp.common.RelativePath(os.path.dirname(build_file), src_dir)
317 src_subdir = '$SRC_DIR/' + subdir
318 src_subdir_ = src_subdir + '/'
319
320 component_name = os.path.splitext(os.path.basename(build_file))[0]
321 target_name = spec['target_name']
322
323 if not os.path.exists(gyp_dir):
324 os.makedirs(gyp_dir)
325 fp = open(output_filename, 'w')
326 fp.write(header)
327
328 fp.write('\nimport os\n')
329 fp.write('\nImport("env")\n')
330
331 #
332 fp.write('\n')
333 fp.write('env = env.Clone(COMPONENT_NAME=%s,\n' % repr(component_name))
334 fp.write(' TARGET_NAME=%s)\n' % repr(target_name))
335
336 #
337 for config in spec['configurations'].itervalues():
338 if config.get('scons_line_length'):
339 fp.write(_spawn_hack)
340 break
341
342 #
343 indent = ' ' * 12
344 fp.write('\n')
345 fp.write('configurations = {\n')
346 for config_name, config in spec['configurations'].iteritems():
347 fp.write(' \'%s\' : {\n' % config_name)
348
349 fp.write(' \'Append\' : dict(\n')
350 GenerateConfig(fp, config, indent, src_subdir)
351 libraries = spec.get('libraries')
352 if libraries:
353 WriteList(fp,
354 map(repr, libraries),
355 prefix=indent,
356 preamble='%sLIBS = [\n ' % indent,
357 postamble='\n%s],\n' % indent)
358 fp.write(' ),\n')
359
360 fp.write(' \'FilterOut\' : dict(\n' )
361 for key, var in config.get('scons_remove', {}).iteritems():
362 fp.write(' %s = %s,\n' % (key, repr(var)))
363 fp.write(' ),\n')
364
365 fp.write(' \'Replace\' : dict(\n' )
366 scons_settings = config.get('scons_variable_settings', {})
367 for key in sorted(scons_settings.keys()):
368 val = pprint.pformat(scons_settings[key])
369 fp.write(' %s = %s,\n' % (key, val))
370 if 'c++' in spec.get('link_languages', []):
371 fp.write(' %s = %s,\n' % ('LINK', repr('$CXX')))
372 if config.get('scons_line_length'):
373 fp.write(' SPAWN = gyp_spawn,\n')
374 fp.write(' ),\n')
375
376 fp.write(' \'ImportExternal\' : [\n' )
377 for var in config.get('scons_import_variables', []):
378 fp.write(' %s,\n' % repr(var))
379 fp.write(' ],\n')
380
381 fp.write(' \'PropagateExternal\' : [\n' )
382 for var in config.get('scons_propagate_variables', []):
383 fp.write(' %s,\n' % repr(var))
384 fp.write(' ],\n')
385
386 fp.write(' },\n')
387 fp.write('}\n')
388
389 fp.write('\n'
390 'config = configurations[env[\'CONFIG_NAME\']]\n'
391 'env.Append(**config[\'Append\'])\n'
392 'env.FilterOut(**config[\'FilterOut\'])\n'
393 'env.Replace(**config[\'Replace\'])\n')
394
395 fp.write('\n'
396 '# Scons forces -fPIC for SHCCFLAGS on some platforms.\n'
397 '# Disable that so we can control it from cflags in gyp.\n'
398 '# Note that Scons itself is inconsistent with its -fPIC\n'
399 '# setting. SHCCFLAGS forces -fPIC, and SHCFLAGS does not.\n'
400 '# This will make SHCCFLAGS consistent with SHCFLAGS.\n'
401 'env[\'SHCCFLAGS\'] = [\'$CCFLAGS\']\n')
402
403 fp.write('\n'
404 'for _var in config[\'ImportExternal\']:\n'
405 ' if _var in ARGUMENTS:\n'
406 ' env[_var] = ARGUMENTS[_var]\n'
407 ' elif _var in os.environ:\n'
408 ' env[_var] = os.environ[_var]\n'
409 'for _var in config[\'PropagateExternal\']:\n'
410 ' if _var in ARGUMENTS:\n'
411 ' env[_var] = ARGUMENTS[_var]\n'
412 ' elif _var in os.environ:\n'
413 ' env[\'ENV\'][_var] = os.environ[_var]\n')
414
415 fp.write('\n'
416 "env['ENV']['LD_LIBRARY_PATH'] = env.subst('$LIB_DIR')\n")
417
418 #
419 #fp.write("\nif env.has_key('CPPPATH'):\n")
420 #fp.write(" env['CPPPATH'] = map(env.Dir, env['CPPPATH'])\n")
421
422 variants = spec.get('variants', {})
423 for setting in sorted(variants.keys()):
424 if_fmt = 'if ARGUMENTS.get(%s) not in (None, \'0\'):\n'
425 fp.write('\n')
426 fp.write(if_fmt % repr(setting.upper()))
427 fp.write(' env.AppendUnique(\n')
428 GenerateConfig(fp, variants[setting], indent, src_subdir)
429 fp.write(' )\n')
430
431 #
432 scons_target.write_input_files(fp)
433
434 fp.write('\n')
435 fp.write('target_files = []\n')
436 prerequisites = spec.get('scons_prerequisites', [])
437 fp.write('prerequisites = %s\n' % pprint.pformat(prerequisites))
438
439 actions = spec.get('actions', [])
440 for action in actions:
441 a = ['cd', src_subdir, '&&'] + action['action']
442 message = action.get('message')
443 if message:
444 message = repr(message)
445 inputs = [FixPath(f, src_subdir_) for f in action.get('inputs', [])]
446 outputs = [FixPath(f, src_subdir_) for f in action.get('outputs', [])]
447 if outputs:
448 template = _command_template
449 else:
450 template = _alias_template
451 fp.write(template % {
452 'inputs' : pprint.pformat(inputs),
453 'outputs' : pprint.pformat(outputs),
454 'action' : pprint.pformat(a),
455 'message' : message,
456 'target_name': target_name,
457 })
458 if int(action.get('process_outputs_as_sources', 0)):
459 fp.write('input_files.extend(_outputs)\n')
460 fp.write('prerequisites.extend(_outputs)\n')
461 fp.write('target_files.extend(_outputs)\n')
462
463 rules = spec.get('rules', [])
464 for rule in rules:
465 name = rule['rule_name']
466 a = ['cd', src_subdir, '&&'] + rule['action']
467 message = rule.get('message')
468 if message:
469 message = repr(message)
470 if int(rule.get('process_outputs_as_sources', 0)):
471 poas_line = '_processed_input_files.extend(_generated)'
472 else:
473 poas_line = '_processed_input_files.append(infile)'
474 inputs = [FixPath(f, src_subdir_) for f in rule.get('inputs', [])]
475 outputs = [FixPath(f, src_subdir_) for f in rule.get('outputs', [])]
476 fp.write(_rule_template % {
477 'inputs' : pprint.pformat(inputs),
478 'outputs' : pprint.pformat(outputs),
479 'action' : pprint.pformat(a),
480 'extension' : rule['extension'],
481 'name' : name,
482 'message' : message,
483 'process_outputs_as_sources_line' : poas_line,
484 'src_dir' : src_subdir_,
485 })
486
487 scons_target.write_target(fp, src_subdir)
488
489 copies = spec.get('copies', [])
490 if copies:
491 fp.write(_copy_action_template)
492 for copy in copies:
493 destdir = None
494 files = None
495 try:
496 destdir = copy['destination']
497 except KeyError, e:
498 gyp.common.ExceptionAppend(
499 e,
500 "Required 'destination' key missing for 'copies' in %s." % build_file)
501 raise
502 try:
503 files = copy['files']
504 except KeyError, e:
505 gyp.common.ExceptionAppend(
506 e, "Required 'files' key missing for 'copies' in %s." % build_file)
507 raise
508 if not files:
509 # TODO: should probably add a (suppressible) warning;
510 # a null file list may be unintentional.
511 continue
512 if not destdir:
513 raise Exception(
514 "Required 'destination' key is empty for 'copies' in %s." % build_file)
515
516 fmt = ('\n'
517 '_outputs = env.Command(%s,\n'
518 ' %s,\n'
519 ' GYPCopy(\'$TARGET\', \'$SOURCE\'))\n')
520 for f in copy['files']:
521 # Remove trailing separators so basename() acts like Unix basename and
522 # always returns the last element, whether a file or dir. Without this,
523 # only the contents, not the directory itself, are copied (and nothing
524 # might be copied if dest already exists, since scons thinks nothing needs
525 # to be done).
526 dest = os.path.join(destdir, os.path.basename(f.rstrip(os.sep)))
527 f = FixPath(f, src_subdir_)
528 dest = FixPath(dest, src_subdir_)
529 fp.write(fmt % (repr(dest), repr(f)))
530 fp.write('target_files.extend(_outputs)\n')
531
532 if spec.get('run_as') or int(spec.get('test', 0)):
533 run_as = spec.get('run_as', {
534 'action' : ['$TARGET_NAME', '--gtest_print_time'],
535 })
536 action = run_as.get('action', [])
537 working_directory = run_as.get('working_directory')
538 if not working_directory:
539 working_directory = gyp_dir
540 else:
541 if not os.path.isabs(working_directory):
542 working_directory = os.path.normpath(os.path.join(gyp_dir,
543 working_directory))
544 if run_as.get('environment'):
545 for (key, val) in run_as.get('environment').iteritems():
546 action = ['%s="%s"' % (key, val)] + action
547 action = ['cd', '"%s"' % working_directory, '&&'] + action
548 fp.write(_run_as_template % {
549 'action' : pprint.pformat(action),
550 'message' : run_as.get('message', ''),
551 })
552
553 fmt = "\ngyp_target = env.Alias('%s', target_files)\n"
554 fp.write(fmt % target_name)
555
556 dependencies = spec.get('scons_dependencies', [])
557 if dependencies:
558 WriteList(fp, dependencies, preamble='dependencies = [\n ',
559 postamble='\n]\n')
560 fp.write('env.Requires(target_files, dependencies)\n')
561 fp.write('env.Requires(gyp_target, dependencies)\n')
562 fp.write('for prerequisite in prerequisites:\n')
563 fp.write(' env.Requires(prerequisite, dependencies)\n')
564 fp.write('env.Requires(gyp_target, prerequisites)\n')
565
566 if spec.get('run_as', 0) or int(spec.get('test', 0)):
567 fp.write(_run_as_template_suffix % {
568 'target_name': target_name,
569 })
570
571 fp.write('Return("gyp_target")\n')
572
573 fp.close()
574
575
576 #############################################################################
577 # TEMPLATE BEGIN
578
579 _wrapper_template = """\
580
581 __doc__ = '''
582 Wrapper configuration for building this entire "solution,"
583 including all the specific targets in various *.scons files.
584 '''
585
586 import os
587 import sys
588
589 import SCons.Environment
590 import SCons.Util
591
592 def GetProcessorCount():
593 '''
594 Detects the number of CPUs on the system. Adapted form:
595 http://codeliberates.blogspot.com/2008/05/detecting-cpuscores-in-python.html
596 '''
597 # Linux, Unix and Mac OS X:
598 if hasattr(os, 'sysconf'):
599 if os.sysconf_names.has_key('SC_NPROCESSORS_ONLN'):
600 # Linux and Unix or Mac OS X with python >= 2.5:
601 return os.sysconf('SC_NPROCESSORS_ONLN')
602 else: # Mac OS X with Python < 2.5:
603 return int(os.popen2("sysctl -n hw.ncpu")[1].read())
604 # Windows:
605 if os.environ.has_key('NUMBER_OF_PROCESSORS'):
606 return max(int(os.environ.get('NUMBER_OF_PROCESSORS', '1')), 1)
607 return 1 # Default
608
609 # Support PROGRESS= to show progress in different ways.
610 p = ARGUMENTS.get('PROGRESS')
611 if p == 'spinner':
612 Progress(['/\\r', '|\\r', '\\\\\\r', '-\\r'],
613 interval=5,
614 file=open('/dev/tty', 'w'))
615 elif p == 'name':
616 Progress('$TARGET\\r', overwrite=True, file=open('/dev/tty', 'w'))
617
618 # Set the default -j value based on the number of processors.
619 SetOption('num_jobs', GetProcessorCount() + 1)
620
621 # Have SCons use its cached dependency information.
622 SetOption('implicit_cache', 1)
623
624 # Only re-calculate MD5 checksums if a timestamp has changed.
625 Decider('MD5-timestamp')
626
627 # Since we set the -j value by default, suppress SCons warnings about being
628 # unable to support parallel build on versions of Python with no threading.
629 default_warnings = ['no-no-parallel-support']
630 SetOption('warn', default_warnings + GetOption('warn'))
631
632 AddOption('--mode', nargs=1, dest='conf_list', default=[],
633 action='append', help='Configuration to build.')
634
635 AddOption('--verbose', dest='verbose', default=False,
636 action='store_true', help='Verbose command-line output.')
637
638
639 #
640 sconscript_file_map = %(sconscript_files)s
641
642 class LoadTarget:
643 '''
644 Class for deciding if a given target sconscript is to be included
645 based on a list of included target names, optionally prefixed with '-'
646 to exclude a target name.
647 '''
648 def __init__(self, load):
649 '''
650 Initialize a class with a list of names for possible loading.
651
652 Arguments:
653 load: list of elements in the LOAD= specification
654 '''
655 self.included = set([c for c in load if not c.startswith('-')])
656 self.excluded = set([c[1:] for c in load if c.startswith('-')])
657
658 if not self.included:
659 self.included = set(['all'])
660
661 def __call__(self, target):
662 '''
663 Returns True if the specified target's sconscript file should be
664 loaded, based on the initialized included and excluded lists.
665 '''
666 return (target in self.included or
667 ('all' in self.included and not target in self.excluded))
668
669 if 'LOAD' in ARGUMENTS:
670 load = ARGUMENTS['LOAD'].split(',')
671 else:
672 load = []
673 load_target = LoadTarget(load)
674
675 sconscript_files = []
676 for target, sconscript in sconscript_file_map.iteritems():
677 if load_target(target):
678 sconscript_files.append(sconscript)
679
680
681 target_alias_list= []
682
683 conf_list = GetOption('conf_list')
684 if conf_list:
685 # In case the same --mode= value was specified multiple times.
686 conf_list = list(set(conf_list))
687 else:
688 conf_list = [%(default_configuration)r]
689
690 sconsbuild_dir = Dir(%(sconsbuild_dir)s)
691
692
693 def FilterOut(self, **kw):
694 kw = SCons.Environment.copy_non_reserved_keywords(kw)
695 for key, val in kw.items():
696 envval = self.get(key, None)
697 if envval is None:
698 # No existing variable in the environment, so nothing to delete.
699 continue
700
701 for vremove in val:
702 # Use while not if, so we can handle duplicates.
703 while vremove in envval:
704 envval.remove(vremove)
705
706 self[key] = envval
707
708 # TODO(sgk): SCons.Environment.Append() has much more logic to deal
709 # with various types of values. We should handle all those cases in here
710 # too. (If variable is a dict, etc.)
711
712
713 non_compilable_suffixes = {
714 'LINUX' : set([
715 '.bdic',
716 '.css',
717 '.dat',
718 '.fragment',
719 '.gperf',
720 '.h',
721 '.hh',
722 '.hpp',
723 '.html',
724 '.hxx',
725 '.idl',
726 '.in',
727 '.in0',
728 '.in1',
729 '.js',
730 '.mk',
731 '.rc',
732 '.sigs',
733 '',
734 ]),
735 'WINDOWS' : set([
736 '.h',
737 '.hh',
738 '.hpp',
739 '.dat',
740 '.idl',
741 '.in',
742 '.in0',
743 '.in1',
744 ]),
745 }
746
747 def compilable(env, file):
748 base, ext = os.path.splitext(str(file))
749 if ext in non_compilable_suffixes[env['TARGET_PLATFORM']]:
750 return False
751 return True
752
753 def compilable_files(env, sources):
754 return [x for x in sources if compilable(env, x)]
755
756 def GypProgram(env, target, source, *args, **kw):
757 source = compilable_files(env, source)
758 result = env.Program(target, source, *args, **kw)
759 if env.get('INCREMENTAL'):
760 env.Precious(result)
761 return result
762
763 def GypTestProgram(env, target, source, *args, **kw):
764 source = compilable_files(env, source)
765 result = env.Program(target, source, *args, **kw)
766 if env.get('INCREMENTAL'):
767 env.Precious(*result)
768 return result
769
770 def GypLibrary(env, target, source, *args, **kw):
771 source = compilable_files(env, source)
772 result = env.Library(target, source, *args, **kw)
773 return result
774
775 def GypLoadableModule(env, target, source, *args, **kw):
776 source = compilable_files(env, source)
777 result = env.LoadableModule(target, source, *args, **kw)
778 return result
779
780 def GypStaticLibrary(env, target, source, *args, **kw):
781 source = compilable_files(env, source)
782 result = env.StaticLibrary(target, source, *args, **kw)
783 return result
784
785 def GypSharedLibrary(env, target, source, *args, **kw):
786 source = compilable_files(env, source)
787 result = env.SharedLibrary(target, source, *args, **kw)
788 if env.get('INCREMENTAL'):
789 env.Precious(result)
790 return result
791
792 def add_gyp_methods(env):
793 env.AddMethod(GypProgram)
794 env.AddMethod(GypTestProgram)
795 env.AddMethod(GypLibrary)
796 env.AddMethod(GypLoadableModule)
797 env.AddMethod(GypStaticLibrary)
798 env.AddMethod(GypSharedLibrary)
799
800 env.AddMethod(FilterOut)
801
802 env.AddMethod(compilable)
803
804
805 base_env = Environment(
806 tools = %(scons_tools)s,
807 INTERMEDIATE_DIR='$OBJ_DIR/${COMPONENT_NAME}/_${TARGET_NAME}_intermediate',
808 LIB_DIR='$TOP_BUILDDIR/lib',
809 OBJ_DIR='$TOP_BUILDDIR/obj',
810 SCONSBUILD_DIR=sconsbuild_dir.abspath,
811 SHARED_INTERMEDIATE_DIR='$OBJ_DIR/_global_intermediate',
812 SRC_DIR=Dir(%(src_dir)r),
813 TARGET_PLATFORM='LINUX',
814 TOP_BUILDDIR='$SCONSBUILD_DIR/$CONFIG_NAME',
815 LIBPATH=['$LIB_DIR'],
816 )
817
818 if not GetOption('verbose'):
819 base_env.SetDefault(
820 ARCOMSTR='Creating library $TARGET',
821 ASCOMSTR='Assembling $TARGET',
822 CCCOMSTR='Compiling $TARGET',
823 CONCATSOURCECOMSTR='ConcatSource $TARGET',
824 CXXCOMSTR='Compiling $TARGET',
825 LDMODULECOMSTR='Building loadable module $TARGET',
826 LINKCOMSTR='Linking $TARGET',
827 MANIFESTCOMSTR='Updating manifest for $TARGET',
828 MIDLCOMSTR='Compiling IDL $TARGET',
829 PCHCOMSTR='Precompiling $TARGET',
830 RANLIBCOMSTR='Indexing $TARGET',
831 RCCOMSTR='Compiling resource $TARGET',
832 SHCCCOMSTR='Compiling $TARGET',
833 SHCXXCOMSTR='Compiling $TARGET',
834 SHLINKCOMSTR='Linking $TARGET',
835 SHMANIFESTCOMSTR='Updating manifest for $TARGET',
836 )
837
838 add_gyp_methods(base_env)
839
840 for conf in conf_list:
841 env = base_env.Clone(CONFIG_NAME=conf)
842 SConsignFile(env.File('$TOP_BUILDDIR/.sconsign').abspath)
843 for sconscript in sconscript_files:
844 target_alias = env.SConscript(sconscript, exports=['env'])
845 if target_alias:
846 target_alias_list.extend(target_alias)
847
848 Default(Alias('all', target_alias_list))
849
850 help_fmt = '''
851 Usage: hammer [SCONS_OPTIONS] [VARIABLES] [TARGET] ...
852
853 Local command-line build options:
854 --mode=CONFIG Configuration to build:
855 --mode=Debug [default]
856 --mode=Release
857 --verbose Print actual executed command lines.
858
859 Supported command-line build variables:
860 LOAD=[module,...] Comma-separated list of components to load in the
861 dependency graph ('-' prefix excludes)
862 PROGRESS=type Display a progress indicator:
863 name: print each evaluated target name
864 spinner: print a spinner every 5 targets
865
866 The following TARGET names can also be used as LOAD= module names:
867
868 %%s
869 '''
870
871 if GetOption('help'):
872 def columnar_text(items, width=78, indent=2, sep=2):
873 result = []
874 colwidth = max(map(len, items)) + sep
875 cols = (width - indent) / colwidth
876 if cols < 1:
877 cols = 1
878 rows = (len(items) + cols - 1) / cols
879 indent = '%%*s' %% (indent, '')
880 sep = indent
881 for row in xrange(0, rows):
882 result.append(sep)
883 for i in xrange(row, len(items), rows):
884 result.append('%%-*s' %% (colwidth, items[i]))
885 sep = '\\n' + indent
886 result.append('\\n')
887 return ''.join(result)
888
889 load_list = set(sconscript_file_map.keys())
890 target_aliases = set(map(str, target_alias_list))
891
892 common = load_list and target_aliases
893 load_only = load_list - common
894 target_only = target_aliases - common
895 help_text = [help_fmt %% columnar_text(sorted(list(common)))]
896 if target_only:
897 fmt = "The following are additional TARGET names:\\n\\n%%s\\n"
898 help_text.append(fmt %% columnar_text(sorted(list(target_only))))
899 if load_only:
900 fmt = "The following are additional LOAD= module names:\\n\\n%%s\\n"
901 help_text.append(fmt %% columnar_text(sorted(list(load_only))))
902 Help(''.join(help_text))
903 """
904
905 # TEMPLATE END
906 #############################################################################
907
908
909 def GenerateSConscriptWrapper(build_file, build_file_data, name,
910 output_filename, sconscript_files,
911 default_configuration):
912 """
913 Generates the "wrapper" SConscript file (analogous to the Visual Studio
914 solution) that calls all the individual target SConscript files.
915 """
916 output_dir = os.path.dirname(output_filename)
917 src_dir = build_file_data['_DEPTH']
918 src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
919 if not src_dir_rel:
920 src_dir_rel = '.'
921 scons_settings = build_file_data.get('scons_settings', {})
922 sconsbuild_dir = scons_settings.get('sconsbuild_dir', '#')
923 scons_tools = scons_settings.get('tools', ['default'])
924
925 sconscript_file_lines = ['dict(']
926 for target in sorted(sconscript_files.keys()):
927 sconscript = sconscript_files[target]
928 sconscript_file_lines.append(' %s = %r,' % (target, sconscript))
929 sconscript_file_lines.append(')')
930
931 fp = open(output_filename, 'w')
932 fp.write(header)
933 fp.write(_wrapper_template % {
934 'default_configuration' : default_configuration,
935 'name' : name,
936 'scons_tools' : repr(scons_tools),
937 'sconsbuild_dir' : repr(sconsbuild_dir),
938 'sconscript_files' : '\n'.join(sconscript_file_lines),
939 'src_dir' : src_dir_rel,
940 })
941 fp.close()
942
943 # Generate the SConstruct file that invokes the wrapper SConscript.
944 dir, fname = os.path.split(output_filename)
945 SConstruct = os.path.join(dir, 'SConstruct')
946 fp = open(SConstruct, 'w')
947 fp.write(header)
948 fp.write('SConscript(%s)\n' % repr(fname))
949 fp.close()
950
951
952 def TargetFilename(target, build_file=None, output_suffix=''):
953 """Returns the .scons file name for the specified target.
954 """
955 if build_file is None:
956 build_file, target = gyp.common.ParseQualifiedTarget(target)[:2]
957 output_file = os.path.join(os.path.dirname(build_file),
958 target + output_suffix + '.scons')
959 return output_file
960
961
def GenerateOutput(target_list, target_dicts, data, params):
  """
  Generates all the output files for the specified targets.

  Writes one per-target .scons file (via GenerateSConscript) and, for every
  .gyp build file, a *_main.scons wrapper plus SConstruct (via
  GenerateSConscriptWrapper).
  """
  options = params['options']

  # When --generator-output is given, rewrite output paths so generated
  # files land under that directory instead of next to the inputs.
  if options.generator_output:
    def output_path(filename):
      return filename.replace(params['cwd'], options.generator_output)
  else:
    def output_path(filename):
      return filename

  default_configuration = None

  # First pass: emit one .scons file per buildable target.
  for qualified_target in target_list:
    spec = target_dicts[qualified_target]
    if spec['toolset'] != 'target':
      raise Exception(
          'Multiple toolsets not supported in scons build (target %s)' %
          qualified_target)
    scons_target = SCons.Target(spec)
    if scons_target.is_ignored:
      continue

    # TODO:  assumes the default_configuration of the first target
    # non-Default target is the correct default for all targets.
    # Need a better model for handle variation between targets.
    if (not default_configuration and
        spec['default_configuration'] != 'Default'):
      default_configuration = spec['default_configuration']

    build_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[:2]
    output_file = TargetFilename(target, build_file, options.suffix)
    if options.generator_output:
      output_file = output_path(output_file)

    if not spec.has_key('libraries'):
      spec['libraries'] = []

    # Add dependent static library targets to the 'libraries' value.
    # NOTE: this mutates spec in place before GenerateSConscript reads it.
    deps = spec.get('dependencies', [])
    spec['scons_dependencies'] = []
    for d in deps:
      td = target_dicts[d]
      target_name = td['target_name']
      spec['scons_dependencies'].append("Alias('%s')" % target_name)
      if td['type'] in ('static_library', 'shared_library'):
        libname = td.get('product_name', target_name)
        spec['libraries'].append('lib' + libname)
      if td['type'] == 'loadable_module':
        prereqs = spec.get('scons_prerequisites', [])
        # TODO:  parameterize with <(SHARED_LIBRARY_*) variables?
        td_target = SCons.Target(td)
        td_target.target_prefix = '${SHLIBPREFIX}'
        td_target.target_suffix = '${SHLIBSUFFIX}'

    GenerateSConscript(output_file, spec, build_file, data[build_file])

  if not default_configuration:
    default_configuration = 'Default'

  # Second pass: one wrapper SConscript (and SConstruct) per .gyp file,
  # referencing every non-ignored target generated above.
  for build_file in sorted(data.keys()):
    path, ext = os.path.splitext(build_file)
    if ext != '.gyp':
      continue
    output_dir, basename = os.path.split(path)
    output_filename = path + '_main' + options.suffix + '.scons'

    all_targets = gyp.common.AllTargets(target_list, target_dicts, build_file)
    sconscript_files = {}
    for t in all_targets:
      scons_target = SCons.Target(target_dicts[t])
      if scons_target.is_ignored:
        continue
      bf, target = gyp.common.ParseQualifiedTarget(t)[:2]
      target_filename = TargetFilename(target, bf, options.suffix)
      tpath = gyp.common.RelativePath(target_filename, output_dir)
      sconscript_files[target] = tpath

    output_filename = output_path(output_filename)
    if sconscript_files:
      GenerateSConscriptWrapper(build_file, data[build_file], basename,
                                output_filename, sconscript_files,
                                default_configuration)
+0
-1139
mozc_build_tools/gyp/pylib/gyp/generator/xcode.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2010 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import filecmp
7 import gyp.common
8 import gyp.xcodeproj_file
9 import errno
10 import os
11 import posixpath
12 import re
13 import shutil
14 import subprocess
15 import tempfile
16
17
# Project files generated by this module will use _intermediate_var as a
# custom Xcode setting whose value is a DerivedSources-like directory that's
# project-specific and configuration-specific.  The normal choice,
# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
# as it is likely that multiple targets within a single project file will want
# to access the same set of generated files.  The other option,
# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
# it is not configuration-specific.  INTERMEDIATE_DIR is defined as
# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
_intermediate_var = 'INTERMEDIATE_DIR'

# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
# targets that share the same BUILT_PRODUCTS_DIR.
_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'

# Mapping consumed by the gyp core: gyp variable name -> Xcode build-setting
# expansion used when this generator is active.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'SHARED_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_SUFFIX': '.dylib',
  # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
  # It is specific to each build environment.  It is only guaranteed to exist
  # and be constant within the context of a project, corresponding to a single
  # input file.  Some build environments may allow their intermediate directory
  # to be shared on a wider scale, but this is not guaranteed.
  'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
  'OS': 'mac',
  'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
  'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
  'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
  'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
  'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
  'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
  'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
  'CONFIGURATION_NAME': '$(CONFIGURATION)',
}

# The Xcode-specific sections that hold paths.
generator_additional_path_sections = [
  'mac_bundle_resources',
  # 'mac_framework_dirs', input already handles _dirs endings.
]

# The Xcode-specific keys that exist on targets and aren't moved down to
# configurations.
generator_additional_non_configuration_keys = [
  'mac_bundle',
  'mac_bundle_resources',
  'xcode_create_dependents_test_runner',
]

# We want to let any rules apply to files that are resources also.
generator_extra_sources_for_rules = [
  'mac_bundle_resources',
]
75
76
def CreateXCConfigurationList(configuration_names):
  """Builds an XCConfigurationList with one XCBuildConfiguration per name.

  The first entry of configuration_names becomes defaultConfigurationName.
  """
  config_list = gyp.xcodeproj_file.XCConfigurationList(
      {'buildConfigurations': []})
  for name in configuration_names:
    build_config = gyp.xcodeproj_file.XCBuildConfiguration({'name': name})
    config_list.AppendProperty('buildConfigurations', build_config)
  config_list.SetProperty('defaultConfigurationName', configuration_names[0])
  return config_list
85
86
class XcodeProject(object):
  """Wraps one generated .xcodeproj: builds the PBXProject object tree from a
  .gyp file's targets, finalizes it in two passes, and writes project.pbxproj
  atomically.

  NOTE(review): this is Python 2 code (`except E, e`, octal literals,
  iteritems) and is kept byte-identical here.
  """

  def __init__(self, gyp_path, path, build_file_dict):
    # gyp_path: the .gyp file this project is generated from.
    # path: the .xcodeproj directory to create/populate.
    self.gyp_path = gyp_path
    self.path = path
    self.project = gyp.xcodeproj_file.PBXProject(path=path)
    # projectDirPath makes paths in the project resolve relative to the
    # directory containing the source .gyp file.
    projectDirPath = gyp.common.RelativePath(
        os.path.dirname(os.path.abspath(self.gyp_path)),
        os.path.dirname(path) or '.')
    self.project.SetProperty('projectDirPath', projectDirPath)
    self.project_file = \
        gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
    self.build_file_dict = build_file_dict

    # TODO(mark): add destructor that cleans up self.path if created_dir is
    # True and things didn't complete successfully.  Or do something even
    # better with "try"?
    self.created_dir = False
    try:
      os.makedirs(self.path)
      self.created_dir = True
    except OSError, e:
      # Pre-existing directory is fine; anything else is a real error.
      if e.errno != errno.EEXIST:
        raise

  def Finalize1(self, xcode_targets, serialize_all_tests):
    """First finalization pass: collects configurations, orders targets,
    creates per-target "Run ..." test-runner targets and the synthetic
    "All" / "Run All Tests" aggregate targets.  Must run on every project
    before any project runs Finalize2."""
    # Collect a list of all of the build configuration names used by the
    # various targets in the file.  It is very heavily advised to keep each
    # target in an entire project (even across multiple project files) using
    # the same set of configuration names.
    configurations = []
    for xct in self.project.GetProperty('targets'):
      xccl = xct.GetProperty('buildConfigurationList')
      xcbcs = xccl.GetProperty('buildConfigurations')
      for xcbc in xcbcs:
        name = xcbc.GetProperty('name')
        if name not in configurations:
          configurations.append(name)

    # Replace the XCConfigurationList attached to the PBXProject object with
    # a new one specifying all of the configuration names used by the various
    # targets.
    try:
      xccl = CreateXCConfigurationList(configurations)
      self.project.SetProperty('buildConfigurationList', xccl)
    except:
      import sys
      sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
      raise

    # The need for this setting is explained above where _intermediate_var is
    # defined.  The comments below about wanting to avoid project-wide build
    # settings apply here too, but this needs to be set on a project-wide basis
    # so that files relative to the _intermediate_var setting can be displayed
    # properly in the Xcode UI.
    #
    # Note that for configuration-relative files such as anything relative to
    # _intermediate_var, for the purposes of UI tree view display, Xcode will
    # only resolve the configuration name once, when the project file is
    # opened.  If the active build configuration is changed, the project file
    # must be closed and reopened if it is desired for the tree view to update.
    # This is filed as Apple radar 6588391.
    xccl.SetBuildSetting(_intermediate_var,
                         '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
    xccl.SetBuildSetting(_shared_intermediate_var,
                         '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')

    # Set user-specified project-wide build settings.  This is intended to be
    # used very sparingly.  Really, almost everything should go into
    # target-specific build settings sections.  The project-wide settings are
    # only intended to be used in cases where Xcode attempts to resolve
    # variable references in a project context as opposed to a target context,
    # such as when resolving sourceTree references while building up the tree
    # tree view for UI display.
    for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
      xccl.SetBuildSetting(xck, xcv)

    # Sort the targets based on how they appeared in the input.
    # TODO(mark): Like a lot of other things here, this assumes internal
    # knowledge of PBXProject - in this case, of its "targets" property.

    # ordinary_targets are ordinary targets that are already in the project
    # file.  run_test_targets are the targets that run unittests and should be
    # used for the Run All Tests target.  support_targets are the action/rule
    # targets used by GYP file targets, just kept for the assert check.
    ordinary_targets = []
    run_test_targets = []
    support_targets = []

    # targets is full list of targets in the project.
    targets = []

    # does the it define it's own "all"?
    has_custom_all = False

    # targets_for_all is the list of ordinary_targets that should be listed
    # in this project's "All" target.  It includes each non_runtest_target
    # that does not have suppress_wildcard set.
    targets_for_all = []

    for target in self.build_file_dict['targets']:
      target_name = target['target_name']
      toolset = target['toolset']
      qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
                                                    toolset)
      xcode_target = xcode_targets[qualified_target]
      # Make sure that the target being added to the sorted list is already in
      # the unsorted list.
      assert xcode_target in self.project._properties['targets']
      targets.append(xcode_target)
      ordinary_targets.append(xcode_target)
      if xcode_target.support_target:
        support_targets.append(xcode_target.support_target)
        targets.append(xcode_target.support_target)

      if not int(target.get('suppress_wildcard', False)):
        targets_for_all.append(xcode_target)

      if target_name.lower() == 'all':
        has_custom_all = True;

      # If this target has a 'run_as' attribute, or is a test, add its
      # target to the targets, and (if it's a test) add it the to the
      # test targets.
      is_test = int(target.get('test', 0))
      if target.get('run_as') or is_test:
        # Make a target to run something.  It should have one
        # dependency, the parent xcode target.
        xccl = CreateXCConfigurationList(configurations)
        run_target = gyp.xcodeproj_file.PBXAggregateTarget({
              'name': 'Run ' + target_name,
              'productName': xcode_target.GetProperty('productName'),
              'buildConfigurationList': xccl,
            },
            parent=self.project)
        run_target.AddDependency(xcode_target)

        # The test runner target has a build phase that executes the
        # test, if this has the 'test' attribute.  If the 'run_as' tag
        # doesn't exist (meaning that this must be a test), then we
        # define a default test command line.
        command = target.get('run_as', {
          'action': ['${BUILT_PRODUCTS_DIR}/${PRODUCT_NAME}']
        })

        script = ''
        if command.get('working_directory'):
          script = script + 'cd "%s"\n' % \
                   gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
                       command.get('working_directory'))

        if command.get('environment'):
          script = script + "\n".join(
            ['export %s="%s"' %
             (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
             for (key, val) in command.get('environment').iteritems()]) + "\n"

        # Some test end up using sockets, files on disk, etc. and can get
        # confused if more then one test runs at a time.  The generator
        # flag 'xcode_serialize_all_test_runs' controls the forcing of all
        # tests serially.  It defaults to True.  To get serial runs this
        # little bit of python does the same as the linux flock utility to
        # make sure only one runs at a time.
        command_prefix = ''
        if is_test and serialize_all_tests:
          command_prefix = \
              """python -c "import fcntl, subprocess, sys
file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
fcntl.flock(file.fileno(), fcntl.LOCK_EX)
sys.exit(subprocess.call(sys.argv[1:]))" """

        # If we were unable to exec for some reason, we want to exit
        # with an error, and fixup variable references to be shell
        # syntax instead of xcode syntax.
        script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
                 gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
                     gyp.common.EncodePOSIXShellList(command.get('action')))

        ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
              'shellScript': script,
              'showEnvVarsInLog': 0,
            })
        run_target.AppendProperty('buildPhases', ssbp)

        # Add the run target to the project file.
        targets.append(run_target)
        if is_test:
          run_test_targets.append(run_target)
          xcode_target.test_runner = run_target


    # Make sure that the list of targets being replaced is the same length as
    # the one replacing it, but allow for the added test runner targets.
    assert len(self.project._properties['targets']) == \
           len(ordinary_targets) + len(support_targets)

    self.project._properties['targets'] = targets

    # Get rid of unnecessary levels of depth in groups like the Source group.
    self.project.RootGroupsTakeOverOnlyChildren(True)

    # Sort the groups nicely.  Do this after sorting the targets, because the
    # Products group is sorted based on the order of the targets.
    self.project.SortGroups()

    # Create an "All" target if there's more than one target in this project
    # file and the project didn't define its own "All" target.  Put a generated
    # "All" target first so that people opening up the project for the first
    # time will build everything by default.
    if len(targets_for_all) > 1 and not has_custom_all:
      xccl = CreateXCConfigurationList(configurations)
      all_target = gyp.xcodeproj_file.PBXAggregateTarget(
          {
            'buildConfigurationList': xccl,
            'name': 'All',
          },
          parent=self.project)

      for target in targets_for_all:
        all_target.AddDependency(target)

      # TODO(mark): This is evil because it relies on internal knowledge of
      # PBXProject._properties.  It's important to get the "All" target first,
      # though.
      self.project._properties['targets'].insert(0, all_target)

    # The same, but for run_test_targets.
    if len(run_test_targets) > 1:
      xccl = CreateXCConfigurationList(configurations)
      run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
          {
            'buildConfigurationList': xccl,
            'name': 'Run All Tests',
          },
          parent=self.project)
      for run_test_target in run_test_targets:
        run_all_tests_target.AddDependency(run_test_target)

      # Insert after the "All" target, which must exist if there is more than
      # one run_test_target.
      self.project._properties['targets'].insert(1, run_all_tests_target)

  def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
    """Second finalization pass: creates dependents-test-runner targets,
    completes cross-project references, assigns object IDs."""
    # Finalize2 needs to happen in a separate step because the process of
    # updating references to other projects depends on the ordering of targets
    # within remote project files.  Finalize1 is responsible for sorting duty,
    # and once all project files are sorted, Finalize2 can come in and update
    # these references.

    # To support making a "test runner" target that will run all the tests
    # that are direct dependents of any given target, we look for
    # xcode_create_dependents_test_runner being set on an Aggregate target,
    # and generate a second target that will run the tests runners found under
    # the marked target.
    for bf_tgt in self.build_file_dict['targets']:
      if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
        tgt_name = bf_tgt['target_name']
        toolset = bf_tgt['toolset']
        qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
                                                      tgt_name, toolset)
        xcode_target = xcode_targets[qualified_target]
        if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
          # Collect all the run test targets.
          all_run_tests = []
          pbxtds = xcode_target.GetProperty('dependencies')
          for pbxtd in pbxtds:
            pbxcip = pbxtd.GetProperty('targetProxy')
            dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
            target_dict = xcode_target_to_target_dict[dependency_xct]
            if target_dict and int(target_dict.get('test', 0)):
              # test_runner was attached by Finalize1 for every test target.
              assert dependency_xct.test_runner
              all_run_tests.append(dependency_xct.test_runner)

          # Directly depend on all the runners as they depend on the target
          # that builds them.
          if len(all_run_tests) > 0:
            run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
                  'name': 'Run %s Tests' % tgt_name,
                  'productName': tgt_name,
                },
                parent=self.project)
            for run_test_target in all_run_tests:
              run_all_target.AddDependency(run_test_target)

            # Insert the test runner after the related target.
            idx = self.project._properties['targets'].index(xcode_target)
            self.project._properties['targets'].insert(idx + 1, run_all_target)

    # Update all references to other projects, to make sure that the lists of
    # remote products are complete.  Otherwise, Xcode will fill them in when
    # it opens the project file, which will result in unnecessary diffs.
    # TODO(mark): This is evil because it relies on internal knowledge of
    # PBXProject._other_pbxprojects.
    for other_pbxproject in self.project._other_pbxprojects.keys():
      self.project.AddOrGetProjectReference(other_pbxproject)

    self.project.SortRemoteProductReferences()

    # Give everything an ID.
    self.project_file.ComputeIDs()

    # Make sure that no two objects in the project file have the same ID.  If
    # multiple objects wind up with the same ID, upon loading the file, Xcode
    # will only recognize one object (the last one in the file?) and the
    # results are unpredictable.
    self.project_file.EnsureNoIDCollisions()

  def Write(self):
    """Serializes the project to <path>/project.pbxproj, writing to a temp
    file first and renaming over the old file only if the content changed."""
    # Write the project file to a temporary location first.  Xcode watches for
    # changes to the project file and presents a UI sheet offering to reload
    # the project when it does change.  However, in some cases, especially when
    # multiple projects are open or when Xcode is busy, things don't work so
    # seamlessly.  Sometimes, Xcode is able to detect that a project file has
    # changed but can't unload it because something else is referencing it.
    # To mitigate this problem, and to avoid even having Xcode present the UI
    # sheet when an open project is rewritten for inconsequential changes, the
    # project file is written to a temporary file in the xcodeproj directory
    # first.  The new temporary file is then compared to the existing project
    # file, if any.  If they differ, the new file replaces the old; otherwise,
    # the new project file is simply deleted.  Xcode properly detects a file
    # being renamed over an open project file as a change and so it remains
    # able to present the "project file changed" sheet under this system.
    # Writing to a temporary file first also avoids the possible problem of
    # Xcode rereading an incomplete project file.
    (output_fd, new_pbxproj_path) = \
        tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
                         dir=self.path)

    try:
      output_file = os.fdopen(output_fd, 'wb')

      self.project_file.Print(output_file)
      output_file.close()

      pbxproj_path = os.path.join(self.path, 'project.pbxproj')

      same = False
      try:
        same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
      except OSError, e:
        # ENOENT simply means there is no previous project file to compare.
        if e.errno != errno.ENOENT:
          raise

      if same:
        # The new file is identical to the old one, just get rid of the new
        # one.
        os.unlink(new_pbxproj_path)
      else:
        # The new file is different from the old one, or there is no old one.
        # Rename the new file to the permanent name.
        #
        # tempfile.mkstemp uses an overly restrictive mode, resulting in a
        # file that can only be read by the owner, regardless of the umask.
        # There's no reason to not respect the umask here, which means that
        # an extra hoop is required to fetch it and reset the new file's mode.
        #
        # No way to get the umask without setting a new one?  Set a safe one
        # and then set it back to the old value.
        umask = os.umask(077)
        os.umask(umask)

        os.chmod(new_pbxproj_path, 0666 & ~umask)
        os.rename(new_pbxproj_path, pbxproj_path)

    except Exception:
      # Don't leave turds behind.  In fact, if this code was responsible for
      # creating the xcodeproj directory, get rid of that too.
      os.unlink(new_pbxproj_path)
      if self.created_dir:
        shutil.rmtree(self.path, True)
      raise
457
458
# Module-level cache so xcodebuild is invoked at most once per process.
cached_xcode_version = None
def InstalledXcodeVersion():
  """Fetches the installed version of Xcode, returns empty string if it is
  unable to figure it out.

  The result is cached in cached_xcode_version; any failure (tool missing,
  non-zero exit, unparsable output) caches and returns ''.
  """

  global cached_xcode_version
  if cached_xcode_version is not None:
    return cached_xcode_version

  # Default to an empty string
  cached_xcode_version = ''

  # Collect the xcodebuild's version information.
  try:
    import subprocess
    cmd = ['/usr/bin/xcodebuild', '-version']
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    # NOTE(review): under Python 3 communicate() yields bytes; this module
    # is Python 2 code, so no decode is performed here.
    xcodebuild_version_info = proc.communicate()[0]
    # Any error, return empty string
    if proc.returncode:
      xcodebuild_version_info = ''
  except OSError:
    # We failed to launch the tool
    xcodebuild_version_info = ''

  # Pull out the Xcode version itself.
  match_line = re.search('^Xcode (.*)$', xcodebuild_version_info, re.MULTILINE)
  if match_line:
    cached_xcode_version = match_line.group(1)
  # Done!
  return cached_xcode_version
490
491
def AddSourceToTarget(source, pbxp, xct):
  """Routes a source file either into the target's compile phase or, for
  non-compilable files, into the project's file tree only."""
  # TODO(mark): Perhaps this can be made a little bit fancier.
  compilable = ('c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's')
  ext = posixpath.splitext(posixpath.basename(source))[1]
  if ext != '':
    ext = ext[1:].lower()

  if ext in compilable:
    xct.SourcesPhase().AddFile(source)
  else:
    # Files that aren't added to a sources build phase can still go into
    # the project file, just not as part of a build phase.
    pbxp.AddOrGetFileInRootGroup(source)
506
507
def AddResourceToTarget(resource, pbxp, xct):
  # Adds the file unconditionally to the target's resources build phase;
  # unlike AddSourceToTarget, no extension-based filtering is performed
  # (pbxp is unused here).
  # TODO(mark): Combine with AddSourceToTarget above?  Or just inline this call
  # where it's used.
  xct.ResourcesPhase().AddFile(resource)
512
513
# Matches $(VAR); group 1 is the whole "$(VAR)" text, group 2 the name.
_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
def ExpandXcodeVariables(string, expansions):
  """Expands Xcode-style $(VARIABLES) in string per the expansions dict.

  In some rare cases, it is appropriate to expand Xcode variables when a
  project file is generated.  For any substring $(VAR) in string, if VAR is a
  key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
  Any $(VAR) substring in string for which VAR is not a key in the expansions
  dict will remain in the returned string.
  """

  # findall returns a (possibly empty) list, never None, so no None check
  # is needed.
  matches = _xcode_variable_re.findall(string)
  matches.reverse()
  for match in matches:
    (to_replace, variable) = match
    if not variable in expansions:
      continue

    # Use plain string replacement instead of re.sub: re.sub would
    # misinterpret backslashes and "\g<...>"/"\1" group references that may
    # legitimately appear in an expansion value.
    string = string.replace(to_replace, expansions[variable])

  return string
539
540
def EscapeXCodeArgument(s):
  """We must escape the arguments that we give to XCode so that it knows not to
  split on spaces and to respect backslash and quote literals."""
  escaped = s.replace('\\', '\\\\').replace('"', '\\"')
  return '"%s"' % escaped
547
548
549 def GenerateOutput(target_list, target_dicts, data, params):
550 options = params['options']
551 generator_flags = params.get('generator_flags', {})
552 parallel_builds = generator_flags.get('xcode_parallel_builds', True)
553 serialize_all_tests = \
554 generator_flags.get('xcode_serialize_all_test_runs', True)
555 xcode_projects = {}
556 for build_file, build_file_dict in data.iteritems():
557 (build_file_root, build_file_ext) = os.path.splitext(build_file)
558 if build_file_ext != '.gyp':
559 continue
560 xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
561 if options.generator_output:
562 xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
563 xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
564 xcode_projects[build_file] = xcp
565 pbxp = xcp.project
566
567 if parallel_builds:
568 pbxp.SetProperty('attributes',
569 {'BuildIndependentTargetsInParallel': 'YES'})
570
571 main_group = pbxp.GetProperty('mainGroup')
572 build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
573 main_group.AppendChild(build_group)
574 for included_file in build_file_dict['included_files']:
575 build_group.AddOrGetFileByPath(included_file, False)
576
577 xcode_targets = {}
578 xcode_target_to_target_dict = {}
579 for qualified_target in target_list:
580 [build_file, target_name, toolset] = \
581 gyp.common.ParseQualifiedTarget(qualified_target)
582
583 spec = target_dicts[qualified_target]
584 if spec['toolset'] != 'target':
585 raise Exception(
586 'Multiple toolsets not supported in xcode build (target %s)' %
587 qualified_target)
588 configuration_names = [spec['default_configuration']]
589 for configuration_name in sorted(spec['configurations'].keys()):
590 if configuration_name not in configuration_names:
591 configuration_names.append(configuration_name)
592 xcp = xcode_projects[build_file]
593 pbxp = xcp.project
594
595 # Set up the configurations for the target according to the list of names
596 # supplied.
597 xccl = CreateXCConfigurationList(configuration_names)
598
599 # Create an XCTarget subclass object for the target. We use the type
600 # with "+bundle" appended if the target has "mac_bundle" set.
601 _types = {
602 'executable': 'com.apple.product-type.tool',
603 'loadable_module': 'com.apple.product-type.library.dynamic',
604 'shared_library': 'com.apple.product-type.library.dynamic',
605 'static_library': 'com.apple.product-type.library.static',
606 'executable+bundle': 'com.apple.product-type.application',
607 'loadable_module+bundle': 'com.apple.product-type.bundle',
608 'shared_library+bundle': 'com.apple.product-type.framework',
609 }
610
611 target_properties = {
612 'buildConfigurationList': xccl,
613 'name': target_name,
614 }
615
616 type = spec['type']
617 is_bundle = int(spec.get('mac_bundle', 0))
618 if type != 'none':
619 type_bundle_key = type
620 if is_bundle:
621 type_bundle_key += '+bundle'
622 xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
623 try:
624 target_properties['productType'] = _types[type_bundle_key]
625 except KeyError, e:
626 gyp.common.ExceptionAppend(e, "-- unknown product type while "
627 "writing target %s" % target_name)
628 raise
629 else:
630 xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
631
632 target_product_name = spec.get('product_name')
633 if target_product_name is not None:
634 target_properties['productName'] = target_product_name
635
636 xct = xctarget_type(target_properties, parent=pbxp,
637 force_outdir=spec.get('product_dir'),
638 force_prefix=spec.get('product_prefix'),
639 force_extension=spec.get('product_extension'))
640 pbxp.AppendProperty('targets', xct)
641 xcode_targets[qualified_target] = xct
642 xcode_target_to_target_dict[xct] = spec
643
644 # Xcode does not have a distinct type for loadable_modules that are pure
645 # BSD targets (ie-unbundled). It uses the same setup as a shared_library
646 # but the mach-o type is explictly set in the settings. So before we do
647 # anything else, for this one case, we stuff in that one setting. This
648 # would allow the other data in the spec to change it if need be.
649 if type == 'loadable_module' and not is_bundle:
650 xccl.SetBuildSetting('MACH_O_TYPE', 'mh_bundle')
651
652 spec_actions = spec.get('actions', [])
653 spec_rules = spec.get('rules', [])
654
655 # Xcode has some "issues" with checking dependencies for the "Compile
656 # sources" step with any source files/headers generated by actions/rules.
657 # To work around this, if a target is building anything directly (not
658 # type "none"), then a second target as used to run the GYP actions/rules
659 # and is made a dependency of this target. This way the work is done
660 # before the dependency checks for what should be recompiled.
661 support_xct = None
662 if type != 'none' and (spec_actions or spec_rules):
663 support_xccl = CreateXCConfigurationList(configuration_names);
664 support_target_properties = {
665 'buildConfigurationList': support_xccl,
666 'name': target_name + ' Support',
667 }
668 if target_product_name:
669 support_target_properties['productName'] = \
670 target_product_name + ' Support'
671 support_xct = \
672 gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
673 parent=pbxp)
674 pbxp.AppendProperty('targets', support_xct)
675 xct.AddDependency(support_xct)
676 # Hang the support target off the main target so it can be tested/found
677 # by the generator during Finalize.
678 xct.support_target = support_xct
679
680 prebuild_index = 0
681
682 # Add custom shell script phases for "actions" sections.
683 for action in spec_actions:
684 # There's no need to write anything into the script to ensure that the
685 # output directories already exist, because Xcode will look at the
686 # declared outputs and automatically ensure that they exist for us.
687
688 # Do we have a message to print when this action runs?
689 message = action.get('message')
690 if message:
691 message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
692 else:
693 message = ''
694
695 # Turn the list into a string that can be passed to a shell.
696 action_string = gyp.common.EncodePOSIXShellList(action['action'])
697
698 # Convert Xcode-type variable references to sh-compatible environment
699 # variable references.
700 message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
701 action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
702 action_string)
703
704 script = ''
705 # Include the optional message
706 if message_sh:
707 script += message_sh + '\n'
708 # Be sure the script runs in exec, and that if exec fails, the script
709 # exits signalling an error.
710 script += 'exec ' + action_string_sh + '\nexit 1\n'
711 ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
712 'inputPaths': action['inputs'],
713 'name': 'Action "' + action['action_name'] + '"',
714 'outputPaths': action['outputs'],
715 'shellScript': script,
716 'showEnvVarsInLog': 0,
717 })
718
719 if support_xct:
720 support_xct.AppendProperty('buildPhases', ssbp)
721 else:
722 # TODO(mark): this assumes too much knowledge of the internals of
723 # xcodeproj_file; some of these smarts should move into xcodeproj_file
724 # itself.
725 xct._properties['buildPhases'].insert(prebuild_index, ssbp)
726 prebuild_index = prebuild_index + 1
727
728 # TODO(mark): Should verify that at most one of these is specified.
729 if int(action.get('process_outputs_as_sources', False)):
730 for output in action['outputs']:
731 AddSourceToTarget(output, pbxp, xct)
732
733 if int(action.get('process_outputs_as_mac_bundle_resources', False)):
734 for output in action['outputs']:
735 AddResourceToTarget(output, pbxp, xct)
736
737 # tgt_mac_bundle_resources holds the list of bundle resources so
738 # the rule processing can check against it.
739 if is_bundle:
740 tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
741 else:
742 tgt_mac_bundle_resources = []
743
744 # Add custom shell script phases driving "make" for "rules" sections.
745 #
746 # Xcode's built-in rule support is almost powerful enough to use directly,
747 # but there are a few significant deficiencies that render them unusable.
748 # There are workarounds for some of its inadequacies, but in aggregate,
749 # the workarounds added complexity to the generator, and some workarounds
750 # actually require input files to be crafted more carefully than I'd like.
751 # Consequently, until Xcode rules are made more capable, "rules" input
752 # sections will be handled in Xcode output by shell script build phases
753 # performed prior to the compilation phase.
754 #
755 # The following problems with Xcode rules were found. The numbers are
756 # Apple radar IDs. I hope that these shortcomings are addressed, I really
757 # liked having the rules handled directly in Xcode during the period that
758 # I was prototyping this.
759 #
760 # 6588600 Xcode compiles custom script rule outputs too soon, compilation
761 # fails. This occurs when rule outputs from distinct inputs are
762 # interdependent. The only workaround is to put rules and their
763 # inputs in a separate target from the one that compiles the rule
764 # outputs. This requires input file cooperation and it means that
765 # process_outputs_as_sources is unusable.
766 # 6584932 Need to declare that custom rule outputs should be excluded from
767 # compilation. A possible workaround is to lie to Xcode about a
768 # rule's output, giving it a dummy file it doesn't know how to
769 # compile. The rule action script would need to touch the dummy.
770 # 6584839 I need a way to declare additional inputs to a custom rule.
771 # A possible workaround is a shell script phase prior to
772 # compilation that touches a rule's primary input files if any
773 # would-be additional inputs are newer than the output. Modifying
774 # the source tree - even just modification times - feels dirty.
775 # 6564240 Xcode "custom script" build rules always dump all environment
776 # variables. This is a low-prioroty problem and is not a
777 # show-stopper.
778 rules_by_ext = {}
779 for rule in spec_rules:
780 rules_by_ext[rule['extension']] = rule
781
782 # First, some definitions:
783 #
784 # A "rule source" is a file that was listed in a target's "sources"
785 # list and will have a rule applied to it on the basis of matching the
786 # rule's "extensions" attribute. Rule sources are direct inputs to
787 # rules.
788 #
789 # Rule definitions may specify additional inputs in their "inputs"
790 # attribute. These additional inputs are used for dependency tracking
791 # purposes.
792 #
793 # A "concrete output" is a rule output with input-dependent variables
794 # resolved. For example, given a rule with:
795 # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
796 # if the target's "sources" list contained "one.ext" and "two.ext",
797 # the "concrete output" for rule input "two.ext" would be "two.cc". If
798 # a rule specifies multiple outputs, each input file that the rule is
799 # applied to will have the same number of concrete outputs.
800 #
801 # If any concrete outputs are outdated or missing relative to their
802 # corresponding rule_source or to any specified additional input, the
803 # rule action must be performed to generate the concrete outputs.
804
805 # concrete_outputs_by_rule_source will have an item at the same index
806 # as the rule['rule_sources'] that it corresponds to. Each item is a
807 # list of all of the concrete outputs for the rule_source.
808 concrete_outputs_by_rule_source = []
809
810 # concrete_outputs_all is a flat list of all concrete outputs that this
811 # rule is able to produce, given the known set of input files
812 # (rule_sources) that apply to it.
813 concrete_outputs_all = []
814
815 # messages & actions are keyed by the same indices as rule['rule_sources']
816 # and concrete_outputs_by_rule_source. They contain the message and
817 # action to perform after resolving input-dependent variables. The
818 # message is optional, in which case None is stored for each rule source.
819 messages = []
820 actions = []
821
822 for rule_source in rule.get('rule_sources', []):
823 rule_source_basename = posixpath.basename(rule_source)
824 (rule_source_root, rule_source_ext) = \
825 posixpath.splitext(rule_source_basename)
826
827 # These are the same variable names that Xcode uses for its own native
828 # rule support. Because Xcode's rule engine is not being used, they
829 # need to be expanded as they are written to the makefile.
830 rule_input_dict = {
831 'INPUT_FILE_BASE': rule_source_root,
832 'INPUT_FILE_SUFFIX': rule_source_ext,
833 'INPUT_FILE_NAME': rule_source_basename,
834 'INPUT_FILE_PATH': rule_source,
835 }
836
837 concrete_outputs_for_this_rule_source = []
838 for output in rule.get('outputs', []):
839 # Fortunately, Xcode and make both use $(VAR) format for their
840 # variables, so the expansion is the only transformation necessary.
841 # Any remaning $(VAR)-type variables in the string can be given
842 # directly to make, which will pick up the correct settings from
843 # what Xcode puts into the environment.
844 concrete_output = ExpandXcodeVariables(output, rule_input_dict)
845 concrete_outputs_for_this_rule_source.append(concrete_output)
846
847 # Add all concrete outputs to the project.
848 pbxp.AddOrGetFileInRootGroup(concrete_output)
849
850 concrete_outputs_by_rule_source.append( \
851 concrete_outputs_for_this_rule_source)
852 concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
853
854 # TODO(mark): Should verify that at most one of these is specified.
855 if int(rule.get('process_outputs_as_sources', False)):
856 for output in concrete_outputs_for_this_rule_source:
857 AddSourceToTarget(output, pbxp, xct)
858
859 # If the file came from the mac_bundle_resources list or if the rule
860 # is marked to process outputs as bundle resource, do so.
861 was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
862 if was_mac_bundle_resource or \
863 int(rule.get('process_outputs_as_mac_bundle_resources', False)):
864 for output in concrete_outputs_for_this_rule_source:
865 AddResourceToTarget(output, pbxp, xct)
866
867 # Do we have a message to print when this rule runs?
868 message = rule.get('message')
869 if message:
870 message = gyp.common.EncodePOSIXShellArgument(message)
871 message = '@echo note: ' + ExpandXcodeVariables(message,
872 rule_input_dict)
873 messages.append(message)
874
875 # Turn the list into a string that can be passed to a shell.
876 action_string = gyp.common.EncodePOSIXShellList(rule['action'])
877
878 action = ExpandXcodeVariables(action_string, rule_input_dict)
879 actions.append(action)
880
881 if len(concrete_outputs_all) > 0:
882 # TODO(mark): There's a possibilty for collision here. Consider
883 # target "t" rule "A_r" and target "t_A" rule "r".
884 makefile_name = '%s_%s.make' % (target_name, rule['rule_name'])
885 makefile_path = os.path.join(xcode_projects[build_file].path,
886 makefile_name)
887 # TODO(mark): try/close? Write to a temporary file and swap it only
888 # if it's got changes?
889 makefile = open(makefile_path, 'wb')
890
891 # make will build the first target in the makefile by default. By
892 # convention, it's called "all". List all (or at least one)
893 # concrete output for each rule source as a prerequisite of the "all"
894 # target.
895 makefile.write('all: \\\n')
896 for concrete_output_index in \
897 xrange(0, len(concrete_outputs_by_rule_source)):
898 # Only list the first (index [0]) concrete output of each input
899 # in the "all" target. Otherwise, a parallel make (-j > 1) would
900 # attempt to process each input multiple times simultaneously.
901 # Otherwise, "all" could just contain the entire list of
902 # concrete_outputs_all.
903 concrete_output = \
904 concrete_outputs_by_rule_source[concrete_output_index][0]
905 if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
906 eol = ''
907 else:
908 eol = ' \\'
909 makefile.write(' %s%s\n' % (concrete_output, eol))
910
911 for (rule_source, concrete_outputs, message, action) in \
912 zip(rule['rule_sources'], concrete_outputs_by_rule_source,
913 messages, actions):
914 makefile.write('\n')
915
916 # Add a rule that declares it can build each concrete output of a
917 # rule source. Collect the names of the directories that are
918 # required.
919 concrete_output_dirs = []
920 for concrete_output_index in xrange(0, len(concrete_outputs)):
921 concrete_output = concrete_outputs[concrete_output_index]
922 if concrete_output_index == 0:
923 bol = ''
924 else:
925 bol = ' '
926 makefile.write('%s%s \\\n' % (bol, concrete_output))
927
928 concrete_output_dir = posixpath.dirname(concrete_output)
929 if (concrete_output_dir and
930 concrete_output_dir not in concrete_output_dirs):
931 concrete_output_dirs.append(concrete_output_dir)
932
933 makefile.write(' : \\\n')
934
935 # The prerequisites for this rule are the rule source itself and
936 # the set of additional rule inputs, if any.
937 prerequisites = [rule_source]
938 prerequisites.extend(rule.get('inputs', []))
939 for prerequisite_index in xrange(0, len(prerequisites)):
940 prerequisite = prerequisites[prerequisite_index]
941 if prerequisite_index == len(prerequisites) - 1:
942 eol = ''
943 else:
944 eol = ' \\'
945 makefile.write(' %s%s\n' % (prerequisite, eol))
946
947 # Make sure that output directories exist before executing the rule
948 # action.
949 # TODO(mark): quote the list of concrete_output_dirs.
950 if len(concrete_output_dirs) > 0:
951 makefile.write('\tmkdir -p %s\n' % ' '.join(concrete_output_dirs))
952
953 # The rule message and action have already had the necessary variable
954 # substitutions performed.
955 if message:
956 makefile.write('\t%s\n' % message)
957 makefile.write('\t%s\n' % action)
958
959 makefile.close()
960
961 # It might be nice to ensure that needed output directories exist
962 # here rather than in each target in the Makefile, but that wouldn't
963 # work if there ever was a concrete output that had an input-dependent
964 # variable anywhere other than in the leaf position.
965
966 # Don't declare any inputPaths or outputPaths. If they're present,
967 # Xcode will provide a slight optimization by only running the script
968 # phase if any output is missing or outdated relative to any input.
969 # Unfortunately, it will also assume that all outputs are touched by
970 # the script, and if the outputs serve as files in a compilation
971 # phase, they will be unconditionally rebuilt. Since make might not
972 # rebuild everything that could be declared here as an output, this
973 # extra compilation activity is unnecessary. With inputPaths and
974 # outputPaths not supplied, make will always be called, but it knows
975 # enough to not do anything when everything is up-to-date.
976
977 # To help speed things up, pass -j COUNT to make so it does some work
978 # in parallel. Don't use ncpus because Xcode will build ncpus targets
979 # in parallel and if each target happens to have a rules step, there
980 # would be ncpus^2 things going. With a machine that has 2 quad-core
981 # Xeons, a build can quickly run out of processes based on
982 # scheduling/other tasks, and randomly failing builds are no good.
983 script = \
984 """JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
985 if [ "${JOB_COUNT}" -gt 4 ]; then
986 JOB_COUNT=4
987 fi
988 exec "${DEVELOPER_BIN_DIR}/make" -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
989 exit 1
990 """ % makefile_name
991 ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
992 'name': 'Rule "' + rule['rule_name'] + '"',
993 'shellScript': script,
994 'showEnvVarsInLog': 0,
995 })
996
997 if support_xct:
998 support_xct.AppendProperty('buildPhases', ssbp)
999 else:
1000 # TODO(mark): this assumes too much knowledge of the internals of
1001 # xcodeproj_file; some of these smarts should move into xcodeproj_file
1002 # itself.
1003 xct._properties['buildPhases'].insert(prebuild_index, ssbp)
1004 prebuild_index = prebuild_index + 1
1005
1006 # Extra rule inputs also go into the project file. Concrete outputs were
1007 # already added when they were computed.
1008 for group in ['inputs', 'inputs_excluded']:
1009 for item in rule.get(group, []):
1010 pbxp.AddOrGetFileInRootGroup(item)
1011
1012 # Add "sources".
1013 for source in spec.get('sources', []):
1014 (source_root, source_extension) = posixpath.splitext(source)
1015 if source_extension[1:] not in rules_by_ext:
1016 # AddSourceToTarget will add the file to a root group if it's not
1017 # already there.
1018 AddSourceToTarget(source, pbxp, xct)
1019 else:
1020 pbxp.AddOrGetFileInRootGroup(source)
1021
1022 # Add "mac_bundle_resources" if it's a bundle of any type.
1023 if is_bundle:
1024 for resource in tgt_mac_bundle_resources:
1025 (resource_root, resource_extension) = posixpath.splitext(resource)
1026 if resource_extension[1:] not in rules_by_ext:
1027 AddResourceToTarget(resource, pbxp, xct)
1028 else:
1029 pbxp.AddOrGetFileInRootGroup(resource)
1030
1031 # Add "copies".
1032 for copy_group in spec.get('copies', []):
1033 pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
1034 'name': 'Copy to ' + copy_group['destination']
1035 },
1036 parent=xct)
1037 dest = copy_group['destination']
1038 if dest[0] not in ('/', '$'):
1039 # Relative paths are relative to $(SRCROOT).
1040 dest = '$(SRCROOT)/' + dest
1041 pbxcp.SetDestination(dest)
1042
1043 # TODO(mark): The usual comment about this knowing too much about
1044 # gyp.xcodeproj_file internals applies.
1045 xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
1046
1047 for file in copy_group['files']:
1048 pbxcp.AddFile(file)
1049
1050 # Excluded files can also go into the project file.
1051 for key in ['sources', 'mac_bundle_resources']:
1052 excluded_key = key + '_excluded'
1053 for item in spec.get(excluded_key, []):
1054 pbxp.AddOrGetFileInRootGroup(item)
1055
1056 # So can "inputs" and "outputs" sections of "actions" groups.
1057 for action in spec.get('actions', []):
1058 groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
1059 for group in groups:
1060 for item in action.get(group, []):
1061 # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
1062 # sources.
1063 if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
1064 pbxp.AddOrGetFileInRootGroup(item)
1065
1066 for postbuild in spec.get('postbuilds', []):
1067 action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
1068 script = 'exec ' + action_string_sh + '\nexit 1\n'
1069 ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
1070 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
1071 'shellScript': script,
1072 'showEnvVarsInLog': 0,
1073 })
1074 xct.AppendProperty('buildPhases', ssbp)
1075
1076 # Add dependencies before libraries, because adding a dependency may imply
1077 # adding a library. It's preferable to keep dependencies listed first
1078 # during a link phase so that they can override symbols that would
1079 # otherwise be provided by libraries, which will usually include system
1080 # libraries. On some systems, ld is finicky and even requires the
1081 # libraries to be ordered in such a way that unresolved symbols in
1082 # earlier-listed libraries may only be resolved by later-listed libraries.
1083 # The Mac linker doesn't work that way, but other platforms do, and so
1084 # their linker invocations need to be constructed in this way. There's
1085 # no compelling reason for Xcode's linker invocations to differ.
1086
1087 if 'dependencies' in spec:
1088 for dependency in spec['dependencies']:
1089 xct.AddDependency(xcode_targets[dependency])
1090 # The support project also gets the dependencies (in case they are
1091 # needed for the actions/rules to work).
1092 if support_xct:
1093 support_xct.AddDependency(xcode_targets[dependency])
1094
1095 if 'libraries' in spec:
1096 for library in spec['libraries']:
1097 xct.FrameworksPhase().AddFile(library)
1098 # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
1099 # I wish Xcode handled this automatically.
1100 # TODO(mark): this logic isn't right. There are certain directories
1101 # that are always searched, we should check to see if the library is
1102 # in one of those directories, and if not, we should do the
1103 # AppendBuildSetting thing.
1104 if not posixpath.isabs(library) and not library.startswith('$'):
1105 # TODO(mark): Need to check to see if library_dir is already in
1106 # LIBRARY_SEARCH_PATHS.
1107 library_dir = posixpath.dirname(library)
1108 xct.AppendBuildSetting('LIBRARY_SEARCH_PATHS', library_dir)
1109
1110 for configuration_name in configuration_names:
1111 configuration = spec['configurations'][configuration_name]
1112 xcbc = xct.ConfigurationNamed(configuration_name)
1113 for include_dir in configuration.get('mac_framework_dirs', []):
1114 xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
1115 for include_dir in configuration.get('include_dirs', []):
1116 xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
1117 if 'defines' in configuration:
1118 for define in configuration['defines']:
1119 set_define = EscapeXCodeArgument(define)
1120 xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
1121 if 'xcode_settings' in configuration:
1122 for xck, xcv in configuration['xcode_settings'].iteritems():
1123 xcbc.SetBuildSetting(xck, xcv)
1124
1125 build_files = []
1126 for build_file, build_file_dict in data.iteritems():
1127 if build_file.endswith('.gyp'):
1128 build_files.append(build_file)
1129
1130 for build_file in build_files:
1131 xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
1132
1133 for build_file in build_files:
1134 xcode_projects[build_file].Finalize2(xcode_targets,
1135 xcode_target_to_target_dict)
1136
1137 for build_file in build_files:
1138 xcode_projects[build_file].Write()
+0
-2195
mozc_build_tools/gyp/pylib/gyp/input.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 from compiler.ast import Const
7 from compiler.ast import Dict
8 from compiler.ast import Discard
9 from compiler.ast import List
10 from compiler.ast import Module
11 from compiler.ast import Node
12 from compiler.ast import Stmt
13 import compiler
14 import copy
15 import gyp.common
16 import optparse
17 import os.path
18 import re
19 import shlex
20 import subprocess
21 import sys
22
23
# Target types whose build products can be linked against; everything else
# (e.g. 'none', 'static_library' handling aside) is treated as non-linkable.
linkable_types = ['executable', 'shared_library', 'loadable_module']

# Target sections whose entries are references to other targets.
dependency_sections = ['dependencies', 'export_dependent_settings']
29
# base_path_sections lists the sections defined by GYP itself whose values
# are pathnames.  Generators may contribute additional keys; the two lists
# are merged into path_sections, but callers should use IsPathSection
# instead of consulting either list directly.
base_path_sections = [
  'destination',
  'files',
  'include_dirs',
  'inputs',
  'libraries',
  'outputs',
  'sources',
]
path_sections = []


def IsPathSection(section):
  """Returns True if |section| is treated as containing pathnames."""
  # A section name may carry trailing merge operators ('=', '+', '?', '!'),
  # which qualify the base section name; strip them before testing.  '/' is
  # deliberately not stripped: a regular-expression section can never be
  # treated as a path.
  section = section.rstrip('=+?!')
  if section in path_sections:
    return True
  return section.endswith(('_dir', '_dirs', '_file', '_files',
                           '_path', '_paths'))
59
60
# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations.  It is merged
# with a list that can come from the generator to
# create non_configuration_keys.
base_non_configuration_keys = [
  # Sections that must exist inside targets and not configurations.
  'actions',
  'configurations',
  'copies',
  'default_configuration',
  'dependencies',
  'dependencies_original',
  'link_languages',
  'libraries',
  'postbuilds',
  'product_dir',
  'product_extension',
  'product_name',
  'product_prefix',
  'rules',
  'run_as',
  'sources',
  'suppress_wildcard',
  'target_name',
  'test',
  'toolset',
  'toolsets',
  'type',
  'variants',

  # Sections that can be found inside targets or configurations, but that
  # should not be propagated from targets into their configurations.
  'variables',
]
# Populated elsewhere (per the comment above) by merging
# base_non_configuration_keys with generator-provided keys.
non_configuration_keys = []

# Controls whether the generator wants absolute build file paths.
absolute_build_file_paths = False

# Controls whether or not the generator supports multiple toolsets.
multiple_toolsets = False
102
103
def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
  """Return a list of all build files included into build_file_path.

  The returned list will contain build_file_path as well as all other files
  that it included, either directly or indirectly.  Note that the list may
  contain files that were included into a conditional section that evaluated
  to false and was not merged into build_file_path's dict.

  aux_data is a dict containing a key for each build file or included build
  file.  Those keys provide access to dicts whose "included" keys contain
  lists of all other files included by the build file.

  included should be left at its default None value by external callers.  It
  is used for recursion.

  The returned list will not contain any duplicate entries.  Each build file
  in the list will be relative to the current directory.
  """

  # Idiomatic identity test (was "== None"); a fresh list per top-level call
  # also avoids the shared-mutable-default pitfall.
  if included is None:
    included = []

  # A file already in the accumulator has been visited; stop so that include
  # cycles terminate and no duplicates are produced.
  if build_file_path in included:
    return included

  included.append(build_file_path)

  for included_build_file in aux_data[build_file_path].get('included', []):
    GetIncludedBuildFiles(included_build_file, aux_data, included)

  return included
135
136
def CheckedEval(file_contents):
  """Return the eval of a gyp file.

  The gyp file is restricted to dictionaries and lists only, and
  repeated keys are not allowed.

  Note that this is slower than eval() is.
  """

  # Parse with the (Python 2) compiler module and walk the AST by hand so
  # that only dict/list/constant literals are accepted; any other construct
  # is rejected by CheckNode below.
  ast = compiler.parse(file_contents)
  assert isinstance(ast, Module)
  # A Module's children are (docstring, Stmt); a pure literal expression
  # file has no docstring.
  c1 = ast.getChildren()
  assert c1[0] is None
  assert isinstance(c1[1], Stmt)
  # The statement list must hold a single expression statement (Discard)
  # wrapping the top-level literal.
  c2 = c1[1].getChildren()
  assert isinstance(c2[0], Discard)
  c3 = c2[0].getChildren()
  assert len(c3) == 1
  return CheckNode(c3[0], [])
156
157
def CheckNode(node, keypath):
  """Converts a compiler-module AST literal node into its Python value.

  keypath is the chain of keys/indices leading to |node|; it is used only
  for building error messages.  Raises KeyError if a dict repeats a key,
  and TypeError for any node that is not a Dict, List, or Const.
  """
  if isinstance(node, Dict):
    c = node.getChildren()
    # NOTE: shadows the builtin 'dict' within this scope.
    dict = {}
    # Dict children alternate key, value, key, value, ...; keys must be
    # constants.
    for n in range(0, len(c), 2):
      assert isinstance(c[n], Const)
      key = c[n].getChildren()[0]
      if key in dict:
        raise KeyError, "Key '" + key + "' repeated at level " + \
              repr(len(keypath) + 1) + " with key path '" + \
              '.'.join(keypath) + "'"
      kp = list(keypath)  # Make a copy of the list for descending this node.
      kp.append(key)
      dict[key] = CheckNode(c[n + 1], kp)
    return dict
  elif isinstance(node, List):
    c = node.getChildren()
    children = []
    for index, child in enumerate(c):
      kp = list(keypath)  # Copy list.
      kp.append(repr(index))
      children.append(CheckNode(child, kp))
    return children
  elif isinstance(node, Const):
    return node.getChildren()[0]
  else:
    raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \
          "': " + repr(node)
186
187
def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
                     is_target, check):
  """Reads, evals, and caches a single build file, merging in its includes.

  data acts as the cache: if build_file_path is already a key, the cached
  dict is returned immediately.  Otherwise the file is read, evaluated
  (with CheckedEval when check is true, or a builtins-restricted eval
  otherwise), stored in data and given an empty bookkeeping entry in
  aux_data, and then its 'includes' are merged in.  The extra |includes|
  list is honored only when is_target is true.  Returns the resulting dict.
  """
  if build_file_path in data:
    return data[build_file_path]

  if os.path.exists(build_file_path):
    build_file_contents = open(build_file_path).read()
  else:
    raise Exception("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))

  build_file_data = None
  try:
    if check:
      build_file_data = CheckedEval(build_file_contents)
    else:
      # Evaluate with __builtins__ stripped so the build file is treated as
      # a data literal rather than arbitrary code.
      build_file_data = eval(build_file_contents, {'__builtins__': None},
                             None)
  except SyntaxError, e:
    # Point the SyntaxError at the build file so the user sees which file
    # failed to parse.
    e.filename = build_file_path
    raise
  except Exception, e:
    gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
    raise

  data[build_file_path] = build_file_data
  # aux_data's entry is created empty here; include processing below records
  # an 'included' list into it.
  aux_data[build_file_path] = {}

  # Scan for includes and merge them in.
  try:
    if is_target:
      LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                    aux_data, variables, includes, check)
    else:
      LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                    aux_data, variables, None, check)
  except Exception, e:
    gyp.common.ExceptionAppend(e,
                               'while reading includes of ' + build_file_path)
    raise

  return build_file_data
229
230
def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
                                  variables, includes, check):
  """Merges included build files into |subdict| and recurses into children.

  The files to merge are the caller-supplied |includes| (may be None)
  followed by subdict's own 'includes' entries, resolved relative to
  subdict_path.  Each include is recorded in aux_data[subdict_path]
  ['included'], loaded via LoadOneBuildFile, and merged with MergeDicts.
  Finally, nested dicts and lists are scanned for further 'includes'.
  """
  includes_list = []
  if includes != None:
    includes_list.extend(includes)
  if 'includes' in subdict:
    for include in subdict['includes']:
      # "include" is specified relative to subdict_path, so compute the real
      # path to include by appending the provided "include" to the directory
      # in which subdict_path resides.
      relative_include = \
          os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
      includes_list.append(relative_include)
    # Unhook the includes list, it's no longer needed.
    del subdict['includes']

  # Merge in the included files.
  for include in includes_list:
    if not 'included' in aux_data[subdict_path]:
      aux_data[subdict_path]['included'] = []
    aux_data[subdict_path]['included'].append(include)

    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'" % include)

    # Includes are loaded as non-target files (is_target=False) with no
    # further caller-supplied includes of their own.
    MergeDicts(subdict,
               LoadOneBuildFile(include, data, aux_data, variables, None,
                                False, check),
               subdict_path, include)

  # Recurse into subdictionaries.
  for k, v in subdict.iteritems():
    if v.__class__ == dict:
      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables,
                                    None, check)
    elif v.__class__ == list:
      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables,
                                    check)
268
269
# Recurses through lists so that dicts nested inside them can have their
# 'includes' processed too.
def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
                                  variables, check):
  """Processes 'includes' inside any dict found (recursively) in sublist."""
  for entry in sublist:
    entry_class = entry.__class__
    if entry_class == dict:
      LoadBuildFileIncludesIntoDict(entry, sublist_path, data, aux_data,
                                    variables, None, check)
    elif entry_class == list:
      LoadBuildFileIncludesIntoList(entry, sublist_path, data, aux_data,
                                    variables, check)
280
# Processes toolsets in all the targets.  This recurses into condition
# entries since they can contain toolsets as well.
def ProcessToolsetsInDict(data):
  """Expands each target into one entry per toolset it builds with.

  When the module-level multiple_toolsets flag is clear, every target is
  pinned to the single 'target' toolset.  Extra toolsets get deep copies of
  the target; the original target object is reused for the first toolset
  and appended after its copies, mirroring the original ordering.
  """
  if 'targets' in data:
    expanded = []
    for target in data['targets']:
      if multiple_toolsets:
        toolsets = target.get('toolsets', ['target'])
      else:
        toolsets = ['target']
      if toolsets:
        # Only the extra toolsets require deep copies; the first toolset
        # mutates the original target in place.
        for toolset in toolsets[1:]:
          clone = copy.deepcopy(target)
          clone['toolset'] = toolset
          expanded.append(clone)
        target['toolset'] = toolsets[0]
        expanded.append(target)
    data['targets'] = expanded
  if 'conditions' in data:
    for condition in data['conditions']:
      if isinstance(condition, list):
        for condition_dict in condition[1:]:
          ProcessToolsetsInDict(condition_dict)
307
308
# TODO(mark): I don't love this name.  It just means that it's going to load
# a build file that contains targets and is expected to provide a targets dict
# that contains the targets...
def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
                        depth, check):
  """Loads a target-bearing build file and, recursively, its dependencies.

  Sets up DEPTH, loads the file via LoadOneBuildFile, records which files
  contributed to it, expands toolsets, applies "pre"/"early" variable and
  condition processing, merges target_defaults into every target, and then
  loads the build file of every target dependency.  Already-loaded files
  (tracked in data['target_build_files']) are skipped.  Returns data.
  """
  global absolute_build_file_paths

  # If depth is set, predefine the DEPTH variable to be a relative path from
  # this build file's directory to the directory identified by depth.
  if depth:
    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
    # temporary measure. This should really be addressed by keeping all paths
    # in POSIX until actual project generation.
    d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
    if d == '':
      variables['DEPTH'] = '.'
    else:
      variables['DEPTH'] = d.replace('\\', '/')

  # If the generator needs absolute paths, then do so.
  if absolute_build_file_paths:
    build_file_path = os.path.abspath(build_file_path)

  if build_file_path in data['target_build_files']:
    # Already loaded.
    return
  data['target_build_files'].add(build_file_path)

  gyp.DebugOutput(gyp.DEBUG_INCLUDES,
                  "Loading Target Build File '%s'" % build_file_path)

  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables,
                                     includes, True, check)

  # Store DEPTH for later use in generators.
  build_file_data['_DEPTH'] = depth

  # Set up the included_files key indicating which .gyp files contributed to
  # this target dict.
  if 'included_files' in build_file_data:
    raise KeyError, build_file_path + ' must not contain included_files key'

  included = GetIncludedBuildFiles(build_file_path, aux_data)
  build_file_data['included_files'] = []
  for included_file in included:
    # included_file is relative to the current directory, but it needs to
    # be made relative to build_file_path's directory.
    included_relative = \
        gyp.common.RelativePath(included_file,
                                os.path.dirname(build_file_path))
    build_file_data['included_files'].append(included_relative)

  ProcessToolsetsInDict(build_file_data)

  # Apply "pre"/"early" variable expansions and condition evaluations.
  ProcessVariablesAndConditionsInDict(build_file_data, False, variables,
                                      build_file_path)

  # Look at each project's target_defaults dict, and merge settings into
  # targets.
  if 'target_defaults' in build_file_data:
    index = 0
    if 'targets' in build_file_data:
      while index < len(build_file_data['targets']):
        # This procedure needs to give the impression that target_defaults is
        # used as defaults, and the individual targets inherit from that.
        # The individual targets need to be merged into the defaults.  Make
        # a deep copy of the defaults for each target, merge the target dict
        # as found in the input file into that copy, and then hook up the
        # copy with the target-specific data merged into it as the replacement
        # target dict.
        old_target_dict = build_file_data['targets'][index]
        new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
        MergeDicts(new_target_dict, old_target_dict,
                   build_file_path, build_file_path)
        build_file_data['targets'][index] = new_target_dict
        index = index + 1
    else:
      raise Exception, \
            "Unable to find targets in build file %s" % build_file_path

    # No longer needed.
    del build_file_data['target_defaults']

  # Look for dependencies.  This means that dependency resolution occurs
  # after "pre" conditionals and variable expansion, but before "post" -
  # in other words, you can't put a "dependencies" section inside a "post"
  # conditional within a target.

  if 'targets' in build_file_data:
    for target_dict in build_file_data['targets']:
      if 'dependencies' not in target_dict:
        continue
      for dependency in target_dict['dependencies']:
        # Resolve the dependency to the build file that declares it and load
        # that file (recursively) too.
        other_build_file = \
            gyp.common.ResolveTarget(build_file_path, dependency, None)[0]
        try:
          LoadTargetBuildFile(other_build_file, data, aux_data, variables,
                              includes, depth, check)
        except Exception, e:
          gyp.common.ExceptionAppend(
              e, 'while loading dependencies of %s' % build_file_path)
          raise

  return data
414
415
# Look for the bracket that matches the first bracket seen in a
# string, and return the start and end as a tuple. For example, if
# the input is something like "<(foo <(bar)) blah", then it would
# return (1, 13), indicating the entire string except for the leading
# "<" and trailing " blah".
def FindEnclosingBracketGroup(input):
  # Maps each closing bracket to the opening bracket it must pair with.
  close_to_open = { '}': '{',
                    ']': '[',
                    ')': '(', }
  open_stack = []
  start = -1
  for index, char in enumerate(input):
    if char in close_to_open.values():
      # Remember where the very first opening bracket was seen.
      if start == -1:
        start = index
      open_stack.append(char)
    if char in close_to_open.keys():
      # A closer with nothing open, or a mismatched pair, means failure.
      if not open_stack:
        return (-1, -1)
      if open_stack.pop() != close_to_open[char]:
        return (-1, -1)
      if not open_stack:
        # The bracket opened at |start| has just been closed.
        return (start, index + 1)
  # Ran out of input with brackets still open (or none seen at all).
  return (-1, -1)
444
445
canonical_int_re = re.compile('^(0|-?[1-9][0-9]*)$')


def IsStrCanonicalInt(string):
  """Returns True if |string| is in its canonical integer form.

  The canonical form is such that str(int(string)) == string: no leading
  zeros, no "-0", and an optional minus sign only on nonzero values.
  """
  return isinstance(string, str) and \
         canonical_int_re.match(string) is not None
458
459
# Matches GYP's "early" expansion syntax: <(var), <!(cmd), <@(var),
# <!@(cmd), and the <|(listfile ...) form.  Named groups: 'replace' is the
# whole reference, 'type' is the operator prefix (e.g. "<", "<!@", "<|"),
# 'is_array' contains a '[' when the content is a Python list literal, and
# 'content' is the variable name or command text.  Note the non-greedy
# content match: nested parens are resolved later by
# FindEnclosingBracketGroup, not by this regex.
early_variable_re = re.compile('(?P<replace>(?P<type><((!?@?)|\|)?)'
                               '\((?P<is_array>\s*\[?)'
                               '(?P<content>.*?)(\]?)\))')
# Identical to early_variable_re except the references start with ">"
# ("late"/target phase) instead of "<".
late_variable_re = re.compile('(?P<replace>(?P<type>>((!?@?)|\|)?)'
                              '\((?P<is_array>\s*\[?)'
                              '(?P<content>.*?)(\]?)\))')

# Global cache of results from running commands so they don't have to be run
# more than once.
cached_command_results = {}
470
471
def FixupPlatformCommand(cmd):
  """Rewrites a POSIX-flavored command for the host platform.

  On win32 a leading "cat " is rewritten to "type "; on every other
  platform |cmd| is returned untouched.  |cmd| may be a command string or
  an argv-style list (only the first element is rewritten).
  """
  if sys.platform != 'win32':
    return cmd
  if type(cmd) == list:
    return [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
  return re.sub('^cat ', 'type ', cmd)
479
480
def ExpandVariables(input, is_late, variables, build_file):
  """Expands GYP variable/command references found in |input|.

  |input| may be any value; it is stringified and scanned for "<"-style
  references when is_late is False ("early" phase) or ">"-style references
  when is_late is True ("late" phase).  Each reference is replaced by a
  variable lookup (< >), the output of a shell command (<! >!), or the name
  of a generated list file (<| >|); an "@" variant expands in list context.
  |build_file| is the path of the .gyp file being processed, used for
  relative paths and error messages.  Returns the expanded value; strings
  that are canonical integers are converted to ints.
  """
  # Look for the pattern that gets expanded into variables
  if not is_late:
    variable_re = early_variable_re
    expansion_symbol = '<'
  else:
    variable_re = late_variable_re
    expansion_symbol = '>'

  input_str = str(input)
  # Do a quick scan to determine if an expensive regex search is warranted.
  if expansion_symbol in input_str:
    # Get the entire list of matches as a list of MatchObject instances.
    # (using findall here would return strings instead of MatchObjects).
    matches = [match for match in variable_re.finditer(input_str)]
  else:
    matches = None

  output = input_str
  if matches:
    # Reverse the list of matches so that replacements are done right-to-left.
    # That ensures that earlier replacements won't mess up the string in a
    # way that causes later calls to find the earlier substituted text instead
    # of what's intended for replacement.
    matches.reverse()
    for match_group in matches:
      match = match_group.groupdict()
      gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                      "Matches: %s" % repr(match))
      # match['replace'] is the substring to look for, match['type']
      # is the character code for the replacement type (< > <! >! <| >| <@
      # >@ <!@ >!@), match['is_array'] contains a '[' for command
      # arrays, and match['content'] is the name of the variable (< >)
      # or command to run (<! >!).

      # run_command is true if a ! variant is used.
      run_command = '!' in match['type']

      # file_list is true if a | variant is used.
      file_list = '|' in match['type']

      # Capture these now so we can adjust them later.
      replace_start = match_group.start('replace')
      replace_end = match_group.end('replace')

      # Find the ending paren, and re-evaluate the contained string.
      (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])

      # Adjust the replacement range to match the entire command
      # found by FindEnclosingBracketGroup (since the variable_re
      # probably doesn't match the entire command if it contained
      # nested variables).
      replace_end = replace_start + c_end

      # Find the "real" replacement, matching the appropriate closing
      # paren, and adjust the replacement start and end.
      replacement = input_str[replace_start:replace_end]

      # Figure out what the contents of the variable parens are.
      contents_start = replace_start + c_start + 1
      contents_end = replace_end - 1
      contents = input_str[contents_start:contents_end]

      # Do filter substitution now for <|().
      # Admittedly, this is different than the evaluation order in other
      # contexts. However, since filtration has no chance to run on <|(),
      # this seems like the only obvious way to give them access to filters.
      if file_list:
        processed_variables = copy.deepcopy(variables)
        ProcessListFiltersInDict(contents, processed_variables)
        # Recurse to expand variables in the contents
        contents = ExpandVariables(contents, is_late,
                                   processed_variables, build_file)
      else:
        # Recurse to expand variables in the contents
        contents = ExpandVariables(contents, is_late, variables, build_file)

      # Strip off leading/trailing whitespace so that variable matches are
      # simpler below (and because they are rarely needed).
      contents = contents.strip()

      # expand_to_list is true if an @ variant is used. In that case,
      # the expansion should result in a list. Note that the caller
      # is to be expecting a list in return, and not all callers do
      # because not all are working in list context. Also, for list
      # expansions, there can be no other text besides the variable
      # expansion in the input string.
      expand_to_list = '@' in match['type'] and input_str == replacement

      if run_command or file_list:
        # Find the build file's directory, so commands can be run or file lists
        # generated relative to it.
        build_file_dir = os.path.dirname(build_file)
        if build_file_dir == '':
          # If build_file is just a leaf filename indicating a file in the
          # current directory, build_file_dir might be an empty string. Set
          # it to None to signal to subprocess.Popen that it should run the
          # command in the current directory.
          build_file_dir = None

      # Support <|(listfile.txt ...) which generates a file
      # containing items from a gyp list, generated at gyp time.
      # This works around actions/rules which have more inputs than will
      # fit on the command line.
      if file_list:
        if type(contents) == list:
          contents_list = contents
        else:
          contents_list = contents.split(' ')
        # The replacement value is the listfile path itself; the remaining
        # items are written into that file, one per line.
        replacement = contents_list[0]
        path = replacement
        if not os.path.isabs(path):
          path = os.path.join(build_file_dir, path)
        f = gyp.common.WriteOnDiff(path)
        for i in contents_list[1:]:
          f.write('%s\n' % i)
        f.close()

      elif run_command:
        use_shell = True
        if match['is_array']:
          contents = eval(contents)
          use_shell = False

        # Check for a cached value to avoid executing commands, or generating
        # file lists more than once.
        # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is
        # possible that the command being invoked depends on the current
        # directory. For that case the syntax needs to be extended so that the
        # directory is also used in cache_key (it becomes a tuple).
        # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
        # someone could author a set of GYP files where each time the command
        # is invoked it produces different output by design. When the need
        # arises, the syntax should be extended to support no caching off a
        # command's output so it is run every time.
        cache_key = str(contents)
        cached_value = cached_command_results.get(cache_key, None)
        if cached_value is None:
          gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                          "Executing command '%s' in directory '%s'" %
                          (contents,build_file_dir))

          # Fix up command with platform specific workarounds.
          contents = FixupPlatformCommand(contents)
          p = subprocess.Popen(contents, shell=use_shell,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               stdin=subprocess.PIPE,
                               cwd=build_file_dir)

          (p_stdout, p_stderr) = p.communicate('')

          if p.wait() != 0 or p_stderr:
            sys.stderr.write(p_stderr)
            # Simulate check_call behavior, since check_call only exists
            # in python 2.5 and later.
            raise Exception("Call to '%s' returned exit status %d." %
                            (contents, p.returncode))
          replacement = p_stdout.rstrip()

          cached_command_results[cache_key] = replacement
        else:
          gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                          "Had cache value for command '%s' in directory '%s'" %
                          (contents,build_file_dir))
          replacement = cached_value

      else:
        # Plain variable reference: the contents must name a known variable.
        if not contents in variables:
          raise KeyError, 'Undefined variable ' + contents + \
                          ' in ' + build_file
        replacement = variables[contents]

      if isinstance(replacement, list):
        for item in replacement:
          if not isinstance(item, str) and not isinstance(item, int):
            raise TypeError, 'Variable ' + contents + \
                             ' must expand to a string or list of strings; ' + \
                             'list contains a ' + \
                             item.__class__.__name__
        # Run through the list and handle variable expansions in it. Since
        # the list is guaranteed not to contain dicts, this won't do anything
        # with conditions sections.
        ProcessVariablesAndConditionsInList(replacement, is_late, variables,
                                            build_file)
      elif not isinstance(replacement, str) and \
           not isinstance(replacement, int):
        raise TypeError, 'Variable ' + contents + \
                         ' must expand to a string or list of strings; ' + \
                         'found a ' + replacement.__class__.__name__

      if expand_to_list:
        # Expanding in list context. It's guaranteed that there's only one
        # replacement to do in |input_str| and that it's this replacement. See
        # above.
        if isinstance(replacement, list):
          # If it's already a list, make a copy.
          output = replacement[:]
        else:
          # Split it the same way sh would split arguments.
          output = shlex.split(str(replacement))
      else:
        # Expanding in string context.
        encoded_replacement = ''
        if isinstance(replacement, list):
          # When expanding a list into string context, turn the list items
          # into a string in a way that will work with a subprocess call.
          #
          # TODO(mark): This isn't completely correct. This should
          # call a generator-provided function that observes the
          # proper list-to-argument quoting rules on a specific
          # platform instead of just calling the POSIX encoding
          # routine.
          encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
        else:
          encoded_replacement = replacement

        # Splice the replacement into the output string at the match site.
        output = output[:replace_start] + str(encoded_replacement) + \
                 output[replace_end:]
      # Prepare for the next match iteration.
      input_str = output

    # Look for more matches now that we've replaced some, to deal with
    # expanding local variables (variables defined in the same
    # variables block as this one).
    gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                    "Found output %s, recursing." % repr(output))
    if isinstance(output, list):
      new_output = []
      for item in output:
        new_output.append(ExpandVariables(item, is_late, variables, build_file))
      output = new_output
    else:
      output = ExpandVariables(output, is_late, variables, build_file)

  # Convert all strings that are canonically-represented integers into integers.
  if isinstance(output, list):
    for index in xrange(0, len(output)):
      if IsStrCanonicalInt(output[index]):
        output[index] = int(output[index])
  elif IsStrCanonicalInt(output):
    output = int(output)

  gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                  "Expanding %s to %s" % (repr(input), repr(output)))
  return output
727
728
def ProcessConditionsInDict(the_dict, is_late, variables, build_file):
  """Evaluates a 'conditions' or 'target_conditions' section in the_dict.

  The section name depends on is_late: 'conditions' when is_late is False,
  'target_conditions' when it is True.

  Each item in a conditions list consists of cond_expr, a string expression
  evaluated as the condition, and true_dict, a dict that will be merged into
  the_dict if cond_expr evaluates to true.  Optionally, a third item,
  false_dict, may be present.  false_dict is merged into the_dict if
  cond_expr evaluates to false.

  Any dict merged into the_dict will be recursively processed for nested
  conditionals and other expansions, also according to is_late, immediately
  prior to being merged.

  Raises TypeError for a non-list condition, IndexError for a condition of
  the wrong length, ValueError if the condition expression expands to a
  non-str/int, and re-raises SyntaxError/NameError from evaluation with
  added context.
  """

  if not is_late:
    conditions_key = 'conditions'
  else:
    conditions_key = 'target_conditions'

  if not conditions_key in the_dict:
    return

  conditions_list = the_dict[conditions_key]
  # Unhook the conditions list, it's no longer needed.
  del the_dict[conditions_key]

  for condition in conditions_list:
    if not isinstance(condition, list):
      raise TypeError(conditions_key + ' must be a list')
    if len(condition) != 2 and len(condition) != 3:
      # It's possible that condition[0] won't work in which case this
      # attempt will raise its own IndexError. That's probably fine.
      # BUG FIX: len(condition) is an int; the original concatenated it
      # directly to a str, raising TypeError instead of this IndexError.
      raise IndexError(conditions_key + ' ' + condition[0] +
                       ' must be length 2 or 3, not ' + str(len(condition)))

    [cond_expr, true_dict] = condition[0:2]
    false_dict = None
    if len(condition) == 3:
      false_dict = condition[2]

    # Do expansions on the condition itself. Since the condition can naturally
    # contain variable references without needing to resort to GYP expansion
    # syntax, this is of dubious value for variables, but someone might want to
    # use a command expansion directly inside a condition.
    cond_expr_expanded = ExpandVariables(cond_expr, is_late, variables,
                                         build_file)
    if not isinstance(cond_expr_expanded, str) and \
       not isinstance(cond_expr_expanded, int):
      # BUG FIX: the original referenced an undefined name "expanded" here,
      # turning this diagnostic into a NameError.
      raise ValueError(
          'Variable expansion in this context permits str and int ' +
          'only, found ' + cond_expr_expanded.__class__.__name__)

    try:
      # Compile once, then evaluate with builtins disabled so the condition
      # can only reference the supplied variables.
      ast_code = compile(cond_expr_expanded, '<string>', 'eval')

      if eval(ast_code, {'__builtins__': None}, variables):
        merge_dict = true_dict
      else:
        merge_dict = false_dict
    except SyntaxError as e:
      syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
                                 'at character %d.' %
                                 (str(e.args[0]), e.text, build_file, e.offset),
                                 e.filename, e.lineno, e.offset, e.text)
      raise syntax_error
    except NameError as e:
      gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
                                 (cond_expr_expanded, build_file))
      raise

    if merge_dict is not None:
      # Expand variables and nested conditionals in the merge_dict before
      # merging it.
      ProcessVariablesAndConditionsInDict(merge_dict, is_late,
                                          variables, build_file)

      MergeDicts(the_dict, merge_dict, build_file, build_file)
806
807
def LoadAutomaticVariablesFromDict(variables, the_dict):
  """Adds "automatic" variables from the_dict to |variables|.

  Any key in the_dict whose value is a plain string, int, or list becomes an
  automatic variable.  The variable name is the key name with a "_"
  character prepended.  Dict-valued keys are skipped.

  Note: iterates with items() rather than the Python-2-only iteritems();
  behavior is identical and this keeps the function forward-compatible.
  """
  for key, value in the_dict.items():
    if isinstance(value, str) or isinstance(value, int) or \
       isinstance(value, list):
      variables['_' + key] = value
815
816
def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
  """Loads entries from the_dict's "variables" dict into |variables|.

  Any keys in the_dict's "variables" dict, if it has one, becomes a
  variable.  The variable name is the key name in the "variables" dict.
  Variables that end with the % character are set only if they are unset in
  the variables dict.  the_dict_key is the name of the key that accesses
  the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
  (it could be a list or it could be parentless because it is a root dict),
  the_dict_key will be None.
  """
  for key, value in the_dict.get('variables', {}).items():
    # Only plain values may become variables; nested dicts are skipped.
    if not isinstance(value, str) and not isinstance(value, int) and \
       not isinstance(value, list):
      continue

    if key.endswith('%'):
      variable_name = key[:-1]
      if variable_name in variables:
        # If the variable is already set, don't set it.
        continue
      # BUG FIX: the original used "the_dict_key is 'variables'", an identity
      # comparison that only worked because CPython interns short string
      # literals; "==" is the correct comparison.
      if the_dict_key == 'variables' and variable_name in the_dict:
        # If the variable is set without a % in the_dict, and the_dict is a
        # variables dict (making |variables| a variables sub-dict of a
        # variables dict), use the_dict's definition.
        value = the_dict[variable_name]
    else:
      variable_name = key

    variables[variable_name] = value
844
845
def ProcessVariablesAndConditionsInDict(the_dict, is_late, variables_in,
                                        build_file, the_dict_key=None):
  """Handle all variable and command expansion and conditional evaluation.

  This function is the public entry point for all variable expansions and
  conditional evaluations. The variables_in dictionary will not be modified
  by this function.
  """

  # Make a copy of the variables_in dict that can be modified during the
  # loading of automatics and the loading of the variables dict.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)

  if 'variables' in the_dict:
    # Make sure all the local variables are added to the variables
    # list before we process them so that you can reference one
    # variable from another. They will be fully expanded by recursion
    # in ExpandVariables.
    for key, value in the_dict['variables'].iteritems():
      variables[key] = value

    # Handle the associated variables dict first, so that any variable
    # references within can be resolved prior to using them as variables.
    # Pass a copy of the variables dict to avoid having it be tainted.
    # Otherwise, it would have extra automatics added for everything that
    # should just be an ordinary variable in this scope.
    ProcessVariablesAndConditionsInDict(the_dict['variables'], is_late,
                                        variables, build_file, 'variables')

  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  # First pass: expand every string value directly held by the_dict.
  for key, value in the_dict.iteritems():
    # Skip "variables", which was already processed if present.
    if key != 'variables' and isinstance(value, str):
      expanded = ExpandVariables(value, is_late, variables, build_file)
      if not isinstance(expanded, str) and not isinstance(expanded, int):
        raise ValueError, \
              'Variable expansion in this context permits str and int ' + \
              'only, found ' + expanded.__class__.__name__ + ' for ' + key
      the_dict[key] = expanded

  # Variable expansion may have resulted in changes to automatics. Reload.
  # TODO(mark): Optimization: only reload if no changes were made.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  # Process conditions in this dict. This is done after variable expansion
  # so that conditions may take advantage of expanded variables. For example,
  # if the_dict contains:
  #   {'type':       '<(library_type)',
  #    'conditions': [['_type=="static_library"', { ... }]]},
  # _type, as used in the condition, will only be set to the value of
  # library_type if variable expansion is performed before condition
  # processing. However, condition processing should occur prior to recursion
  # so that variables (both automatic and "variables" dict type) may be
  # adjusted by conditions sections, merged into the_dict, and have the
  # intended impact on contained dicts.
  #
  # This arrangement means that a "conditions" section containing a
  # "variables" section will only have those variables effective in subdicts,
  # not in the_dict. The workaround is to put a "conditions" section within a
  # "variables" section. For example:
  #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
  #    'defines':    ['<(define)'],
  #    'my_subdict': {'defines': ['<(define)']}},
  # will not result in "IS_MAC" being appended to the "defines" list in the
  # current scope but would result in it being appended to the "defines" list
  # within "my_subdict". By comparison:
  #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
  #    'defines':    ['<(define)'],
  #    'my_subdict': {'defines': ['<(define)']}},
  # will append "IS_MAC" to both "defines" lists.

  # Evaluate conditions sections, allowing variable expansions within them
  # as well as nested conditionals. This will process a 'conditions' or
  # 'target_conditions' section, perform appropriate merging and recursive
  # conditional and variable processing, and then remove the conditions section
  # from the_dict if it is present.
  ProcessConditionsInDict(the_dict, is_late, variables, build_file)

  # Conditional processing may have resulted in changes to automatics or the
  # variables dict. Reload.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  # Recurse into child dicts, or process child lists which may result in
  # further recursion into descendant dicts.
  for key, value in the_dict.iteritems():
    # Skip "variables" and string values, which were already processed if
    # present.
    if key == 'variables' or isinstance(value, str):
      continue
    if isinstance(value, dict):
      # Pass a copy of the variables dict so that subdicts can't influence
      # parents.
      ProcessVariablesAndConditionsInDict(value, is_late, variables,
                                          build_file, key)
    elif isinstance(value, list):
      # The list itself can't influence the variables dict, and
      # ProcessVariablesAndConditionsInList will make copies of the variables
      # dict if it needs to pass it to something that can influence it. No
      # copy is necessary here.
      ProcessVariablesAndConditionsInList(value, is_late, variables,
                                          build_file)
    elif not isinstance(value, int):
      raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
                       ' for ' + key
956
957
def ProcessVariablesAndConditionsInList(the_list, is_late, variables,
                                        build_file):
  """Expands variables and evaluates conditions in each item of the_list.

  Dict and list items are processed recursively.  String items are expanded
  in place; a string that expands to a list is spliced into the_list at its
  position.  Int items are left untouched.  Raises ValueError when a string
  expands to an unsupported type and TypeError for items of unknown type.
  """
  # Iterate using an index so that new values can be assigned into the_list.
  index = 0
  while index < len(the_list):
    item = the_list[index]
    if isinstance(item, dict):
      # Make a copy of the variables dict so that it won't influence anything
      # outside of its own scope.
      ProcessVariablesAndConditionsInDict(item, is_late, variables, build_file)
    elif isinstance(item, list):
      ProcessVariablesAndConditionsInList(item, is_late, variables, build_file)
    elif isinstance(item, str):
      expanded = ExpandVariables(item, is_late, variables, build_file)
      if isinstance(expanded, str) or isinstance(expanded, int):
        the_list[index] = expanded
      elif isinstance(expanded, list):
        # Splice the expanded list into place of the original string.
        del the_list[index]
        for expanded_item in expanded:
          the_list.insert(index, expanded_item)
          index = index + 1

        # index now identifies the next item to examine. Continue right now
        # without falling into the index increment below.
        continue
      else:
        # BUG FIX: the original concatenated the int |index| directly to the
        # message, raising TypeError instead of the intended ValueError.
        raise ValueError(
            'Variable expansion in this context permits strings and ' +
            'lists only, found ' + expanded.__class__.__name__ + ' at ' +
            str(index))
    elif not isinstance(item, int):
      # BUG FIX: str(index) for the same reason as above.
      raise TypeError('Unknown type ' + item.__class__.__name__ +
                      ' at index ' + str(index))
    index = index + 1
992
993
def BuildTargetsDict(data):
  """Builds a dict mapping fully-qualified target names to their target dicts.

  |data| is a dict mapping loaded build files by pathname relative to the
  current directory.  Values in |data| are build file contents.  For each
  |data| value with a "targets" key, the value of the "targets" key is taken
  as a list containing target dicts.  Each target's fully-qualified name is
  constructed from the pathname of the build file (|data| key) and its
  "target_name" property.  These fully-qualified names are used as the keys
  in the returned dict.  These keys provide access to the target dicts,
  the dicts in the "targets" lists.

  Raises KeyError if two targets collapse to the same fully-qualified name.
  """

  targets = {}
  for build_file in data['target_build_files']:
    for target in data[build_file].get('targets', []):
      target_name = gyp.common.QualifiedTarget(build_file,
                                               target['target_name'],
                                               target['toolset'])
      if target_name in targets:
        # Modernized from the Python-2-only "raise KeyError, msg" form.
        raise KeyError('Duplicate target definitions for ' + target_name)
      targets[target_name] = target

  return targets
1018
1019
def QualifyDependencies(targets):
  """Make dependency links fully-qualified relative to the current directory.

  |targets| is a dict mapping fully-qualified target names to their target
  dicts.  For each target in this dict, keys known to contain dependency
  links are examined, and any dependencies referenced will be rewritten
  so that they are fully-qualified and relative to the current directory.
  All rewritten dependencies are suitable for use as keys to |targets| or a
  similar dict.

  Raises KeyError if a dependency appears in a section such as
  "dependencies_original" without also appearing in "dependencies".
  """

  for target, target_dict in targets.iteritems():
    target_build_file = gyp.common.BuildFile(target)
    toolset = target_dict['toolset']
    # dependency_sections is a module-level list of dict keys that hold
    # dependency links (defined outside this view).
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      for index in xrange(0, len(dependencies)):
        dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
            target_build_file, dependencies[index], toolset)
        # NOTE(review): this "global" is a no-op for a read-only module-level
        # name; kept byte-identical to the original.
        global multiple_toolsets
        if not multiple_toolsets:
          # Ignore toolset specification in the dependency if it is specified.
          dep_toolset = toolset
        dependency = gyp.common.QualifiedTarget(dep_file,
                                                dep_target,
                                                dep_toolset)
        # Rewrite the entry in place with its fully-qualified form.
        dependencies[index] = dependency

        # Make sure anything appearing in a list other than "dependencies" also
        # appears in the "dependencies" list.
        if dependency_key != 'dependencies' and \
           dependency not in target_dict['dependencies']:
          raise KeyError, 'Found ' + dependency + ' in ' + dependency_key + \
                          ' of ' + target + ', but not in dependencies'
1054
1055
def ExpandWildcardDependencies(targets, data):
  """Expands dependencies specified as build_file:*.

  For each target in |targets|, examines sections containing links to other
  targets.  If any such section contains a link of the form build_file:*, it
  is taken as a wildcard link, and is expanded to list each target in
  build_file.  The |data| dict provides access to build file dicts.

  Any target that does not wish to be included by wildcard can provide an
  optional "suppress_wildcard" key in its target dict.  When present and
  true, a wildcard dependency link will not include such targets.

  All dependency names, including the keys to |targets| and the values in each
  dependency list, must be qualified when this function is called.
  """

  for target, target_dict in targets.iteritems():
    toolset = target_dict['toolset']
    target_build_file = gyp.common.BuildFile(target)
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])

      # Loop this way instead of "for dependency in" or "for index in xrange"
      # because the dependencies list will be modified within the loop body.
      index = 0
      while index < len(dependencies):
        (dependency_build_file, dependency_target, dependency_toolset) = \
            gyp.common.ParseQualifiedTarget(dependencies[index])
        if dependency_target != '*' and dependency_toolset != '*':
          # Not a wildcard. Keep it moving.
          index = index + 1
          continue

        if dependency_build_file == target_build_file:
          # It's an error for a target to depend on all other targets in
          # the same file, because a target cannot depend on itself.
          raise KeyError, 'Found wildcard in ' + dependency_key + ' of ' + \
                          target + ' referring to same build file'

        # Take the wildcard out and adjust the index so that the next
        # dependency in the list will be processed the next time through the
        # loop.
        del dependencies[index]
        index = index - 1

        # Loop through the targets in the other build file, adding them to
        # this target's list of dependencies in place of the removed
        # wildcard.
        dependency_target_dicts = data[dependency_build_file]['targets']
        for dependency_target_dict in dependency_target_dicts:
          if int(dependency_target_dict.get('suppress_wildcard', False)):
            continue
          dependency_target_name = dependency_target_dict['target_name']
          if (dependency_target != '*' and
              dependency_target != dependency_target_name):
            continue
          dependency_target_toolset = dependency_target_dict['toolset']
          if (dependency_toolset != '*' and
              dependency_toolset != dependency_target_toolset):
            continue
          dependency = gyp.common.QualifiedTarget(dependency_build_file,
                                                  dependency_target_name,
                                                  dependency_target_toolset)
          # Incrementing before insert() places each expansion after the
          # current position, so the inserted entries are not re-examined.
          index = index + 1
          dependencies.insert(index, dependency)

        index = index + 1
1123
1124
1125 class DependencyGraphNode(object):
1126 """
1127
1128 Attributes:
1129 ref: A reference to an object that this DependencyGraphNode represents.
1130 dependencies: List of DependencyGraphNodes on which this one depends.
1131 dependents: List of DependencyGraphNodes that depend on this one.
1132 """
1133
  class CircularException(Exception):
    # NOTE(review): presumably raised when a dependency cycle is detected;
    # the raising site is outside this portion of the file — confirm.
    pass
1136
  def __init__(self, ref):
    # The object this node represents (e.g. a qualified target name, or
    # None for the synthetic root node).
    self.ref = ref
    # DependencyGraphNodes on which this node depends.
    self.dependencies = []
    # DependencyGraphNodes that depend on this node.
    self.dependents = []
1141
  def FlattenToList(self):
    """Topologically sorts the graph rooted at this node.

    Returns a list of the "ref" attributes of the reachable nodes, ordered
    so that every node appears after all of its dependencies and before all
    of its dependents.  Nodes caught in a dependency cycle never reach
    in-degree zero and so are omitted from the result.
    """
    # flat_list is the sorted list of dependencies - actually, the list items
    # are the "ref" attributes of DependencyGraphNodes. Every target will
    # appear in flat_list after all of its dependencies, and before all of its
    # dependents.
    flat_list = []

    # in_degree_zeros is the list of DependencyGraphNodes that have no
    # dependencies not in flat_list. Initially, it is a copy of the children
    # of this node, because when the graph was built, nodes with no
    # dependencies were made implicit dependents of the root node.
    in_degree_zeros = self.dependents[:]

    while in_degree_zeros:
      # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
      # can be appended to flat_list. Take these nodes out of in_degree_zeros
      # as work progresses, so that the next node to process from the list can
      # always be accessed at a consistent position.
      node = in_degree_zeros.pop(0)
      flat_list.append(node.ref)

      # Look at dependents of the node just added to flat_list. Some of them
      # may now belong in in_degree_zeros.
      for node_dependent in node.dependents:
        is_in_degree_zero = True
        for node_dependent_dependency in node_dependent.dependencies:
          if not node_dependent_dependency.ref in flat_list:
            # The dependent has one or more dependencies not in flat_list.
            # There will be more chances to add it to flat_list when examining
            # it again as a dependent of those other dependencies, provided
            # that there are no cycles.
            is_in_degree_zero = False
            break

        if is_in_degree_zero:
          # All of the dependent's dependencies are already in flat_list. Add
          # it to in_degree_zeros where it will be processed in a future
          # iteration of the outer loop.
          in_degree_zeros.append(node_dependent)

    return flat_list
1183
1184 def DirectDependencies(self, dependencies=None):
1185 """Returns a list of just direct dependencies."""
1186 if dependencies == None:
1187 dependencies = []
1188
1189 for dependency in self.dependencies:
1190 # Check for None, corresponding to the root node.
1191 if dependency.ref != None and dependency.ref not in dependencies:
1192 dependencies.append(dependency.ref)
1193
1194 return dependencies
1195
1196 def _AddImportedDependencies(self, targets, dependencies=None):
1197 """Given a list of direct dependencies, adds indirect dependencies that
1198 other dependencies have declared to export their settings.
1199
1200 This method does not operate on self. Rather, it operates on the list
1201 of dependencies in the |dependencies| argument. For each dependency in
1202 that list, if any declares that it exports the settings of one of its
1203 own dependencies, those dependencies whose settings are "passed through"
1204 are added to the list. As new items are added to the list, they too will
1205 be processed, so it is possible to import settings through multiple levels
1206 of dependencies.
1207
1208 This method is not terribly useful on its own, it depends on being
1209 "primed" with a list of direct dependencies such as one provided by
1210 DirectDependencies. DirectAndImportedDependencies is intended to be the
1211 public entry point.
1212 """
1213
1214 if dependencies == None:
1215 dependencies = []
1216
1217 index = 0
1218 while index < len(dependencies):
1219 dependency = dependencies[index]
1220 dependency_dict = targets[dependency]
1221 # Add any dependencies whose settings should be imported to the list
1222 # if not already present. Newly-added items will be checked for
1223 # their own imports when the list iteration reaches them.
1224 # Rather than simply appending new items, insert them after the
1225 # dependency that exported them. This is done to more closely match
1226 # the depth-first method used by DeepDependencies.
1227 add_index = 1
1228 for imported_dependency in \
1229 dependency_dict.get('export_dependent_settings', []):
1230 if imported_dependency not in dependencies:
1231 dependencies.insert(index + add_index, imported_dependency)
1232 add_index = add_index + 1
1233 index = index + 1
1234
1235 return dependencies
1236
1237 def DirectAndImportedDependencies(self, targets, dependencies=None):
1238 """Returns a list of a target's direct dependencies and all indirect
1239 dependencies that a dependency has advertised settings should be exported
1240 through the dependency for.
1241 """
1242
1243 dependencies = self.DirectDependencies(dependencies)
1244 return self._AddImportedDependencies(targets, dependencies)
1245
1246 def DeepDependencies(self, dependencies=None):
1247 """Returns a list of all of a target's dependencies, recursively."""
1248 if dependencies == None:
1249 dependencies = []
1250
1251 for dependency in self.dependencies:
1252 # Check for None, corresponding to the root node.
1253 if dependency.ref != None and dependency.ref not in dependencies:
1254 dependencies.append(dependency.ref)
1255 dependency.DeepDependencies(dependencies)
1256
1257 return dependencies
1258
1259 def LinkDependencies(self, targets, dependencies=None, initial=True):
1260 """Returns a list of dependency targets that are linked into this target.
1261
1262 This function has a split personality, depending on the setting of
1263 |initial|. Outside callers should always leave |initial| at its default
1264 setting.
1265
1266 When adding a target to the list of dependencies, this function will
1267 recurse into itself with |initial| set to False, to collect depenedencies
1268 that are linked into the linkable target for which the list is being built.
1269 """
1270 if dependencies == None:
1271 dependencies = []
1272
1273 # Check for None, corresponding to the root node.
1274 if self.ref == None:
1275 return dependencies
1276
1277 # It's kind of sucky that |targets| has to be passed into this function,
1278 # but that's presently the easiest way to access the target dicts so that
1279 # this function can find target types.
1280
1281 if not 'target_name' in targets[self.ref]:
1282 raise Exception("Missing 'target_name' field in target.")
1283
1284 try:
1285 target_type = targets[self.ref]['type']
1286 except KeyError, e:
1287 raise Exception("Missing 'type' field in target %s" %
1288 targets[self.ref]['target_name'])
1289
1290 is_linkable = target_type in linkable_types
1291
1292 if initial and not is_linkable:
1293 # If this is the first target being examined and it's not linkable,
1294 # return an empty list of link dependencies, because the link
1295 # dependencies are intended to apply to the target itself (initial is
1296 # True) and this target won't be linked.
1297 return dependencies
1298
1299 # Executables and loadable modules are already fully and finally linked.
1300 # Nothing else can be a link dependency of them, there can only be
1301 # dependencies in the sense that a dependent target might run an
1302 # executable or load the loadable_module.
1303 if not initial and target_type in ('executable', 'loadable_module'):
1304 return dependencies
1305
1306 # The target is linkable, add it to the list of link dependencies.
1307 if self.ref not in dependencies:
1308 if target_type != 'none':
1309 # Special case: "none" type targets don't produce any linkable products
1310 # and shouldn't be exposed as link dependencies, although dependencies
1311 # of "none" type targets may still be link dependencies.
1312 dependencies.append(self.ref)
1313 if initial or not is_linkable:
1314 # If this is a subsequent target and it's linkable, don't look any
1315 # further for linkable dependencies, as they'll already be linked into
1316 # this target linkable. Always look at dependencies of the initial
1317 # target, and always look at dependencies of non-linkables.
1318 for dependency in self.dependencies:
1319 dependency.LinkDependencies(targets, dependencies, False)
1320
1321 return dependencies
1322
1323
def BuildDependencyList(targets):
  """Builds and flattens the dependency graph for |targets|.

  Creates a DependencyGraphNode for every target, links the nodes according
  to each target's 'dependencies' list, and attaches dependency-less targets
  to an artificial root node so that a single traversal reaches the whole
  graph.

  Arguments:
    targets: Dict mapping fully-qualified target names to target dicts.
  Returns:
    A two-item list [dependency_nodes, flat_list], where dependency_nodes
    maps each target name to its DependencyGraphNode and flat_list is a
    topologically-sorted list of target names in which every target appears
    after all of its dependencies.
  Raises:
    DependencyGraphNode.CircularException: if the graph contains a cycle.
    KeyError: (re-raised, annotated) if a target names an unknown dependency.
  """
  # Create a DependencyGraphNode for each target. Put it into a dict for easy
  # access.
  dependency_nodes = {}
  for target in targets:
    if not target in dependency_nodes:
      dependency_nodes[target] = DependencyGraphNode(target)

  # Set up the dependency links.  Targets that have no dependencies are
  # treated as dependent on root_node.
  root_node = DependencyGraphNode(None)
  for target, spec in targets.iteritems():
    target_node = dependency_nodes[target]
    if 'dependencies' not in spec or len(spec['dependencies']) == 0:
      target_node.dependencies = [root_node]
      root_node.dependents.append(target_node)
    else:
      # Link each declared dependency in order, annotating lookup failures
      # with the target being processed.
      for dependency in spec['dependencies']:
        try:
          dependency_node = dependency_nodes[dependency]
        except KeyError as e:
          gyp.common.ExceptionAppend(e,
                                     'while trying to load target %s' % target)
          raise
        target_node.dependencies.append(dependency_node)
        dependency_node.dependents.append(target_node)

  flat_list = root_node.FlattenToList()

  # If there's anything left unvisited, there must be a circular dependency
  # (cycle).  If you need to figure out what's wrong, look for elements of
  # targets that are not in flat_list.
  if len(flat_list) != len(targets):
    raise DependencyGraphNode.CircularException(
        'Some targets not reachable, cycle in dependency graph detected')

  return [dependency_nodes, flat_list]
1364
1365
def VerifyNoGYPFileCircularDependencies(targets):
  """Checks that dependencies between .gyp files contain no cycles.

  Builds a graph whose nodes are the .gyp files containing the targets in
  |targets|, with an edge from each file to every distinct file that any of
  its targets depends on.  A .gyp file referring back to itself is allowed
  and ignored.

  Arguments:
    targets: Dict mapping fully-qualified target names to target dicts.
  Raises:
    DependencyGraphNode.CircularException: if the .gyp file graph contains a
        cycle, naming the files involved.
    KeyError: (re-raised, annotated) if a dependency's build file is unknown.
  """
  # Create a DependencyGraphNode for each gyp file containing a target.  Put
  # it into a dict for easy access.
  dependency_nodes = {}
  for target in targets.iterkeys():
    build_file = gyp.common.BuildFile(target)
    if not build_file in dependency_nodes:
      dependency_nodes[build_file] = DependencyGraphNode(build_file)

  # Set up the dependency links.
  for target, spec in targets.iteritems():
    build_file = gyp.common.BuildFile(target)
    build_file_node = dependency_nodes[build_file]
    for dependency in spec.get('dependencies', []):
      try:
        dependency_build_file = gyp.common.BuildFile(dependency)
        if dependency_build_file == build_file:
          # A .gyp file is allowed to refer back to itself.
          continue
        dependency_node = dependency_nodes[dependency_build_file]
        if dependency_node not in build_file_node.dependencies:
          build_file_node.dependencies.append(dependency_node)
          dependency_node.dependents.append(build_file_node)
      except KeyError as e:
        gyp.common.ExceptionAppend(
            e, 'while computing dependencies of .gyp file %s' % build_file)
        raise

  # Files that have no dependencies are treated as dependent on root_node.
  root_node = DependencyGraphNode(None)
  for build_file_node in dependency_nodes.itervalues():
    if len(build_file_node.dependencies) == 0:
      build_file_node.dependencies.append(root_node)
      root_node.dependents.append(build_file_node)

  flat_list = root_node.FlattenToList()

  # If there's anything left unvisited, there must be a circular dependency
  # (cycle).  Avoid shadowing the |file| builtin while collecting the
  # unreachable files for the error message.
  if len(flat_list) != len(dependency_nodes):
    bad_files = [build_file for build_file in dependency_nodes.iterkeys()
                 if build_file not in flat_list]
    raise DependencyGraphNode.CircularException(
        'Some files not reachable, cycle in .gyp file dependency graph '
        'detected involving some or all of: ' +
        ' '.join(bad_files))
1415
1416
def DoDependentSettings(key, flat_list, targets, dependency_nodes):
  """Merges dependency-exported settings into every target in |flat_list|.

  |key| selects which settings section to propagate and must be one of
  'all_dependent_settings', 'direct_dependent_settings', or 'link_settings';
  it also determines how far through the graph the settings travel.
  """
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)

    # The scope of propagation depends on which settings section is being
    # processed.
    if key == 'all_dependent_settings':
      dependencies = dependency_nodes[target].DeepDependencies()
    elif key == 'direct_dependent_settings':
      dependencies = \
          dependency_nodes[target].DirectAndImportedDependencies(targets)
    elif key == 'link_settings':
      dependencies = dependency_nodes[target].LinkDependencies(targets)
    else:
      raise KeyError("DoDependentSettings doesn't know how to determine " +
                     'dependencies for ' + key)

    # Fold each dependency's |key| section into the depending target.
    for dependency in dependencies:
      dependency_dict = targets[dependency]
      if key in dependency_dict:
        MergeDicts(target_dict, dependency_dict[key],
                   build_file, gyp.common.BuildFile(dependency))
1443
1444
def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes):
  """Normalizes 'dependencies' lists with respect to static libraries.

  Static libraries drop 'dependencies' entries that refer to other static
  libraries (unless marked 'hard_dependency'), preserving the untouched list
  in 'dependencies_original'.  Linkable targets gain 'dependencies' entries
  for all of their computed link dependencies.  Modifies the target dicts in
  |targets| in place.
  """
  # Recompute target "dependencies" properties. For each static library
  # target, remove "dependencies" entries referring to other static libraries,
  # unless the dependency has the "hard_dependency" attribute set. For each
  # linkable target, add a "dependencies" entry referring to all of the
  # target's computed list of link dependencies (including static libraries)
  # if no such entry is already present.
  for target in flat_list:
    target_dict = targets[target]
    target_type = target_dict['type']

    if target_type == 'static_library':
      if not 'dependencies' in target_dict:
        continue

      # Keep a copy of the original list before pruning, for consumers that
      # need the declared (rather than adjusted) dependencies.
      target_dict['dependencies_original'] = target_dict.get(
          'dependencies', [])[:]

      index = 0
      while index < len(target_dict['dependencies']):
        dependency = target_dict['dependencies'][index]
        dependency_dict = targets[dependency]
        if dependency_dict['type'] == 'static_library' and \
           (not 'hard_dependency' in dependency_dict or \
            not dependency_dict['hard_dependency']):
          # A static library should not depend on another static library unless
          # the dependency relationship is "hard," which should only be done
          # when a dependent relies on some side effect other than just the
          # build product, like a rule or action output. Take the dependency
          # out of the list, and don't increment index because the next
          # dependency to analyze will shift into the index formerly occupied
          # by the one being removed.
          del target_dict['dependencies'][index]
        else:
          index = index + 1

      # If the dependencies list is empty, it's not needed, so unhook it.
      if len(target_dict['dependencies']) == 0:
        del target_dict['dependencies']

    elif target_type in linkable_types:
      # Get a list of dependency targets that should be linked into this
      # target. Add them to the dependencies list if they're not already
      # present.

      link_dependencies = dependency_nodes[target].LinkDependencies(targets)
      for dependency in link_dependencies:
        if dependency == target:
          continue
        if not 'dependencies' in target_dict:
          target_dict['dependencies'] = []
        if not dependency in target_dict['dependencies']:
          target_dict['dependencies'].append(dependency)
1498
# Initialize this here to speed up MakePathRelative.
# Matches values exempt from path rebasing: an optional leading single or
# double quote followed by one of -, /, $, <, or > (see MakePathRelative for
# the meaning of each character).
exception_re = re.compile(r'''["']?[-/$<>]''')
1501
1502
def MakePathRelative(to_file, fro_file, item):
  """Rebases the relative path |item| from |fro_file|'s dict to |to_file|'s.

  A path in a build file dict is relative to the file it came from; when the
  value moves into another build file's dict, it must be re-expressed
  relative to that file.  Items beginning with any of the following are
  returned unmodified:
    / Used when a path is already absolute (shortcut optimization;
      such paths would be returned as absolute anyway)
    $ Used for build environment variables
    - Used for some build environment flags (such as -lapr-1 in a
      "libraries" section)
    < Used for our own variable and command expansions (see ExpandVariables)
    > Used for our own variable and command expansions (see ExpandVariables)
  A leading " or ' marks a quoted value; the character after the quote is
  checked instead.
  """
  # No rebasing is needed when both dicts live in the same build file, or
  # when the item is one of the exempt forms above.
  if to_file == fro_file or exception_re.match(item):
    return item

  # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
  # temporary measure. This should really be addressed by keeping all paths
  # in POSIX until actual project generation.
  relative_dir = gyp.common.RelativePath(os.path.dirname(fro_file),
                                         os.path.dirname(to_file))
  return os.path.normpath(os.path.join(relative_dir, item)).replace('\\', '/')
1530
1531
def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
  """Merges the list |fro| into the list |to|, copying items as needed.

  |to_file| and |fro_file| are the build files that |to| and |fro| came
  from.  When |is_paths| is True, string items are rebased from |fro_file|'s
  directory to |to_file|'s via MakePathRelative.  When |append| is True,
  items are appended to |to|; otherwise they are prepended, preserving their
  original relative order.  Strings and ints not beginning with '-' are
  treated as singletons: appending a duplicate is a no-op, and prepending a
  duplicate moves the item to the front.  Dict and list items are merged
  into fresh containers so |to| never shares mutable state with |fro|.

  Raises:
    TypeError: if |fro| contains an item that is not a str, int, dict, or
        list.
  """
  prepend_index = 0

  for item in fro:
    singleton = False
    if isinstance(item, str) or isinstance(item, int):
      # The cheap and easy case.
      if is_paths:
        to_item = MakePathRelative(to_file, fro_file, item)
      else:
        to_item = item

      if not isinstance(item, str) or not item.startswith('-'):
        # Any string that doesn't begin with a "-" is a singleton - it can
        # only appear once in a list, to be enforced by the list merge append
        # or prepend.
        singleton = True
    elif isinstance(item, dict):
      # Make a copy of the dictionary, continuing to look for paths to fix.
      # The other intelligent aspects of merge processing won't apply because
      # item is being merged into an empty dict.
      to_item = {}
      MergeDicts(to_item, item, to_file, fro_file)
    elif isinstance(item, list):
      # Recurse, making a copy of the list. If the list contains any
      # descendant dicts, path fixing will occur. Note that here, custom
      # values for is_paths and append are dropped; those are only to be
      # applied to |to| and |fro|, not sublists of |fro|. append shouldn't
      # matter anyway because the new |to_item| list is empty.
      to_item = []
      MergeLists(to_item, item, to_file, fro_file)
    else:
      raise TypeError, \
          'Attempt to merge list item of unsupported type ' + \
          item.__class__.__name__

    if append:
      # If appending a singleton that's already in the list, don't append.
      # This ensures that the earliest occurrence of the item will stay put.
      if not singleton or not to_item in to:
        to.append(to_item)
    else:
      # If prepending a singleton that's already in the list, remove the
      # existing instance and proceed with the prepend. This ensures that the
      # item appears at the earliest possible position in the list.
      while singleton and to_item in to:
        to.remove(to_item)

      # Don't just insert everything at index 0. That would prepend the new
      # items to the list in reverse order, which would be an unwelcome
      # surprise.
      to.insert(prepend_index, to_item)
      prepend_index = prepend_index + 1
1585
1586
def MergeDicts(to, fro, to_file, fro_file):
  """Merges the dict |fro| into the dict |to|, copying values as needed.

  |to_file| and |fro_file| are the build files the dicts came from; they are
  used to rebase path values (via MakePathRelative) for keys that
  IsPathSection identifies as path sections.  Scalar values overwrite, dicts
  merge recursively, and the merge policy for a list value is selected by
  the last character of its from-key ('=' replace, '+' prepend, '?' set only
  if absent, no suffix append).

  Raises:
    TypeError: if a value's type is incompatible with the existing value in
        |to|, or if a value has an unsupported type.
    KeyError: if |fro| contains the same list under keys with incompatible
        merge policies.
  """
  # I wanted to name the parameter "from" but it's a Python keyword...
  for k, v in fro.iteritems():
    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
    # copy semantics. Something else may want to merge from the |fro| dict
    # later, and having the same dict ref pointed to twice in the tree isn't
    # what anyone wants considering that the dicts may subsequently be
    # modified.
    if k in to:
      bad_merge = False
      if isinstance(v, str) or isinstance(v, int):
        if not (isinstance(to[k], str) or isinstance(to[k], int)):
          bad_merge = True
      elif v.__class__ != to[k].__class__:
        bad_merge = True

      if bad_merge:
        raise TypeError, \
            'Attempt to merge dict value of type ' + v.__class__.__name__ + \
            ' into incompatible type ' + to[k].__class__.__name__ + \
            ' for key ' + k
    if isinstance(v, str) or isinstance(v, int):
      # Overwrite the existing value, if any. Cheap and easy.
      is_path = IsPathSection(k)
      if is_path:
        to[k] = MakePathRelative(to_file, fro_file, v)
      else:
        to[k] = v
    elif isinstance(v, dict):
      # Recurse, guaranteeing copies will be made of objects that require it.
      if not k in to:
        to[k] = {}
      MergeDicts(to[k], v, to_file, fro_file)
    elif isinstance(v, list):
      # Lists in dicts can be merged with different policies, depending on
      # how the key in the "from" dict (k, the from-key) is written.
      #
      # If the from-key has ...the to-list will have this action
      # this character appended:... applied when receiving the from-list:
      #                           = replace
      #                           + prepend
      #                           ? set, only if to-list does not yet exist
      #                      (none) append
      #
      # This logic is list-specific, but since it relies on the associated
      # dict key, it's checked in this dict-oriented function.
      ext = k[-1]
      append = True
      if ext == '=':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '?']
        to[list_base] = []
      elif ext == '+':
        list_base = k[:-1]
        lists_incompatible = [list_base + '=', list_base + '?']
        append = False
      elif ext == '?':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '=', list_base + '+']
      else:
        list_base = k
        lists_incompatible = [list_base + '=', list_base + '?']

      # Some combinations of merge policies appearing together are meaningless.
      # It's stupid to replace and append simultaneously, for example. Append
      # and prepend are the only policies that can coexist.
      for list_incompatible in lists_incompatible:
        if list_incompatible in fro:
          raise KeyError, 'Incompatible list policies ' + k + ' and ' + \
                          list_incompatible

      if list_base in to:
        if ext == '?':
          # If the key ends in "?", the list will only be merged if it doesn't
          # already exist.
          continue
        if not isinstance(to[list_base], list):
          # This may not have been checked above if merging in a list with an
          # extension character.
          raise TypeError, \
              'Attempt to merge dict value of type ' + v.__class__.__name__ + \
              ' into incompatible type ' + to[list_base].__class__.__name__ + \
              ' for key ' + list_base + '(' + k + ')'
      else:
        to[list_base] = []

      # Call MergeLists, which will make copies of objects that require it.
      # MergeLists can recurse back into MergeDicts, although this will be
      # to make copies of dicts (with paths fixed), there will be no
      # subsequent dict "merging" once entering a list because lists are
      # always replaced, appended to, or prepended to.
      is_paths = IsPathSection(list_base)
      MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
    else:
      raise TypeError, \
          'Attempt to merge dict value of unsupported type ' + \
          v.__class__.__name__ + ' for key ' + k
1684
1685
def MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, visited):
  """Recursively merges |configuration| and its 'inherit_from' ancestors
  into |new_configuration_dict|.

  Parents are merged first (depth-first) so that the configuration's own
  settings win.  |visited| lists the configuration names already merged on
  this inheritance chain and is used to break inheritance cycles.  The
  'abstract' marker is stripped from the result.
  """
  # Skip if previously visited.
  if configuration in visited:
    return

  # Look at this configuration.
  configuration_dict = target_dict['configurations'][configuration]

  # Merge in parents.
  for parent in configuration_dict.get('inherit_from', []):
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, parent, visited + [configuration])

  # Merge it into the new config.
  MergeDicts(new_configuration_dict, configuration_dict,
             build_file, build_file)

  # Drop abstract.
  if 'abstract' in new_configuration_dict:
    del new_configuration_dict['abstract']
1707
1708
def SetUpConfigurations(target, target_dict):
  """Expands each concrete configuration of |target_dict| into a complete
  dict.

  Every non-abstract configuration starts as a deep copy of the target's own
  settings (minus keys named in the module-level non_configuration_keys),
  then has its 'inherit_from' parents and its own settings merged in.
  Abstract configurations are deleted afterwards, as are the target-level
  keys that were moved into the configurations.  Modifies |target_dict| in
  place.

  Arguments:
    target: Fully-qualified target name; used to locate the build file.
    target_dict: The target's spec dict.
  """
  global non_configuration_keys
  # key_suffixes is a list of key suffixes that might appear on key names.
  # These suffixes are handled in conditional evaluations (for =, +, and ?)
  # and rules/exclude processing (for ! and /). Keys with these suffixes
  # should be treated the same as keys without.
  key_suffixes = ['=', '+', '?', '!', '/']

  build_file = gyp.common.BuildFile(target)

  # Provide a single configuration by default if none exists.
  # TODO(mark): Signal an error if default_configurations exists but
  # configurations does not.
  if not 'configurations' in target_dict:
    target_dict['configurations'] = {'Default': {}}
  if not 'default_configuration' in target_dict:
    # Pick the alphabetically-first concrete (non-abstract) configuration.
    concrete = [i for i in target_dict['configurations'].keys()
                if not target_dict['configurations'][i].get('abstract')]
    target_dict['default_configuration'] = sorted(concrete)[0]

  for configuration in target_dict['configurations'].keys():
    old_configuration_dict = target_dict['configurations'][configuration]
    # Skip abstract configurations (saves work only).
    if old_configuration_dict.get('abstract'):
      continue
    # Configurations inherit (most) settings from the enclosing target scope.
    # Get the inheritance relationship right by making a copy of the target
    # dict.
    new_configuration_dict = copy.deepcopy(target_dict)

    # Take out the bits that don't belong in a "configurations" section.
    # Since configuration setup is done before conditional, exclude, and rules
    # processing, be careful with handling of the suffix characters used in
    # those phases.
    delete_keys = []
    for key in new_configuration_dict:
      key_ext = key[-1:]
      if key_ext in key_suffixes:
        key_base = key[:-1]
      else:
        key_base = key
      if key_base in non_configuration_keys:
        delete_keys.append(key)

    for key in delete_keys:
      del new_configuration_dict[key]

    # Merge in configuration (with all its parents first).
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, [])

    # Put the new result back into the target dict as a configuration.
    target_dict['configurations'][configuration] = new_configuration_dict

  # Now drop all the abstract ones.
  for configuration in target_dict['configurations'].keys():
    old_configuration_dict = target_dict['configurations'][configuration]
    if old_configuration_dict.get('abstract'):
      del target_dict['configurations'][configuration]

  # Now that all of the target's configurations have been built, go through
  # the target dict's keys and remove everything that's been moved into a
  # "configurations" section.
  delete_keys = []
  for key in target_dict:
    key_ext = key[-1:]
    if key_ext in key_suffixes:
      key_base = key[:-1]
    else:
      key_base = key
    if not key_base in non_configuration_keys:
      delete_keys.append(key)
  for key in delete_keys:
    del target_dict[key]
1783
1784
def ProcessListFiltersInDict(name, the_dict):
  """Process regular expression and exclusion-based filters on lists.

  An exclusion list is in a dict key named with a trailing "!", like
  "sources!".  Every item in such a list is removed from the associated
  main list, which in this example, would be "sources".  Removed items are
  placed into a "sources_excluded" list in the dict.

  Regular expression (regex) filters are contained in dict keys named with a
  trailing "/", such as "sources/" to operate on the "sources" list.  Regex
  filters in a dict take the form:
    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
                  ['include', '_mac\\.cc$'] ],
  The first filter says to exclude all files ending in _linux.cc, _mac.cc,
  and _win.cc.  The second filter then includes all files ending in _mac.cc
  that are now or were once in the "sources" list.  Items matching an
  "exclude" filter are subject to the same processing as would occur if they
  were listed by name in an exclusion list (ending in "!").  Items matching
  an "include" filter are brought back into the main list if previously
  excluded by an exclusion list or exclusion regex filter.  Subsequent
  matching "exclude" patterns can still cause items to be excluded after
  matching an "include".
  """

  # Look through the dictionary for any lists whose keys end in "!" or "/".
  # These are lists that will be treated as exclude lists and regular
  # expression-based exclude/include lists. Collect the lists that are
  # needed first, looking for the lists that they operate on, and assemble
  # then into |lists|. This is done in a separate loop up front, because
  # the _included and _excluded keys need to be added to the_dict, and that
  # can't be done while iterating through it.

  lists = []
  del_lists = []
  for key, value in the_dict.iteritems():
    operation = key[-1]
    if operation != '!' and operation != '/':
      continue

    if not isinstance(value, list):
      raise ValueError, name + ' key ' + key + ' must be list, not ' + \
            value.__class__.__name__

    list_key = key[:-1]
    if list_key not in the_dict:
      # This happens when there's a list like "sources!" but no corresponding
      # "sources" list. Since there's nothing for it to operate on, queue up
      # the "sources!" list for deletion now.
      del_lists.append(key)
      continue

    if not isinstance(the_dict[list_key], list):
      raise ValueError, name + ' key ' + list_key + \
                        ' must be list, not ' + \
                        value.__class__.__name__ + ' when applying ' + \
                        {'!': 'exclusion', '/': 'regex'}[operation]

    if not list_key in lists:
      lists.append(list_key)

  # Delete the lists that are known to be unneeded at this point.
  for del_list in del_lists:
    del the_dict[del_list]

  for list_key in lists:
    the_list = the_dict[list_key]

    # Initialize the list_actions list, which is parallel to the_list. Each
    # item in list_actions identifies whether the corresponding item in
    # the_list should be excluded, unconditionally preserved (included), or
    # whether no exclusion or inclusion has been applied. Items for which
    # no exclusion or inclusion has been applied (yet) have value -1, items
    # excluded have value 0, and items included have value 1. Includes and
    # excludes override previous actions. All items in list_actions are
    # initialized to -1 because no excludes or includes have been processed
    # yet.
    list_actions = list((-1,) * len(the_list))

    exclude_key = list_key + '!'
    if exclude_key in the_dict:
      for exclude_item in the_dict[exclude_key]:
        for index in xrange(0, len(the_list)):
          if exclude_item == the_list[index]:
            # This item matches the exclude_item, so set its action to 0
            # (exclude).
            list_actions[index] = 0

      # The "whatever!" list is no longer needed, dump it.
      del the_dict[exclude_key]

    regex_key = list_key + '/'
    if regex_key in the_dict:
      for regex_item in the_dict[regex_key]:
        [action, pattern] = regex_item
        pattern_re = re.compile(pattern)

        for index in xrange(0, len(the_list)):
          list_item = the_list[index]
          if pattern_re.search(list_item):
            # Regular expression match.

            if action == 'exclude':
              # This item matches an exclude regex, so set its value to 0
              # (exclude).
              list_actions[index] = 0
            elif action == 'include':
              # This item matches an include regex, so set its value to 1
              # (include).
              list_actions[index] = 1
            else:
              # This is an action that doesn't make any sense.
              raise ValueError, 'Unrecognized action ' + action + ' in ' + \
                                name + ' key ' + key

      # The "whatever/" list is no longer needed, dump it.
      del the_dict[regex_key]

    # Add excluded items to the excluded list.
    #
    # Note that exclude_key ("sources!") is different from excluded_key
    # ("sources_excluded"). The exclude_key list is input and it was already
    # processed and deleted; the excluded_key list is output and it's about
    # to be created.
    excluded_key = list_key + '_excluded'
    if excluded_key in the_dict:
      raise KeyError, \
          name + ' key ' + excluded_key + ' must not be present prior ' + \
          ' to applying exclusion/regex filters for ' + list_key

    excluded_list = []

    # Go backwards through the list_actions list so that as items are deleted,
    # the indices of items that haven't been seen yet don't shift. That means
    # that things need to be prepended to excluded_list to maintain them in the
    # same order that they existed in the_list.
    for index in xrange(len(list_actions) - 1, -1, -1):
      if list_actions[index] == 0:
        # Dump anything with action 0 (exclude). Keep anything with action 1
        # (include) or -1 (no include or exclude seen for the item).
        excluded_list.insert(0, the_list[index])
        del the_list[index]

    # If anything was excluded, put the excluded list into the_dict at
    # excluded_key.
    if len(excluded_list) > 0:
      the_dict[excluded_key] = excluded_list

  # Now recurse into subdicts and lists that may contain dicts.
  for key, value in the_dict.iteritems():
    if isinstance(value, dict):
      ProcessListFiltersInDict(key, value)
    elif isinstance(value, list):
      ProcessListFiltersInList(key, value)
1937
1938
def ProcessListFiltersInList(name, the_list):
  """Recurses into |the_list| and applies list filters to any dicts found.

  Plain (non-dict, non-list) items are left untouched; nested lists are
  walked recursively and nested dicts are handed to ProcessListFiltersInDict
  under the same |name|.
  """
  for element in the_list:
    if isinstance(element, dict):
      ProcessListFiltersInDict(name, element)
    elif isinstance(element, list):
      ProcessListFiltersInList(name, element)
1945
1946
def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
  """Ensures that the rules sections in target_dict are valid and consistent,
  and determines which sources they apply to.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec containing "rules" and "sources" lists.
    extra_sources_for_rules: a list of keys to scan for rule matches in
        addition to 'sources'.

  Raises:
    KeyError: on duplicate rule names, duplicate extensions, or a
        pre-existing 'rule_sources' key.
  """
  # Track every rule_name and extension seen so far to detect conflicts.
  seen_names = {}
  seen_extensions = {}

  for rule in target_dict.get('rules', []):
    rule_name = rule['rule_name']
    if rule_name in seen_names:
      raise KeyError('rule %s exists in duplicate, target %s' %
                     (rule_name, target))
    seen_names[rule_name] = rule

    extension = rule['extension']
    if extension in seen_extensions:
      raise KeyError(('extension %s associated with multiple rules, ' +
                      'target %s rules %s and %s') %
                     (extension, target,
                      seen_extensions[extension]['rule_name'], rule_name))
    seen_extensions[extension] = rule

    # 'rule_sources' is an output of this function; it must not be supplied.
    if 'rule_sources' in rule:
      raise KeyError(
          'rule_sources must not exist in input, target %s rule %s' %
          (target, rule_name))

    # Collect every source (from 'sources' plus any generator-specified extra
    # lists) whose extension, sans leading dot, matches this rule's extension.
    matching_sources = []
    for source_key in ['sources'] + list(extra_sources_for_rules):
      for source in target_dict.get(source_key, []):
        suffix = os.path.splitext(source)[1]
        if suffix.startswith('.'):
          suffix = suffix[1:]
        if suffix == extension:
          matching_sources.append(source)

    if matching_sources:
      rule['rule_sources'] = matching_sources
2002
2003
def ValidateActionsInTarget(target, target_dict, build_file):
  """Validates the actions in a target.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec containing an optional 'actions' list.
    build_file: string, path of the build file the target came from (kept
        for signature parity with the other per-target validators).

  Raises:
    Exception: if any action lacks the required 'action_name' field.
  """
  target_name = target_dict.get('target_name')
  actions = target_dict.get('actions', [])
  for action in actions:
    action_name = action.get('action_name')
    if not action_name:
      raise Exception("Anonymous action in target %s. "
                      "An action must have an 'action_name' field." %
                      target_name)
    # NOTE: the original assigned action.get('inputs', []) to an unused
    # local; that dead assignment has been removed.
2015
2016
def ValidateRunAsInTarget(target, target_dict, build_file):
  """Validates the optional 'run_as' section of a target.

  'run_as' must be a dict with a list-valued 'action', an optional string
  'working_directory', and an optional dict 'environment'.  A missing or
  falsy 'run_as' is fine and is simply skipped.

  Raises:
    Exception: when any of the above constraints is violated.
  """
  target_name = target_dict.get('target_name')
  run_as = target_dict.get('run_as')
  if not run_as:
    return

  def _complain(message):
    # Every run_as error reports the same (target, build file) context.
    raise Exception(message % (target_name, build_file))

  if not isinstance(run_as, dict):
    _complain("The 'run_as' in target %s from file %s should be a "
              "dictionary.")
  action = run_as.get('action')
  if not action:
    _complain("The 'run_as' in target %s from file %s must have an "
              "'action' section.")
  if not isinstance(action, list):
    _complain("The 'action' for 'run_as' in target %s from file %s "
              "must be a list.")
  working_directory = run_as.get('working_directory')
  if working_directory and not isinstance(working_directory, str):
    _complain("The 'working_directory' for 'run_as' in target %s "
              "in file %s should be a string.")
  environment = run_as.get('environment')
  if environment and not isinstance(environment, dict):
    _complain("The 'environment' for 'run_as' in target %s "
              "in file %s should be a dictionary.")
2045
2046
def TurnIntIntoStrInDict(the_dict):
  """Recursively converts every int key and int value in the_dict to str."""
  # Snapshot the items up front: int keys are reinserted under their string
  # form below, and those reinserted entries need no second look.
  for key, value in the_dict.items():
    if isinstance(value, int):
      value = str(value)
      the_dict[key] = value
    elif isinstance(value, dict):
      TurnIntIntoStrInDict(value)
    elif isinstance(value, list):
      TurnIntIntoStrInList(value)

    # Re-key int keys as strings, keeping the (possibly converted) value.
    if isinstance(key, int):
      the_dict[str(key)] = value
      del the_dict[key]
2064
2065
def TurnIntIntoStrInList(the_list):
  """Recursively converts every int found in the_list to its str form."""
  for position, item in enumerate(the_list):
    if isinstance(item, int):
      the_list[position] = str(item)
    elif isinstance(item, dict):
      TurnIntIntoStrInDict(item)
    elif isinstance(item, list):
      TurnIntIntoStrInList(item)
2077
2078
def Load(build_files, variables, includes, depth, generator_input_info, check,
         circular_check):
  """Loads and fully processes the given build files.

  This is the main entry point of the input module: it loads every build
  file (and its includes/dependencies), qualifies and expands dependency
  links, publishes dependent settings, applies "late" variable expansion
  and list filters, and validates rules/actions/run_as sections.

  Arguments:
    build_files: list of paths to top-level .gyp files to load.
    variables: dict of predefined variables for expansion.
    includes: list of include files applied to each build file.
    depth: the --depth directory, used for relativization.
    generator_input_info: dict of generator capabilities and configuration
        (path sections, toolset support, extra rule sources, etc.).
    check: bool, whether to syntax-check the build files.
    circular_check: bool, whether to reject inter-file circular dependencies.

  Returns:
    [flat_list, targets, data] where flat_list is a dependency-ordered list
    of qualified target names, targets maps qualified names to target dicts,
    and data maps build file paths to their processed contents.
  """
  # Set up path_sections and non_configuration_keys with the default data plus
  # the generator-specific data.  These are module-level globals consulted by
  # the processing helpers below.
  global path_sections
  path_sections = base_path_sections[:]
  path_sections.extend(generator_input_info['path_sections'])

  global non_configuration_keys
  non_configuration_keys = base_non_configuration_keys[:]
  non_configuration_keys.extend(generator_input_info['non_configuration_keys'])

  # TODO(mark) handle variants if the generator doesn't want them directly.
  generator_handles_variants = \
      generator_input_info['generator_handles_variants']

  global absolute_build_file_paths
  absolute_build_file_paths = \
      generator_input_info['generator_wants_absolute_build_file_paths']

  global multiple_toolsets
  multiple_toolsets = generator_input_info[
      'generator_supports_multiple_toolsets']

  # A generator can have other lists (in addition to sources) be processed
  # for rules.
  extra_sources_for_rules = generator_input_info['extra_sources_for_rules']

  # Load build files.  This loads every target-containing build file into
  # the |data| dictionary such that the keys to |data| are build file names,
  # and the values are the entire build file contents after "early" or "pre"
  # processing has been done and includes have been resolved.
  # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
  # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
  # track of the keys corresponding to "target" files.
  data = {'target_build_files': set()}
  aux_data = {}
  for build_file in build_files:
    # Normalize paths everywhere.  This is important because paths will be
    # used as keys to the data dict and for references between input files.
    build_file = os.path.normpath(build_file)
    try:
      LoadTargetBuildFile(build_file, data, aux_data, variables, includes,
                          depth, check)
    except Exception, e:
      gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
      raise

  # Build a dict to access each target's subdict by qualified name.
  targets = BuildTargetsDict(data)

  # Fully qualify all dependency links.
  QualifyDependencies(targets)

  # Expand dependencies specified as build_file:*.
  ExpandWildcardDependencies(targets, data)

  if circular_check:
    # Make sure that any targets in a.gyp don't contain dependencies in other
    # .gyp files that further depend on a.gyp.
    VerifyNoGYPFileCircularDependencies(targets)

  [dependency_nodes, flat_list] = BuildDependencyList(targets)

  # Handle dependent settings of various types.
  for settings_type in ['all_dependent_settings',
                        'direct_dependent_settings',
                        'link_settings']:
    DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)

    # Take out the dependent settings now that they've been published to all
    # of the targets that require them.
    for target in flat_list:
      if settings_type in targets[target]:
        del targets[target][settings_type]

  # Make sure static libraries don't declare dependencies on other static
  # libraries, but that linkables depend on all unlinked static libraries
  # that they need so that their link steps will be correct.
  AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes)

  # Apply "post"/"late"/"target" variable expansions and condition evaluations.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ProcessVariablesAndConditionsInDict(target_dict, True, variables,
                                        build_file)

  # Move everything that can go into a "configurations" section into one.
  for target in flat_list:
    target_dict = targets[target]
    SetUpConfigurations(target, target_dict)

  # Apply exclude (!) and regex (/) list filters.
  for target in flat_list:
    target_dict = targets[target]
    ProcessListFiltersInDict(target, target_dict)

  # Make sure that the rules make sense, and build up rule_sources lists as
  # needed.  Not all generators will need to use the rule_sources lists, but
  # some may, and it seems best to build the list in a common spot.
  # Also validate actions and run_as elements in targets.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
    ValidateRunAsInTarget(target, target_dict, build_file)
    ValidateActionsInTarget(target, target_dict, build_file)

  # Generators might not expect ints.  Turn them into strs.
  TurnIntIntoStrInDict(data)

  # TODO(mark): Return |data| for now because the generator needs a list of
  # build files that came in.  In the future, maybe it should just accept
  # a list, and not the whole data dict.
  return [flat_list, targets, data]
+0
-2736
mozc_build_tools/gyp/pylib/gyp/xcodeproj_file.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Xcode project file generator.
7
8 This module is both an Xcode project file generator and a documentation of the
9 Xcode project file format. Knowledge of the project file format was gained
10 based on extensive experience with Xcode, and by making changes to projects in
11 Xcode.app and observing the resultant changes in the associated project files.
12
13 XCODE PROJECT FILES
14
15 The generator targets the file format as written by Xcode 3.1 (specifically,
16 3.1.2), but past experience has taught that the format has not changed
17 significantly in the past several years, and future versions of Xcode are able
18 to read older project files.
19
20 Xcode project files are "bundled": the project "file" from an end-user's
21 perspective is actually a directory with an ".xcodeproj" extension. The
22 project file from this module's perspective is actually a file inside this
23 directory, always named "project.pbxproj". This file contains a complete
24 description of the project and is all that is needed to use the xcodeproj.
25 Other files contained in the xcodeproj directory are simply used to store
26 per-user settings, such as the state of various UI elements in the Xcode
27 application.
28
29 The project.pbxproj file is a property list, stored in a format almost
30 identical to the NeXTstep property list format. The file is able to carry
31 Unicode data, and is encoded in UTF-8. The root element in the property list
32 is a dictionary that contains several properties of minimal interest, and two
33 properties of immense interest. The most important property is a dictionary
34 named "objects". The entire structure of the project is represented by the
35 children of this property. The objects dictionary is keyed by unique 96-bit
36 values represented by 24 uppercase hexadecimal characters. Each value in the
37 objects dictionary is itself a dictionary, describing an individual object.
38
39 Each object in the dictionary is a member of a class, which is identified by
40 the "isa" property of each object. A variety of classes are represented in a
41 project file. Objects can refer to other objects by ID, using the 24-character
42 hexadecimal object key. A project's objects form a tree, with a root object
43 of class PBXProject at the root. As an example, the PBXProject object serves
44 as parent to an XCConfigurationList object defining the build configurations
45 used in the project, a PBXGroup object serving as a container for all files
46 referenced in the project, and a list of target objects, each of which defines
47 a target in the project. There are several different types of target object,
48 such as PBXNativeTarget and PBXAggregateTarget. In this module, this
49 relationship is expressed by having each target type derive from an abstract
50 base named XCTarget.
51
52 The project.pbxproj file's root dictionary also contains a property, sibling to
53 the "objects" dictionary, named "rootObject". The value of rootObject is a
54 24-character object key referring to the root PBXProject object in the
55 objects dictionary.
56
57 In Xcode, every file used as input to a target or produced as a final product
58 of a target must appear somewhere in the hierarchy rooted at the PBXGroup
59 object referenced by the PBXProject's mainGroup property. A PBXGroup is
60 generally represented as a folder in the Xcode application. PBXGroups can
61 contain other PBXGroups as well as PBXFileReferences, which are pointers to
62 actual files.
63
64 Each XCTarget contains a list of build phases, represented in this module by
65 the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations
66 are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the
67 "Compile Sources" and "Link Binary With Libraries" phases displayed in the
68 Xcode application. Files used as input to these phases (for example, source
69 files in the former case and libraries and frameworks in the latter) are
70 represented by PBXBuildFile objects, referenced by elements of "files" lists
71 in XCTarget objects. Each PBXBuildFile object refers to a PBXBuildFile
72 object as a "weak" reference: it does not "own" the PBXBuildFile, which is
73 owned by the root object's mainGroup or a descendant group. In most cases, the
74 layer of indirection between an XCBuildPhase and a PBXFileReference via a
75 PBXBuildFile appears extraneous, but there's actually one reason for this:
76 file-specific compiler flags are added to the PBXBuildFile object so as to
77 allow a single file to be a member of multiple targets while having distinct
78 compiler flags for each. These flags can be modified in the Xcode application
79 in the "Build" tab of a File Info window.
80
81 When a project is open in the Xcode application, Xcode will rewrite it. As
82 such, this module is careful to adhere to the formatting used by Xcode, to
83 avoid insignificant changes appearing in the file when it is used in the
84 Xcode application. This will keep version control repositories happy, and
85 makes it possible to compare a project file used in Xcode to one generated by
86 this module to determine if any significant changes were made in the
87 application.
88
89 Xcode has its own way of assigning 24-character identifiers to each object,
90 which is not duplicated here. Because the identifier is only generated
91 once, when an object is created, and is then left unchanged, there is no need
92 to attempt to duplicate Xcode's behavior in this area. The generator is free
93 to select any identifier, even at random, to refer to the objects it creates,
94 and Xcode will retain those identifiers and use them when subsequently
95 rewriting the project file. However, the generator would choose new random
96 identifiers each time the project files are generated, leading to difficulties
97 comparing "used" project files to "pristine" ones produced by this module,
98 and causing the appearance of changes as every object identifier is changed
99 when updated projects are checked in to a version control repository. To
100 mitigate this problem, this module chooses identifiers in a more deterministic
101 way, by hashing a description of each object as well as its parent and ancestor
102 objects. This strategy should result in minimal "shift" in IDs as successive
103 generations of project files are produced.
104
105 THIS MODULE
106
107 This module introduces several classes, all derived from the XCObject class.
108 Nearly all of the "brains" are built into the XCObject class, which understands
109 how to create and modify objects, maintain the proper tree structure, compute
110 identifiers, and print objects. For the most part, classes derived from
111 XCObject need only provide a _schema class object, a dictionary that
112 expresses what properties objects of the class may contain.
113
114 Given this structure, it's possible to build a minimal project file by creating
115 objects of the appropriate types and making the proper connections:
116
117 config_list = XCConfigurationList()
118 group = PBXGroup()
119 project = PBXProject({'buildConfigurationList': config_list,
120 'mainGroup': group})
121
122 With the project object set up, it can be added to an XCProjectFile object.
123 XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject
124 subclass that does not actually correspond to a class type found in a project
125 file. Rather, it is used to represent the project file's root dictionary.
126 Printing an XCProjectFile will print the entire project file, including the
127 full "objects" dictionary.
128
129 project_file = XCProjectFile({'rootObject': project})
130 project_file.ComputeIDs()
131 project_file.Print()
132
133 Xcode project files are always encoded in UTF-8. This module will accept
134 strings of either the str class or the unicode class. Strings of class str
135 are assumed to already be encoded in UTF-8. Obviously, if you're just using
136 ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset.
137 Strings of class unicode are handled properly and encoded in UTF-8 when
138 a project file is output.
139 """
140
141 import gyp.common
142 import posixpath
143 import re
144 import struct
145 import sys
146
147 # hashlib is supplied as of Python 2.5 as the replacement interface for sha
148 # and other secure hashes. In 2.6, sha is deprecated. Import hashlib if
149 # available, avoiding a deprecation warning under 2.6. Import sha otherwise,
150 # preserving 2.4 compatibility.
151 try:
152 import hashlib
153 _new_sha1 = hashlib.sha1
154 except ImportError:
155 import sha
156 _new_sha1 = sha.new
157
158
159 # See XCObject._EncodeString. This pattern is used to determine when a string
160 # can be printed unquoted. Strings that match this pattern may be printed
161 # unquoted. Strings that do not match must be quoted and may be further
162 # transformed to be properly encoded. Note that this expression matches the
163 # characters listed with "+", for 1 or more occurrences: if a string is empty,
164 # it must not match this pattern, because it needs to be encoded as "".
165 _unquoted = re.compile('^[A-Za-z0-9$./_]+$')
166
167 # Strings that match this pattern are quoted regardless of what _unquoted says.
168 # Oddly, Xcode will quote any string with a run of three or more underscores.
169 _quoted = re.compile('___')
170
171 # This pattern should match any character that needs to be escaped by
172 # XCObject._EncodeString. See that function.
173 _escaped = re.compile('[\\\\"]|[^ -~]')
174
175
# Used by SourceTreeAndPathFromPath
_path_leading_variable = re.compile('^\$\((.*?)\)(/(.*))?$')

def SourceTreeAndPathFromPath(input_path):
  """Splits input_path into a (source_tree, output_path) pair.

  Examples:
    input_path     (source_tree, output_path)
    '$(VAR)/path'  ('VAR', 'path')
    '$(VAR)'       ('VAR', None)
    'path'         (None, 'path')
  """
  match = _path_leading_variable.match(input_path)
  if not match:
    # No leading "$(VAR)": the whole input is the path, with no source tree.
    return (None, input_path)
  # group(3) is the portion after "$(VAR)/"; it is None for a bare "$(VAR)".
  return (match.group(1), match.group(3))
198
def ConvertVariablesToShellSyntax(input_string):
  """Rewrites every "$(VAR)" reference as shell-style "${VAR}"."""
  return re.sub(r'\$\((.*?)\)', r'${\1}', input_string)
201
202 class XCObject(object):
203 """The abstract base of all class types used in Xcode project files.
204
205 Class variables:
206 _schema: A dictionary defining the properties of this class. The keys to
207 _schema are string property keys as used in project files. Values
208 are a list of four or five elements:
209 [ is_list, property_type, is_strong, is_required, default ]
210 is_list: True if the property described is a list, as opposed
211 to a single element.
212 property_type: The type to use as the value of the property,
213 or if is_list is True, the type to use for each
214 element of the value's list. property_type must
215 be an XCObject subclass, or one of the built-in
216 types str, int, or dict.
217 is_strong: If property_type is an XCObject subclass, is_strong
218 is True to assert that this class "owns," or serves
219 as parent, to the property value (or, if is_list is
220 True, values). is_strong must be False if
221 property_type is not an XCObject subclass.
222 is_required: True if the property is required for the class.
223 Note that is_required being True does not preclude
224 an empty string ("", in the case of property_type
225 str) or list ([], in the case of is_list True) from
226 being set for the property.
227 default: Optional. If is_required is True, default may be set
228 to provide a default value for objects that do not supply
229 their own value. If is_required is True and default
230 is not provided, users of the class must supply their own
231 value for the property.
232 Note that although the values of the array are expressed in
233 boolean terms, subclasses provide values as integers to conserve
234 horizontal space.
235 _should_print_single_line: False in XCObject. Subclasses whose objects
236 should be written to the project file in the
237 alternate single-line format, such as
238 PBXFileReference and PBXBuildFile, should
239 set this to True.
240 _encode_transforms: Used by _EncodeString to encode unprintable characters.
241 The index into this list is the ordinal of the
242 character to transform; each value is a string
243 used to represent the character in the output. XCObject
244 provides an _encode_transforms list suitable for most
245 XCObject subclasses.
246 _alternate_encode_transforms: Provided for subclasses that wish to use
247 the alternate encoding rules. Xcode seems
248 to use these rules when printing objects in
249 single-line format. Subclasses that desire
250 this behavior should set _encode_transforms
251 to _alternate_encode_transforms.
252 _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs
253 to construct this object's ID. Most classes that need custom
254 hashing behavior should do it by overriding Hashables,
255 but in some cases an object's parent may wish to push a
256 hashable value into its child, and it can do so by appending
257 to _hashables.
258 Attributes:
259 id: The object's identifier, a 24-character uppercase hexadecimal string.
260 Usually, objects being created should not set id until the entire
261 project file structure is built. At that point, UpdateIDs() should
262 be called on the root object to assign deterministic values for id to
263 each object in the tree.
264 parent: The object's parent. This is set by a parent XCObject when a child
265 object is added to it.
266 _properties: The object's property dictionary. An object's properties are
267 described by its class' _schema variable.
268 """
269
270 _schema = {}
271 _should_print_single_line = False
272
273 # See _EncodeString.
274 _encode_transforms = []
275 i = 0
276 while i < ord(' '):
277 _encode_transforms.append('\\U%04x' % i)
278 i = i + 1
279 _encode_transforms[7] = '\\a'
280 _encode_transforms[8] = '\\b'
281 _encode_transforms[9] = '\\t'
282 _encode_transforms[10] = '\\n'
283 _encode_transforms[11] = '\\v'
284 _encode_transforms[12] = '\\f'
285 _encode_transforms[13] = '\\n'
286
287 _alternate_encode_transforms = list(_encode_transforms)
288 _alternate_encode_transforms[9] = chr(9)
289 _alternate_encode_transforms[10] = chr(10)
290 _alternate_encode_transforms[11] = chr(11)
291
  def __init__(self, properties=None, id=None, parent=None):
    """Initializes an XCObject.

    Args:
      properties: optional dict of property values, merged over the schema
          defaults by UpdateProperties.
      id: optional 24-character identifier; usually left None until
          ComputeIDs assigns deterministic IDs over the whole tree.
      parent: optional owning XCObject.
    """
    self.id = id
    self.parent = parent
    self._properties = {}
    self._hashables = []
    # Populate schema-declared defaults first, then let the caller-supplied
    # properties override them.
    self._SetDefaultsFromSchema()
    self.UpdateProperties(properties)
299
300 def __repr__(self):
301 try:
302 name = self.Name()
303 except NotImplementedError:
304 return '<%s at 0x%x>' % (self.__class__.__name__, id(self))
305 return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
306
  def Copy(self):
    """Make a copy of this object.

    The new object will have its own copy of lists and dicts.  Any XCObject
    objects owned by this object (marked "strong") will be copied in the
    new object, even those found in lists.  If this object has any weak
    references to other XCObjects, the same references are added to the new
    object without making a copy.
    """

    that = self.__class__(id=self.id, parent=self.parent)
    for key, value in self._properties.iteritems():
      # Element 2 of the schema entry says whether this object owns the value.
      is_strong = self._schema[key][2]

      if isinstance(value, XCObject):
        if is_strong:
          # Owned child object: deep-copy it and reparent the copy.
          new_value = value.Copy()
          new_value.parent = that
          that._properties[key] = new_value
        else:
          # Weak reference: share the same object, no copy.
          that._properties[key] = value
      elif isinstance(value, str) or isinstance(value, unicode) or \
           isinstance(value, int):
        # Immutable scalars can be shared directly.
        that._properties[key] = value
      elif isinstance(value, list):
        if is_strong:
          # If is_strong is True, each element is an XCObject, so it's safe to
          # call Copy.
          that._properties[key] = []
          for item in value:
            new_item = item.Copy()
            new_item.parent = that
            that._properties[key].append(new_item)
        else:
          # Weak list: shallow-copy the list, sharing its elements.
          that._properties[key] = value[:]
      elif isinstance(value, dict):
        # dicts are never strong.
        if is_strong:
          raise TypeError, 'Strong dict for key ' + key + ' in ' + \
                           self.__class__.__name__
        else:
          that._properties[key] = value.copy()
      else:
        raise TypeError, 'Unexpected type ' + value.__class__.__name__ + \
                         ' for key ' + key + ' in ' + self.__class__.__name__

    return that
354
355 def Name(self):
356 """Return the name corresponding to an object.
357
358 Not all objects necessarily need to be nameable, and not all that do have
359 a "name" property. Override as needed.
360 """
361
362 # If the schema indicates that "name" is required, try to access the
363 # property even if it doesn't exist. This will result in a KeyError
364 # being raised for the property that should be present, which seems more
365 # appropriate than NotImplementedError in this case.
366 if 'name' in self._properties or \
367 ('name' in self._schema and self._schema['name'][3]):
368 return self._properties['name']
369
370 raise NotImplementedError, \
371 self.__class__.__name__ + ' must implement Name'
372
373 def Comment(self):
374 """Return a comment string for the object.
375
376 Most objects just use their name as the comment, but PBXProject uses
377 different values.
378
379 The returned comment is not escaped and does not have any comment marker
380 strings applied to it.
381 """
382
383 return self.Name()
384
385 def Hashables(self):
386 hashables = [self.__class__.__name__]
387
388 name = self.Name()
389 if name != None:
390 hashables.append(name)
391
392 hashables.extend(self._hashables)
393
394 return hashables
395
  def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
    """Set "id" properties deterministically.

    An object's "id" property is set based on a hash of its class type and
    name, as well as the class type and name of all ancestor objects.  As
    such, it is only advisable to call ComputeIDs once an entire project file
    tree is built.

    If recursive is True, recurse into all descendant objects and update their
    hashes.

    If overwrite is True, any existing value set in the "id" property will be
    replaced.
    """

    def _HashUpdate(hash, data):
      """Update hash with data's length and contents.

      If the hash were updated only with the value of data, it would be
      possible for clowns to induce collisions by manipulating the names of
      their objects.  By adding the length, it's exceedingly less likely that
      ID collisions will be encountered, intentionally or not.
      """

      hash.update(struct.pack('>i', len(data)))
      hash.update(data)

    if hash == None:
      hash = _new_sha1()

    hashables = self.Hashables()
    assert len(hashables) > 0
    for hashable in hashables:
      _HashUpdate(hash, hashable)

    if recursive:
      # Each child receives a copy of the running hash, so a child's ID
      # depends on its own hashables plus those of all its ancestors.
      for child in self.Children():
        child.ComputeIDs(recursive, overwrite, hash.copy())

    if overwrite or self.id == None:
      # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest
      # is 160 bits.  Instead of throwing out 64 bits of the digest, xor them
      # into the portion that gets used.
      assert hash.digest_size % 4 == 0
      digest_int_count = hash.digest_size / 4
      digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest())
      id_ints = [0, 0, 0]
      for index in xrange(0, digest_int_count):
        # Fold the 32-bit words of the digest into three words round-robin.
        id_ints[index % 3] ^= digest_ints[index]
      self.id = '%08X%08X%08X' % tuple(id_ints)
446
447 def EnsureNoIDCollisions(self):
448 """Verifies that no two objects have the same ID. Checks all descendants.
449 """
450
451 ids = {}
452 descendants = self.Descendants()
453 for descendant in descendants:
454 if descendant.id in ids:
455 other = ids[descendant.id]
456 raise KeyError, \
457 'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \
458 (descendant.id, str(descendant._properties),
459 str(other._properties), self._properties['rootObject'].Name())
460 ids[descendant.id] = descendant
461
462 def Children(self):
463 """Returns a list of all of this object's owned (strong) children."""
464
465 children = []
466 for property, attributes in self._schema.iteritems():
467 (is_list, property_type, is_strong) = attributes[0:3]
468 if is_strong and property in self._properties:
469 if not is_list:
470 children.append(self._properties[property])
471 else:
472 children.extend(self._properties[property])
473 return children
474
475 def Descendants(self):
476 """Returns a list of all of this object's descendants, including this
477 object.
478 """
479
480 children = self.Children()
481 descendants = [self]
482 for child in children:
483 descendants.extend(child.Descendants())
484 return descendants
485
486 def PBXProjectAncestor(self):
487 # The base case for recursion is defined at PBXProject.PBXProjectAncestor.
488 if self.parent:
489 return self.parent.PBXProjectAncestor()
490 return None
491
492 def _EncodeComment(self, comment):
493 """Encodes a comment to be placed in the project file output, mimicing
494 Xcode behavior.
495 """
496
497 # This mimics Xcode behavior by wrapping the comment in "/*" and "*/". If
498 # the string already contains a "*/", it is turned into "(*)/". This keeps
499 # the file writer from outputting something that would be treated as the
500 # end of a comment in the middle of something intended to be entirely a
501 # comment.
502
503 return '/* ' + comment.replace('*/', '(*)/') + ' */'
504
505 def _EncodeTransform(self, match):
506 # This function works closely with _EncodeString. It will only be called
507 # by re.sub with match.group(0) containing a character matched by the
508 # the _escaped expression.
509 char = match.group(0)
510
511 # Backslashes (\) and quotation marks (") are always replaced with a
512 # backslash-escaped version of the same. Everything else gets its
513 # replacement from the class' _encode_transforms array.
514 if char == '\\':
515 return '\\\\'
516 if char == '"':
517 return '\\"'
518 return self._encode_transforms[ord(char)]
519
520 def _EncodeString(self, value):
521 """Encodes a string to be placed in the project file output, mimicing
522 Xcode behavior.
523 """
524
525 # Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
526 # $ (dollar sign), . (period), and _ (underscore) is present. Also use
527 # quotation marks to represent empty strings.
528 #
529 # Escape " (double-quote) and \ (backslash) by preceding them with a
530 # backslash.
531 #
532 # Some characters below the printable ASCII range are encoded specially:
533 # 7 ^G BEL is encoded as "\a"
534 # 8 ^H BS is encoded as "\b"
535 # 11 ^K VT is encoded as "\v"
536 # 12 ^L NP is encoded as "\f"
537 # 127 ^? DEL is passed through as-is without escaping
538 # - In PBXFileReference and PBXBuildFile objects:
539 # 9 ^I HT is passed through as-is without escaping
540 # 10 ^J NL is passed through as-is without escaping
541 # 13 ^M CR is passed through as-is without escaping
542 # - In other objects:
543 # 9 ^I HT is encoded as "\t"
544 # 10 ^J NL is encoded as "\n"
545 # 13 ^M CR is encoded as "\n" rendering it indistinguishable from
546 # 10 ^J NL
547 # All other nonprintable characters within the ASCII range (0 through 127
548 # inclusive) are encoded as "\U001f" referring to the Unicode code point in
549 # hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e".
550 # Characters above the ASCII range are passed through to the output encoded
551 # as UTF-8 without any escaping. These mappings are contained in the
552 # class' _encode_transforms list.
553
554 if _unquoted.search(value) and not _quoted.search(value):
555 return value
556
557 return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
558
559 def _XCPrint(self, file, tabs, line):
560 file.write('\t' * tabs + line)
561
  def _XCPrintableValue(self, tabs, value, flatten_list=False):
    """Returns a representation of value that may be printed in a project file,
    mimicing Xcode's behavior.

    _XCPrintableValue can handle str and int values, XCObjects (which are
    made printable by returning their id property), and list and dict objects
    composed of any of the above types.  When printing a list or dict, and
    _should_print_single_line is False, the tabs parameter is used to determine
    how much to indent the lines corresponding to the items in the list or
    dict.

    If flatten_list is True, single-element lists will be transformed into
    strings.
    """

    printable = ''
    comment = None

    # Single-line mode joins items with spaces and uses no indentation;
    # multi-line mode puts each item on its own line, indented one level
    # deeper than the closing delimiter.
    if self._should_print_single_line:
      sep = ' '
      element_tabs = ''
      end_tabs = ''
    else:
      sep = '\n'
      element_tabs = '\t' * (tabs + 1)
      end_tabs = '\t' * tabs

    if isinstance(value, XCObject):
      # Objects are referenced by their id; the object's comment (if any) is
      # appended after the value at the bottom of this function.
      printable += value.id
      comment = value.Comment()
    elif isinstance(value, str):
      printable += self._EncodeString(value)
    elif isinstance(value, unicode):
      # Python 2 unicode values are re-encoded to UTF-8 bytes before escaping.
      printable += self._EncodeString(value.encode('utf-8'))
    elif isinstance(value, int):
      printable += str(value)
    elif isinstance(value, list):
      if flatten_list and len(value) <= 1:
        # Flattening: an empty list prints as an (encoded) empty string, and
        # a single-element list prints as its sole element.
        if len(value) == 0:
          printable += self._EncodeString('')
        else:
          printable += self._EncodeString(value[0])
      else:
        # Lists print as '(item,item,...)', recursing one indent level deeper
        # for each element.
        printable = '(' + sep
        for item in value:
          printable += element_tabs + \
                       self._XCPrintableValue(tabs + 1, item, flatten_list) + \
                       ',' + sep
        printable += end_tabs + ')'
    elif isinstance(value, dict):
      # Dicts print as '{key = value;...}', with entries sorted by key for
      # stable, diff-friendly output.
      printable = '{' + sep
      for item_key, item_value in sorted(value.iteritems()):
        printable += element_tabs + \
            self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \
            self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \
            sep
      printable += end_tabs + '}'
    else:
      raise TypeError, "Can't make " + value.__class__.__name__ + ' printable'

    if comment != None:
      printable += ' ' + self._EncodeComment(comment)

    return printable
626
627 def _XCKVPrint(self, file, tabs, key, value):
628 """Prints a key and value, members of an XCObject's _properties dictionary,
629 to file.
630
631 tabs is an int identifying the indentation level. If the class'
632 _should_print_single_line variable is True, tabs is ignored and the
633 key-value pair will be followed by a space insead of a newline.
634 """
635
636 if self._should_print_single_line:
637 printable = ''
638 after_kv = ' '
639 else:
640 printable = '\t' * tabs
641 after_kv = '\n'
642
643 # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy
644 # objects without comments. Sometimes it prints them with comments, but
645 # the majority of the time, it doesn't. To avoid unnecessary changes to
646 # the project file after Xcode opens it, don't write comments for
647 # remoteGlobalIDString. This is a sucky hack and it would certainly be
648 # cleaner to extend the schema to indicate whether or not a comment should
649 # be printed, but since this is the only case where the problem occurs and
650 # Xcode itself can't seem to make up its mind, the hack will suffice.
651 #
652 # Also see PBXContainerItemProxy._schema['remoteGlobalIDString'].
653 if key == 'remoteGlobalIDString' and isinstance(self,
654 PBXContainerItemProxy):
655 value_to_print = value.id
656 else:
657 value_to_print = value
658
659 # In another one-off, let's set flatten_list on buildSettings properties
660 # of XCBuildConfiguration objects, because that's how Xcode treats them.
661 if key == 'buildSettings' and isinstance(self, XCBuildConfiguration):
662 flatten_list = True
663 else:
664 flatten_list = False
665
666 try:
667 printable += self._XCPrintableValue(tabs, key, flatten_list) + ' = ' + \
668 self._XCPrintableValue(tabs, value_to_print, flatten_list) + \
669 ';' + after_kv
670 except TypeError, e:
671 gyp.common.ExceptionAppend(e,
672 'while printing key "%s"' % key)
673 raise
674
675 self._XCPrint(file, 0, printable)
676
677 def Print(self, file=sys.stdout):
678 """Prints a reprentation of this object to file, adhering to Xcode output
679 formatting.
680 """
681
682 self.VerifyHasRequiredProperties()
683
684 if self._should_print_single_line:
685 # When printing an object in a single line, Xcode doesn't put any space
686 # between the beginning of a dictionary (or presumably a list) and the
687 # first contained item, so you wind up with snippets like
688 # ...CDEF = {isa = PBXFileReference; fileRef = 0123...
689 # If it were me, I would have put a space in there after the opening
690 # curly, but I guess this is just another one of those inconsistencies
691 # between how Xcode prints PBXFileReference and PBXBuildFile objects as
692 # compared to other objects. Mimic Xcode's behavior here by using an
693 # empty string for sep.
694 sep = ''
695 end_tabs = 0
696 else:
697 sep = '\n'
698 end_tabs = 2
699
700 # Start the object. For example, '\t\tPBXProject = {\n'.
701 self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep)
702
703 # "isa" isn't in the _properties dictionary, it's an intrinsic property
704 # of the class which the object belongs to. Xcode always outputs "isa"
705 # as the first element of an object dictionary.
706 self._XCKVPrint(file, 3, 'isa', self.__class__.__name__)
707
708 # The remaining elements of an object dictionary are sorted alphabetically.
709 for property, value in sorted(self._properties.iteritems()):
710 self._XCKVPrint(file, 3, property, value)
711
712 # End the object.
713 self._XCPrint(file, end_tabs, '};\n')
714
  def UpdateProperties(self, properties, do_copy=False):
    """Merge the supplied properties into the _properties dictionary.

    The input properties must adhere to the class schema or a KeyError or
    TypeError exception will be raised.  If adding an object of an XCObject
    subclass and the schema indicates a strong relationship, the object's
    parent will be set to this object.

    If do_copy is True, then lists, dicts, strong-owned XCObjects, and
    strong-owned XCObjects in lists will be copied instead of having their
    references added.
    """

    if properties == None:
      return

    for property, value in properties.iteritems():
      # Make sure the property is in the schema.
      if not property in self._schema:
        raise KeyError, property + ' not in ' + self.__class__.__name__

      # Make sure the property conforms to the schema.
      # Schema columns: [is_list, property_type, is_strong, ...].
      (is_list, property_type, is_strong) = self._schema[property][0:3]
      if is_list:
        # List properties: the value itself must be a list, and every element
        # must match the declared element type.
        if value.__class__ != list:
          raise TypeError, \
                property + ' of ' + self.__class__.__name__ + \
                ' must be list, not ' + value.__class__.__name__
        for item in value:
          if not isinstance(item, property_type) and \
             not (item.__class__ == unicode and property_type == str):
            # Accept unicode where str is specified.  str is treated as
            # UTF-8-encoded.
            raise TypeError, \
                  'item of ' + property + ' of ' + self.__class__.__name__ + \
                  ' must be ' + property_type.__name__ + ', not ' + \
                  item.__class__.__name__
      elif not isinstance(value, property_type) and \
           not (value.__class__ == unicode and property_type == str):
        # Accept unicode where str is specified.  str is treated as
        # UTF-8-encoded.
        raise TypeError, \
              property + ' of ' + self.__class__.__name__ + ' must be ' + \
              property_type.__name__ + ', not ' + value.__class__.__name__

      # Checks passed, perform the assignment.
      if do_copy:
        # Deep-copy semantics: strong XCObjects are cloned via Copy(), lists
        # and dicts are shallow-copied (strong lists clone each element),
        # and immutable scalars are assigned directly.
        if isinstance(value, XCObject):
          if is_strong:
            self._properties[property] = value.Copy()
          else:
            self._properties[property] = value
        elif isinstance(value, str) or isinstance(value, unicode) or \
             isinstance(value, int):
          self._properties[property] = value
        elif isinstance(value, list):
          if is_strong:
            # If is_strong is True, each element is an XCObject, so it's safe
            # to call Copy.
            self._properties[property] = []
            for item in value:
              self._properties[property].append(item.Copy())
          else:
            self._properties[property] = value[:]
        elif isinstance(value, dict):
          self._properties[property] = value.copy()
        else:
          raise TypeError, "Don't know how to copy a " + \
                           value.__class__.__name__ + ' object for ' + \
                           property + ' in ' + self.__class__.__name__
      else:
        self._properties[property] = value

      # Set up the child's back-reference to this object.  Don't use |value|
      # any more because it may not be right if do_copy is true.
      if is_strong:
        if not is_list:
          self._properties[property].parent = self
        else:
          for item in self._properties[property]:
            item.parent = self
796
797 def HasProperty(self, key):
798 return key in self._properties
799
800 def GetProperty(self, key):
801 return self._properties[key]
802
803 def SetProperty(self, key, value):
804 self.UpdateProperties({key: value})
805
806 def DelProperty(self, key):
807 if key in self._properties:
808 del self._properties[key]
809
810 def AppendProperty(self, key, value):
811 # TODO(mark): Support ExtendProperty too (and make this call that)?
812
813 # Schema validation.
814 if not key in self._schema:
815 raise KeyError, key + ' not in ' + self.__class__.__name__
816
817 (is_list, property_type, is_strong) = self._schema[key][0:3]
818 if not is_list:
819 raise TypeError, key + ' of ' + self.__class__.__name__ + ' must be list'
820 if not isinstance(value, property_type):
821 raise TypeError, 'item of ' + key + ' of ' + self.__class__.__name__ + \
822 ' must be ' + property_type.__name__ + ', not ' + \
823 value.__class__.__name__
824
825 # If the property doesn't exist yet, create a new empty list to receive the
826 # item.
827 if not key in self._properties:
828 self._properties[key] = []
829
830 # Set up the ownership link.
831 if is_strong:
832 value.parent = self
833
834 # Store the item.
835 self._properties[key].append(value)
836
837 def VerifyHasRequiredProperties(self):
838 """Ensure that all properties identified as required by the schema are
839 set.
840 """
841
842 # TODO(mark): A stronger verification mechanism is needed. Some
843 # subclasses need to perform validation beyond what the schema can enforce.
844 for property, attributes in self._schema.iteritems():
845 (is_list, property_type, is_strong, is_required) = attributes[0:4]
846 if is_required and not property in self._properties:
847 raise KeyError, self.__class__.__name__ + ' requires ' + property
848
849 def _SetDefaultsFromSchema(self):
850 """Assign object default values according to the schema. This will not
851 overwrite properties that have already been set."""
852
853 defaults = {}
854 for property, attributes in self._schema.iteritems():
855 (is_list, property_type, is_strong, is_required) = attributes[0:4]
856 if is_required and len(attributes) >= 5 and \
857 not property in self._properties:
858 default = attributes[4]
859
860 defaults[property] = default
861
862 if len(defaults) > 0:
863 # Use do_copy=True so that each new object gets its own copy of strong
864 # objects, lists, and dicts.
865 self.UpdateProperties(defaults, do_copy=True)
866
867
class XCHierarchicalElement(XCObject):
  """Abstract base for PBXGroup and PBXFileReference.  Not represented in a
  project file."""

  # TODO(mark): Do name and path belong here?  Probably so.
  # If path is set and name is not, name may have a default value.  Name will
  # be set to the basename of path, if the basename of path is different from
  # the full value of path.  If path is already just a leaf name, name will
  # not be set.
  # Schema columns: [is_list, property_type, is_strong, is_required, default].
  _schema = XCObject._schema.copy()
  _schema.update({
    'comments': [0, str, 0, 0],
    'fileEncoding': [0, str, 0, 0],
    'includeInIndex': [0, int, 0, 0],
    'indentWidth': [0, int, 0, 0],
    'lineEnding': [0, int, 0, 0],
    'sourceTree': [0, str, 0, 1, '<group>'],
    'tabWidth': [0, int, 0, 0],
    'usesTabs': [0, int, 0, 0],
    'wrapsLines': [0, int, 0, 0],
  })

  def __init__(self, properties=None, id=None, parent=None):
    # super
    XCObject.__init__(self, properties, id, parent)
    # Derive a default name from path's basename when the two differ and no
    # explicit name was given.
    if 'path' in self._properties and not 'name' in self._properties:
      path = self._properties['path']
      name = posixpath.basename(path)
      if name != '' and path != name:
        self.SetProperty('name', name)

    if 'path' in self._properties and \
       (not 'sourceTree' in self._properties or \
        self._properties['sourceTree'] == '<group>'):
      # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take
      # the variable out and make the path be relative to that variable by
      # assigning the variable name as the sourceTree.
      (source_tree, path) = SourceTreeAndPathFromPath(self._properties['path'])
      if source_tree != None:
        self._properties['sourceTree'] = source_tree
      if path != None:
        self._properties['path'] = path
      if source_tree != None and path == None and \
         not 'name' in self._properties:
        # The path was of the form "$(SDKROOT)" with no path following it.
        # This object is now relative to that variable, so it has no path
        # attribute of its own.  It does, however, keep a name.
        del self._properties['path']
        self._properties['name'] = source_tree

  def Name(self):
    """Returns the display name: the name property if set, else the path
    property, else None (the root PBXGroup has neither)."""
    if 'name' in self._properties:
      return self._properties['name']
    elif 'path' in self._properties:
      return self._properties['path']
    else:
      # This happens in the case of the root PBXGroup.
      return None

  def Hashables(self):
    """Custom hashables for XCHierarchicalElements.

    XCHierarchicalElements are special.  Generally, their hashes shouldn't
    change if the paths don't change.  The normal XCObject implementation of
    Hashables adds a hashable for each object, which means that if
    the hierarchical structure changes (possibly due to changes caused when
    TakeOverOnlyChild runs and encounters slight changes in the hierarchy),
    the hashes will change.  For example, if a project file initially contains
    a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent
    a/b.  If someone later adds a/f2 to the project file, a/b can no longer be
    collapsed, and f1 winds up with parent b and grandparent a.  That would
    be sufficient to change f1's hash.

    To counteract this problem, hashables for all XCHierarchicalElements
    except for the main group (which has neither a name nor a path) are taken
    to be just the set of path components.  Because hashables are inherited
    from parents, this provides assurance that a/b/f1 has the same set of
    hashables whether its parent is b or a/b.

    The main group is a special case.  As it is permitted to have no name or
    path, it is permitted to use the standard XCObject hash mechanism.  This
    is not considered a problem because there can be only one main group.
    """

    if self == self.PBXProjectAncestor()._properties['mainGroup']:
      # super
      return XCObject.Hashables(self)

    hashables = []

    # Put the name in first, ensuring that if TakeOverOnlyChild collapses
    # children into a top-level group like "Source", the name always goes
    # into the list of hashables without interfering with path components.
    if 'name' in self._properties:
      # Make it less likely for people to manipulate hashes by following the
      # pattern of always pushing an object type value onto the list first.
      hashables.append(self.__class__.__name__ + '.name')
      hashables.append(self._properties['name'])

    # NOTE: This still has the problem that if an absolute path is encountered,
    # including paths with a sourceTree, they'll still inherit their parents'
    # hashables, even though the paths aren't relative to their parents.  This
    # is not expected to be much of a problem in practice.
    path = self.PathFromSourceTreeAndPath()
    if path != None:
      components = path.split(posixpath.sep)
      for component in components:
        hashables.append(self.__class__.__name__ + '.path')
        hashables.append(component)

    hashables.extend(self._hashables)

    return hashables

  def Compare(self, other):
    """Three-way comparison for sorting siblings: groups sort before files;
    within the same rank, names are compared."""
    # Allow comparison of these types.  PBXGroup has the highest sort rank;
    # PBXVariantGroup is treated as equal to PBXFileReference.
    valid_class_types = {
      PBXFileReference: 'file',
      PBXGroup: 'group',
      PBXVariantGroup: 'file',
    }
    self_type = valid_class_types[self.__class__]
    other_type = valid_class_types[other.__class__]

    if self_type == other_type:
      # If the two objects are of the same sort rank, compare their names.
      return cmp(self.Name(), other.Name())

    # Otherwise, sort groups before everything else.
    if self_type == 'group':
      return -1
    return 1

  def CompareRootGroup(self, other):
    # This function should be used only to compare direct children of the
    # containing PBXProject's mainGroup.  These groups should appear in the
    # listed order.
    # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the
    # generator should have a way of influencing this list rather than having
    # to hardcode for the generator here.
    order = ['Source', 'Intermediates', 'Projects', 'Frameworks', 'Products',
             'Build']

    # If the groups aren't in the listed order, do a name comparison.
    # Otherwise, groups in the listed order should come before those that
    # aren't.
    self_name = self.Name()
    other_name = other.Name()
    self_in = isinstance(self, PBXGroup) and self_name in order
    # Bug fix: this previously tested isinstance(self, PBXGroup), so
    # other_in reflected self's class rather than other's.
    other_in = isinstance(other, PBXGroup) and other_name in order
    if not self_in and not other_in:
      return self.Compare(other)
    if self_name in order and not other_name in order:
      return -1
    if other_name in order and not self_name in order:
      return 1

    # If both groups are in the listed order, go by the defined order.
    self_index = order.index(self_name)
    other_index = order.index(other_name)
    if self_index < other_index:
      return -1
    if self_index > other_index:
      return 1
    return 0

  def PathFromSourceTreeAndPath(self):
    # Turn the object's sourceTree and path properties into a single flat
    # string of a form comparable to the path parameter.  If there's a
    # sourceTree property other than "<group>", wrap it in $(...) for the
    # comparison.
    components = []
    if self._properties['sourceTree'] != '<group>':
      components.append('$(' + self._properties['sourceTree'] + ')')
    if 'path' in self._properties:
      components.append(self._properties['path'])

    if len(components) > 0:
      return posixpath.join(*components)

    return None

  def FullPath(self):
    # Returns a full path to self relative to the project file, or relative
    # to some other source tree.  Start with self, and walk up the chain of
    # parents prepending their paths, if any, until no more parents are
    # available (project-relative path) or until a path relative to some
    # source tree is found.
    xche = self
    path = None
    while isinstance(xche, XCHierarchicalElement) and \
          (path == None or \
           (not path.startswith('/') and not path.startswith('$'))):
      this_path = xche.PathFromSourceTreeAndPath()
      if this_path != None and path != None:
        path = posixpath.join(this_path, path)
      elif this_path != None:
        path = this_path
      xche = xche.parent

    return path
1070
1071
class PBXGroup(XCHierarchicalElement):
  """
  Attributes:
    _children_by_path: Maps pathnames of children of this PBXGroup to the
      actual child XCHierarchicalElement objects.
    _variant_children_by_name_and_path: Maps (name, path) tuples of
      PBXVariantGroup children to the actual child PBXVariantGroup objects.
  """

  # Schema columns: [is_list, property_type, is_strong, is_required, default].
  # children is a strong (owned), required list defaulting to empty.
  _schema = XCHierarchicalElement._schema.copy()
  _schema.update({
    'children': [1, XCHierarchicalElement, 1, 1, []],
    'name': [0, str, 0, 0],
    'path': [0, str, 0, 0],
  })
1087
1088 def __init__(self, properties=None, id=None, parent=None):
1089 # super
1090 XCHierarchicalElement.__init__(self, properties, id, parent)
1091 self._children_by_path = {}
1092 self._variant_children_by_name_and_path = {}
1093 for child in self._properties.get('children', []):
1094 self._AddChildToDicts(child)
1095
1096 def _AddChildToDicts(self, child):
1097 # Sets up this PBXGroup object's dicts to reference the child properly.
1098 child_path = child.PathFromSourceTreeAndPath()
1099 if child_path:
1100 if child_path in self._children_by_path:
1101 raise ValueError, 'Found multiple children with path ' + child_path
1102 self._children_by_path[child_path] = child
1103
1104 if isinstance(child, PBXVariantGroup):
1105 child_name = child._properties.get('name', None)
1106 key = (child_name, child_path)
1107 if key in self._variant_children_by_name_and_path:
1108 raise ValueError, 'Found multiple PBXVariantGroup children with ' + \
1109 'name ' + str(child_name) + ' and path ' + \
1110 str(child_path)
1111 self._variant_children_by_name_and_path[key] = child
1112
1113 def AppendChild(self, child):
1114 # Callers should use this instead of calling
1115 # AppendProperty('children', child) directly because this function
1116 # maintains the group's dicts.
1117 self.AppendProperty('children', child)
1118 self._AddChildToDicts(child)
1119
1120 def GetChildByName(self, name):
1121 # This is not currently optimized with a dict as GetChildByPath is because
1122 # it has few callers. Most callers probably want GetChildByPath. This
1123 # function is only useful to get children that have names but no paths,
1124 # which is rare. The children of the main group ("Source", "Products",
1125 # etc.) is pretty much the only case where this likely to come up.
1126 #
1127 # TODO(mark): Maybe this should raise an error if more than one child is
1128 # present with the same name.
1129 if not 'children' in self._properties:
1130 return None
1131
1132 for child in self._properties['children']:
1133 if child.Name() == name:
1134 return child
1135
1136 return None
1137
1138 def GetChildByPath(self, path):
1139 if not path:
1140 return None
1141
1142 if path in self._children_by_path:
1143 return self._children_by_path[path]
1144
1145 return None
1146
1147 def GetChildByRemoteObject(self, remote_object):
1148 # This method is a little bit esoteric. Given a remote_object, which
1149 # should be a PBXFileReference in another project file, this method will
1150 # return this group's PBXReferenceProxy object serving as a local proxy
1151 # for the remote PBXFileReference.
1152 #
1153 # This function might benefit from a dict optimization as GetChildByPath
1154 # for some workloads, but profiling shows that it's not currently a
1155 # problem.
1156 if not 'children' in self._properties:
1157 return None
1158
1159 for child in self._properties['children']:
1160 if not isinstance(child, PBXReferenceProxy):
1161 continue
1162
1163 container_proxy = child._properties['remoteRef']
1164 if container_proxy._properties['remoteGlobalIDString'] == remote_object:
1165 return child
1166
1167 return None
1168
  def AddOrGetFileByPath(self, path, hierarchical):
    """Returns an existing or new file reference corresponding to path.

    If hierarchical is True, this method will create or use the necessary
    hierarchical group structure corresponding to path.  Otherwise, it will
    look in and create an item in the current group only.

    If an existing matching reference is found, it is returned, otherwise, a
    new one will be created, added to the correct group, and returned.

    If path identifies a directory by virtue of carrying a trailing slash,
    this method returns a PBXFileReference of "folder" type.  If path
    identifies a variant, by virtue of it identifying a file inside a directory
    with an ".lproj" extension, this method returns a PBXVariantGroup
    containing the variant named by path, and possibly other variants.  For
    all other paths, a "normal" PBXFileReference will be returned.
    """

    # Adding or getting a directory?  Directories end with a trailing slash.
    is_dir = False
    if path.endswith('/'):
      is_dir = True
    # NOTE(review): the normalized value computed here is overwritten in both
    # branches below, so normpath always ends up equal to path (or path + '/')
    # — confirm whether posixpath.normpath(path) was meant to feed into the
    # branches.
    normpath = posixpath.normpath(path)
    if is_dir:
      normpath = path + '/'
    else:
      normpath = path

    # Adding or getting a variant?  Variants are files inside directories
    # with an ".lproj" extension.  Xcode uses variants for localization.  For
    # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named
    # MainMenu.nib inside path/to, and give it a variant named Language.  In
    # this example, grandparent would be set to path/to and parent_root would
    # be set to Language.
    variant_name = None
    parent = posixpath.dirname(path)
    grandparent = posixpath.dirname(parent)
    parent_basename = posixpath.basename(parent)
    (parent_root, parent_ext) = posixpath.splitext(parent_basename)
    if parent_ext == '.lproj':
      variant_name = parent_root
    if grandparent == '':
      grandparent = None

    # Putting a directory inside a variant group is not currently supported.
    assert not is_dir or variant_name == None

    path_split = path.split(posixpath.sep)
    if len(path_split) == 1 or \
       ((is_dir or variant_name != None) and len(path_split) == 2) or \
       not hierarchical:
      # The PBXFileReference or PBXVariantGroup will be added to or gotten from
      # this PBXGroup, no recursion necessary.
      if variant_name == None:
        # Add or get a PBXFileReference.
        file_ref = self.GetChildByPath(normpath)
        if file_ref != None:
          assert file_ref.__class__ == PBXFileReference
        else:
          file_ref = PBXFileReference({'path': path})
          self.AppendChild(file_ref)
      else:
        # Add or get a PBXVariantGroup.  The variant group name is the same
        # as the basename (MainMenu.nib in the example above).  grandparent
        # specifies the path to the variant group itself, and path_split[-2:]
        # is the path of the specific variant relative to its group.
        variant_group_name = posixpath.basename(path)
        variant_group_ref = self.AddOrGetVariantGroupByNameAndPath(
            variant_group_name, grandparent)
        variant_path = posixpath.sep.join(path_split[-2:])
        variant_ref = variant_group_ref.GetChildByPath(variant_path)
        if variant_ref != None:
          assert variant_ref.__class__ == PBXFileReference
        else:
          variant_ref = PBXFileReference({'name': variant_name,
                                          'path': variant_path})
          variant_group_ref.AppendChild(variant_ref)
        # The caller is interested in the variant group, not the specific
        # variant file.
        file_ref = variant_group_ref
      return file_ref
    else:
      # Hierarchical recursion.  Add or get a PBXGroup corresponding to the
      # outermost path component, and then recurse into it, chopping off that
      # path component.
      next_dir = path_split[0]
      group_ref = self.GetChildByPath(next_dir)
      if group_ref != None:
        assert group_ref.__class__ == PBXGroup
      else:
        group_ref = PBXGroup({'path': next_dir})
        self.AppendChild(group_ref)
      return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]),
                                          hierarchical)
1263
1264 def AddOrGetVariantGroupByNameAndPath(self, name, path):
1265 """Returns an existing or new PBXVariantGroup for name and path.
1266
1267 If a PBXVariantGroup identified by the name and path arguments is already
1268 present as a child of this object, it is returned. Otherwise, a new
1269 PBXVariantGroup with the correct properties is created, added as a child,
1270 and returned.
1271
1272 This method will generally be called by AddOrGetFileByPath, which knows
1273 when to create a variant group based on the structure of the pathnames
1274 passed to it.
1275 """
1276
1277 key = (name, path)
1278 if key in self._variant_children_by_name_and_path:
1279 variant_group_ref = self._variant_children_by_name_and_path[key]
1280 assert variant_group_ref.__class__ == PBXVariantGroup
1281 return variant_group_ref
1282
1283 variant_group_properties = {'name': name}
1284 if path != None:
1285 variant_group_properties['path'] = path
1286 variant_group_ref = PBXVariantGroup(variant_group_properties)
1287 self.AppendChild(variant_group_ref)
1288
1289 return variant_group_ref
1290
1291 def TakeOverOnlyChild(self, recurse=False):
1292 """If this PBXGroup has only one child and it's also a PBXGroup, take
1293 it over by making all of its children this object's children.
1294
1295 This function will continue to take over only children when those children
1296 are groups. If there are three PBXGroups representing a, b, and c, with
1297 c inside b and b inside a, and a and b have no other children, this will
1298 result in a taking over both b and c, forming a PBXGroup for a/b/c.
1299
1300 If recurse is True, this function will recurse into children and ask them
1301 to collapse themselves by taking over only children as well. Assuming
1302 an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
1303 (d1, d2, and f are files, the rest are groups), recursion will result in
1304 a group for a/b/c containing a group for d3/e.
1305 """
1306
1307 # At this stage, check that child class types are PBXGroup exactly,
1308 # instead of using isinstance. The only subclass of PBXGroup,
1309 # PBXVariantGroup, should not participate in reparenting in the same way:
1310 # reparenting by merging different object types would be wrong.
1311 while len(self._properties['children']) == 1 and \
1312 self._properties['children'][0].__class__ == PBXGroup:
1313 # Loop to take over the innermost only-child group possible.
1314
1315 child = self._properties['children'][0]
1316
1317 # Assume the child's properties, including its children. Save a copy
1318 # of this object's old properties, because they'll still be needed.
1319 # This object retains its existing id and parent attributes.
1320 old_properties = self._properties
1321 self._properties = child._properties
1322 self._children_by_path = child._children_by_path
1323
1324 if not 'sourceTree' in self._properties or \
1325 self._properties['sourceTree'] == '<group>':
1326 # The child was relative to its parent. Fix up the path. Note that
1327 # children with a sourceTree other than "<group>" are not relative to
1328 # their parents, so no path fix-up is needed in that case.
1329 if 'path' in old_properties:
1330 if 'path' in self._properties:
1331 # Both the original parent and child have paths set.
1332 self._properties['path'] = posixpath.join(old_properties['path'],
1333 self._properties['path'])
1334 else:
1335 # Only the original parent has a path, use it.
1336 self._properties['path'] = old_properties['path']
1337 if 'sourceTree' in old_properties:
1338 # The original parent had a sourceTree set, use it.
1339 self._properties['sourceTree'] = old_properties['sourceTree']
1340
1341 # If the original parent had a name set, keep using it. If the original
1342 # parent didn't have a name but the child did, let the child's name
1343 # live on. If the name attribute seems unnecessary now, get rid of it.
1344 if 'name' in old_properties and old_properties['name'] != None and \
1345 old_properties['name'] != self.Name():
1346 self._properties['name'] = old_properties['name']
1347 if 'name' in self._properties and 'path' in self._properties and \
1348 self._properties['name'] == self._properties['path']:
1349 del self._properties['name']
1350
1351 # Notify all children of their new parent.
1352 for child in self._properties['children']:
1353 child.parent = self
1354
1355 # If asked to recurse, recurse.
1356 if recurse:
1357 for child in self._properties['children']:
1358 if child.__class__ == PBXGroup:
1359 child.TakeOverOnlyChild(recurse)
1360
1361 def SortGroup(self):
1362 self._properties['children'] = \
1363 sorted(self._properties['children'], cmp=lambda x,y: x.Compare(y))
1364
1365 # Recurse.
1366 for child in self._properties['children']:
1367 if isinstance(child, PBXGroup):
1368 child.SortGroup()
1369
1370
class XCFileLikeElement(XCHierarchicalElement):
  # Abstract base for objects that can be used as the fileRef property of
  # PBXBuildFile.

  def PathHashables(self):
    """Returns hashables identifying this object by its full path.

    A PBXBuildFile that refers to this object calls this to obtain hashables
    beyond this object's own, which are not unique enough on their own
    (they lack the parent hashables).  The result combines this object's
    hashables with those of every ancestor XCHierarchicalElement, ordered
    from the root of the hierarchy down to this object.
    """
    hashables = []
    node = self
    while node != None and isinstance(node, XCHierarchicalElement):
      # Prepend this level's hashables so ancestors come first.
      hashables = node.Hashables() + hashables
      node = node.parent
    return hashables
1391
1392
class XCContainerPortal(XCObject):
  # Abstract marker base for objects that can be used as the containerPortal
  # property of PBXContainerItemProxy: the PBXProject root object for
  # references within the same project file, or a PBXFileReference when the
  # referenced item lives in another project file.
  pass
1397
1398
class XCRemoteObject(XCObject):
  # Abstract marker base for objects that can be used as the
  # remoteGlobalIDString property of PBXContainerItemProxy (e.g. XCTarget
  # and PBXFileReference both derive from it).
  pass
1403
1404
class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
  """Represents a reference to a file (or folder) on disk.

  When neither explicitFileType nor lastKnownFileType is supplied at
  construction, lastKnownFileType is guessed from the path's extension.
  """

  _schema = XCFileLikeElement._schema.copy()
  _schema.update({
    'explicitFileType': [0, str, 0, 0],
    'lastKnownFileType': [0, str, 0, 0],
    'name': [0, str, 0, 0],
    'path': [0, str, 0, 1],
  })

  # Weird output rules for PBXFileReference.
  _should_print_single_line = True
  # super
  _encode_transforms = XCFileLikeElement._alternate_encode_transforms

  def __init__(self, properties=None, id=None, parent=None):
    # super
    XCFileLikeElement.__init__(self, properties, id, parent)

    # A trailing slash marks a directory; strip it but remember the fact.
    is_dir = False
    if 'path' in self._properties and self._properties['path'].endswith('/'):
      self._properties['path'] = self._properties['path'][:-1]
      is_dir = True

    if 'path' in self._properties and \
       not 'lastKnownFileType' in self._properties and \
       not 'explicitFileType' in self._properties:
      # TODO(mark): This is the replacement for a replacement for a quick
      # hack.  It is no longer incredibly sucky, but this list needs to be
      # extended.
      extension_map = {
        'a': 'archive.ar',
        'app': 'wrapper.application',
        'bdic': 'file',
        'bundle': 'wrapper.cfbundle',
        'c': 'sourcecode.c.c',
        'cc': 'sourcecode.cpp.cpp',
        'cpp': 'sourcecode.cpp.cpp',
        'css': 'text.css',
        'cxx': 'sourcecode.cpp.cpp',
        'dylib': 'compiled.mach-o.dylib',
        'framework': 'wrapper.framework',
        'h': 'sourcecode.c.h',
        'hxx': 'sourcecode.cpp.h',
        'icns': 'image.icns',
        'java': 'sourcecode.java',
        'js': 'sourcecode.javascript',
        'm': 'sourcecode.c.objc',
        'mm': 'sourcecode.cpp.objcpp',
        'nib': 'wrapper.nib',
        'pdf': 'image.pdf',
        'pl': 'text.script.perl',
        'plist': 'text.plist.xml',
        'pm': 'text.script.perl',
        'png': 'image.png',
        'py': 'text.script.python',
        'r': 'sourcecode.rez',
        'rez': 'sourcecode.rez',
        's': 'sourcecode.asm',
        'strings': 'text.plist.strings',
        'ttf': 'file',
        'xcconfig': 'text.xcconfig',
        'xib': 'file.xib',
        'y': 'sourcecode.yacc',
      }

      if is_dir:
        file_type = 'folder'
      else:
        basename = posixpath.basename(self._properties['path'])
        ext = posixpath.splitext(basename)[1]
        # Look up with a lowercased extension, dot removed.
        # TODO(mark): Maybe it should try with the original case first and
        # fall back to lowercase, in case there are any instances where case
        # matters.  There currently aren't.
        if ext != '':
          ext = ext[1:].lower()

        # TODO(mark): "text" is the default value, but "file" is appropriate
        # for unrecognized files not containing text.  Xcode seems to choose
        # based on content.
        file_type = extension_map.get(ext, 'text')

      self._properties['lastKnownFileType'] = file_type
1487
1488
class PBXVariantGroup(PBXGroup, XCFileLikeElement):
  """PBXVariantGroup is used by Xcode to represent localizations."""
  # No additions to the schema relative to PBXGroup.  Also deriving from
  # XCFileLikeElement lets a variant group serve as the fileRef of a
  # PBXBuildFile.
  pass
1493
1494
1495 # PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined below
1496 # because it uses PBXContainerItemProxy, defined below.
1497
1498
class XCBuildConfiguration(XCObject):
  """A named build configuration carrying a dict of build settings."""

  _schema = XCObject._schema.copy()
  _schema.update({
    'baseConfigurationReference': [0, PBXFileReference, 0, 0],
    'buildSettings': [0, dict, 0, 1, {}],
    'name': [0, str, 0, 1],
  })

  def HasBuildSetting(self, key):
    """Returns whether a setting named |key| is present."""
    return key in self._properties['buildSettings']

  def GetBuildSetting(self, key):
    """Returns the value of setting |key|; raises KeyError if unset."""
    return self._properties['buildSettings'][key]

  def SetBuildSetting(self, key, value):
    """Stores |value| under setting |key|, replacing any prior value."""
    # TODO(mark): If a list, copy?
    self._properties['buildSettings'][key] = value

  def AppendBuildSetting(self, key, value):
    """Appends |value| to the list-valued setting |key|, creating the
    list on first use."""
    self._properties['buildSettings'].setdefault(key, []).append(value)

  def DelBuildSetting(self, key):
    """Removes setting |key|; removing an absent key is a no-op."""
    self._properties['buildSettings'].pop(key, None)
1525
1526
class XCConfigurationList(XCObject):
  """An ordered collection of XCBuildConfiguration objects.

  Most methods either locate a child configuration or fan a build-setting
  operation out across all child configurations.
  """

  # _configs is the default list of configurations.
  _configs = [XCBuildConfiguration({'name': 'Debug'}),
              XCBuildConfiguration({'name': 'Release'})]

  _schema = XCObject._schema.copy()
  _schema.update({
    'buildConfigurations': [1, XCBuildConfiguration, 1, 1, _configs],
    'defaultConfigurationIsVisible': [0, int, 0, 1, 1],
    'defaultConfigurationName': [0, str, 0, 1, 'Release'],
  })

  def Name(self):
    """Synthesizes a display name from the owning object's type and name."""
    return ('Build configuration list for ' +
            self.parent.__class__.__name__ +
            ' "' + self.parent.Name() + '"')

  def ConfigurationNamed(self, name):
    """Convenience accessor to obtain an XCBuildConfiguration by name."""
    for config in self._properties['buildConfigurations']:
      if config._properties['name'] == name:
        return config

    raise KeyError(name)

  def DefaultConfiguration(self):
    """Convenience accessor to obtain the default XCBuildConfiguration."""
    return self.ConfigurationNamed(self._properties['defaultConfigurationName'])

  def HasBuildSetting(self, key):
    """Determines the state of a build setting in all XCBuildConfiguration
    child objects.

    Returns:
      1 if every child has key in its build settings with the same value.
      0 if no child has key in its build settings.
      -1 if only some children have the key, or their values disagree.
    """

    seen_has = None
    seen_value = None
    for config in self._properties['buildConfigurations']:
      config_has = config.HasBuildSetting(key)
      if seen_has is None:
        seen_has = config_has
      elif seen_has != config_has:
        # Present in some children but not others.
        return -1

      if config_has:
        config_value = config.GetBuildSetting(key)
        if seen_value is None:
          seen_value = config_value
        elif seen_value != config_value:
          # Children disagree on the value.
          return -1

    if not seen_has:
      return 0

    return 1

  def GetBuildSetting(self, key):
    """Gets the build setting for key.

    All child XCConfiguration objects must have the same value set for the
    setting, or a ValueError will be raised.
    """

    # TODO(mark): This is wrong for build settings that are lists.  The list
    # contents should be compared (and a list copy returned?)

    value = None
    for config in self._properties['buildConfigurations']:
      config_value = config.GetBuildSetting(key)
      if value is None:
        value = config_value
      elif value != config_value:
        raise ValueError('Variant values for ' + key)

    return value

  def SetBuildSetting(self, key, value):
    """Sets the build setting for key to value in every child
    XCBuildConfiguration object."""

    for config in self._properties['buildConfigurations']:
      config.SetBuildSetting(key, value)

  def AppendBuildSetting(self, key, value):
    """Appends value to the list-valued build setting for key in every
    child XCBuildConfiguration object."""

    for config in self._properties['buildConfigurations']:
      config.AppendBuildSetting(key, value)

  def DelBuildSetting(self, key):
    """Deletes the build setting key from every child XCBuildConfiguration
    object."""

    for config in self._properties['buildConfigurations']:
      config.DelBuildSetting(key)
1633
1634
class PBXBuildFile(XCObject):
  """Wraps an XCFileLikeElement for membership in a build phase's files."""

  _schema = XCObject._schema.copy()
  _schema.update({
    'fileRef': [0, XCFileLikeElement, 0, 1],
  })

  # Weird output rules for PBXBuildFile.
  _should_print_single_line = True
  _encode_transforms = XCObject._alternate_encode_transforms

  def Name(self):
    """Returns a name like "main.cc in Sources"."""
    file_name = self._properties['fileRef'].Name()
    return file_name + ' in ' + self.parent.Name()

  def Hashables(self):
    """Extends the base hashables with the referenced file's path hashables.

    Name() alone is not a complete pathname, so PathHashables is used to
    make the result unique.  Two PBXBuildFiles only produce identical
    hashables if the same file is added to the same target more than once,
    which would be invalid anyway.
    """
    # super
    hashables = XCObject.Hashables(self)
    hashables.extend(self._properties['fileRef'].PathHashables())
    return hashables
1662
1663
class XCBuildPhase(XCObject):
  """Abstract base for build phase classes.  Not represented in a project
  file.

  A build phase owns an ordered "files" list of PBXBuildFile objects and
  maintains two lookup caches over that list.

  Attributes:
    _files_by_path: A dict mapping each path of a child in the files list by
      path (keys) to the corresponding PBXBuildFile children (values).
    _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
      to the corresponding PBXBuildFile children (values).
  """

  # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
  # actually have a "files" list.  XCBuildPhase should not have "files" but
  # another abstract subclass of it should provide this, and concrete build
  # phase types that do have "files" lists should be derived from that new
  # abstract subclass.  XCBuildPhase should only provide buildActionMask and
  # runOnlyForDeploymentPostprocessing, and not files or the various
  # file-related methods and attributes.

  _schema = XCObject._schema.copy()
  _schema.update({
    'buildActionMask': [0, int, 0, 1, 0x7fffffff],
    'files': [1, PBXBuildFile, 1, 1, []],
    'runOnlyForDeploymentPostprocessing': [0, int, 0, 1, 0],
  })

  def __init__(self, properties=None, id=None, parent=None):
    """Initializes the phase and seeds the lookup caches from any
    PBXBuildFile objects already present in "files"."""
    # super
    XCObject.__init__(self, properties, id, parent)

    self._files_by_path = {}
    self._files_by_xcfilelikeelement = {}
    for pbxbuildfile in self._properties.get('files', []):
      self._AddBuildFileToDicts(pbxbuildfile)

  def FileGroup(self, path):
    # Subclasses must override this by returning a two-element tuple.  The
    # first item in the tuple should be the PBXGroup to which "path" should be
    # added, either as a child or deeper descendant.  The second item should
    # be a boolean indicating whether files should be added into hierarchical
    # groups or one single flat group.
    raise NotImplementedError, \
          self.__class__.__name__ + ' must implement FileGroup'

  def _AddPathToDict(self, pbxbuildfile, path):
    """Adds path to the dict tracking paths belonging to this build phase.

    If the path is already a member of this build phase, raises an exception.
    """

    if path in self._files_by_path:
      raise ValueError, 'Found multiple build files with path ' + path
    self._files_by_path[path] = pbxbuildfile

  def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
    """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.

    If path is specified, then it is the path that is being added to the
    phase, and pbxbuildfile must contain either a PBXFileReference directly
    referencing that path, or it must contain a PBXVariantGroup that itself
    contains a PBXFileReference referencing the path.

    If path is not specified, either the PBXFileReference's path or the paths
    of all children of the PBXVariantGroup are taken as being added to the
    phase.

    If the path is already present in the phase, raises an exception.

    If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
    are already present in the phase, referenced by a different PBXBuildFile
    object, raises an exception.  This does not raise an exception when
    a PBXFileReference or PBXVariantGroup reappear and are referenced by the
    same PBXBuildFile that has already introduced them, because in the case
    of PBXVariantGroup objects, they may correspond to multiple paths that are
    not all added simultaneously.  When this situation occurs, the path needs
    to be added to _files_by_path, but nothing needs to change in
    _files_by_xcfilelikeelement, and the caller should have avoided adding
    the PBXBuildFile if it is already present in the list of children.
    """

    xcfilelikeelement = pbxbuildfile._properties['fileRef']

    # NOTE(review): when path is given and fileRef is not a PBXVariantGroup,
    # paths stays empty and no _files_by_path entry is recorded here —
    # apparently relying on the element-level check below; confirm intended.
    paths = []
    if path != None:
      # It's best when the caller provides the path.
      if isinstance(xcfilelikeelement, PBXVariantGroup):
        paths.append(path)
    else:
      # If the caller didn't provide a path, there can be either multiple
      # paths (PBXVariantGroup) or one.
      if isinstance(xcfilelikeelement, PBXVariantGroup):
        for variant in xcfilelikeelement._properties['children']:
          paths.append(variant.FullPath())
      else:
        paths.append(xcfilelikeelement.FullPath())

    # Add the paths first, because if something's going to raise, the
    # messages provided by _AddPathToDict are more useful owing to its
    # having access to a real pathname and not just an object's Name().
    for a_path in paths:
      self._AddPathToDict(pbxbuildfile, a_path)

    # If another PBXBuildFile references this XCFileLikeElement, there's a
    # problem.
    if xcfilelikeelement in self._files_by_xcfilelikeelement and \
       self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile:
      raise ValueError, 'Found multiple build files for ' + \
                        xcfilelikeelement.Name()
    self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile

  def AppendBuildFile(self, pbxbuildfile, path=None):
    """Adds a PBXBuildFile to this phase and updates the lookup caches."""
    # Callers should use this instead of calling
    # AppendProperty('files', pbxbuildfile) directly because this function
    # maintains the object's dicts.  Better yet, callers can just call AddFile
    # with a pathname and not worry about building their own PBXBuildFile
    # objects.
    self.AppendProperty('files', pbxbuildfile)
    self._AddBuildFileToDicts(pbxbuildfile, path)

  def AddFile(self, path):
    """Adds the file at path to this phase, creating or reusing the
    appropriate PBXBuildFile and file reference."""
    (file_group, hierarchical) = self.FileGroup(path)
    file_ref = file_group.AddOrGetFileByPath(path, hierarchical)

    if file_ref in self._files_by_xcfilelikeelement and \
       isinstance(file_ref, PBXVariantGroup):
      # There's already a PBXBuildFile in this phase corresponding to the
      # PBXVariantGroup.  path just provides a new variant that belongs to
      # the group.  Add the path to the dict.
      pbxbuildfile = self._files_by_xcfilelikeelement[file_ref]
      self._AddBuildFileToDicts(pbxbuildfile, path)
    else:
      # Add a new PBXBuildFile to get file_ref into the phase.
      pbxbuildfile = PBXBuildFile({'fileRef': file_ref})
      self.AppendBuildFile(pbxbuildfile, path)
1798
1799
class PBXHeadersBuildPhase(XCBuildPhase):
  # No additions to the schema relative to XCBuildPhase.

  def Name(self):
    """Returns the fixed display name for this phase type."""
    return 'Headers'

  def FileGroup(self, path):
    """Asks the owning project for the appropriate root group for |path|."""
    return self.PBXProjectAncestor().RootGroupForPath(path)
1808
1809
class PBXResourcesBuildPhase(XCBuildPhase):
  # No additions to the schema relative to XCBuildPhase.

  def Name(self):
    """Returns the fixed display name for this phase type."""
    return 'Resources'

  def FileGroup(self, path):
    """Asks the owning project for the appropriate root group for |path|."""
    return self.PBXProjectAncestor().RootGroupForPath(path)
1818
1819
class PBXSourcesBuildPhase(XCBuildPhase):
  # No additions to the schema relative to XCBuildPhase.

  def Name(self):
    """Returns the fixed display name for this phase type."""
    return 'Sources'

  def FileGroup(self, path):
    """Asks the owning project for the appropriate root group for |path|."""
    return self.PBXProjectAncestor().RootGroupForPath(path)
1828
1829
class PBXFrameworksBuildPhase(XCBuildPhase):
  # No additions to the schema relative to XCBuildPhase.

  def Name(self):
    """Returns the fixed display name for this phase type."""
    return 'Frameworks'

  def FileGroup(self, path):
    """Frameworks all go into the project's flat Frameworks group."""
    return (self.PBXProjectAncestor().FrameworksGroup(), False)
1838
1839
class PBXShellScriptBuildPhase(XCBuildPhase):
  _schema = XCBuildPhase._schema.copy()
  _schema.update({
    'inputPaths': [1, str, 0, 1, []],
    'name': [0, str, 0, 0],
    'outputPaths': [1, str, 0, 1, []],
    'shellPath': [0, str, 0, 1, '/bin/sh'],
    'shellScript': [0, str, 0, 1],
    'showEnvVarsInLog': [0, int, 0, 0],
  })

  def Name(self):
    """Returns the custom phase name when set, 'ShellScript' otherwise."""
    return self._properties.get('name', 'ShellScript')
1856
1857
class PBXCopyFilesBuildPhase(XCBuildPhase):
  _schema = XCBuildPhase._schema.copy()
  _schema.update({
    'dstPath': [0, str, 0, 1],
    'dstSubfolderSpec': [0, int, 0, 1],
    'name': [0, str, 0, 0],
  })

  # path_tree_re matches "$(DIR)/path" or just "$(DIR)".  Match group 1 is
  # "DIR", match group 3 is "path" or None.
  path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$')

  # path_tree_to_subfolder maps names of Xcode variables to the associated
  # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object.
  path_tree_to_subfolder = {
    'BUILT_PRODUCTS_DIR': 16,  # Products Directory
    # Other types that can be chosen via the Xcode UI.
    # TODO(mark): Map Xcode variable names to these.
    # : 1,  # Wrapper
    # : 6,  # Executables: 6
    # : 7,  # Resources
    # : 15,  # Java Resources
    # : 10,  # Frameworks
    # : 11,  # Shared Frameworks
    # : 12,  # Shared Support
    # : 13,  # PlugIns
  }

  def Name(self):
    """Returns the custom phase name when set, 'CopyFiles' otherwise."""
    return self._properties.get('name', 'CopyFiles')

  def FileGroup(self, path):
    """Asks the owning project for the appropriate root group for |path|."""
    return self.PBXProjectAncestor().RootGroupForPath(path)

  def SetDestination(self, path):
    """Set the dstSubfolderSpec and dstPath properties from path.

    path may be specified in the same notation used for
    XCHierarchicalElements, specifically, "$(DIR)/path".
    """

    match = self.path_tree_re.search(path)
    if match:
      # The destination is anchored to an Xcode variable.
      variable = match.group(1)
      remainder = match.group(3)

      if variable in self.path_tree_to_subfolder:
        subfolder = self.path_tree_to_subfolder[variable]
        if remainder == None:
          remainder = ''
      else:
        # The path starts with an unrecognized Xcode variable name like
        # $(SRCROOT).  Xcode will still handle this as an "absolute path"
        # that starts with the variable.
        subfolder = 0
        remainder = path
    elif path.startswith('/'):
      # Special case.  Absolute paths are in dstSubfolderSpec 0.
      subfolder = 0
      remainder = path[1:]
    else:
      raise ValueError('Can\'t use path %s in a %s' %
                       (path, self.__class__.__name__))

    self._properties['dstPath'] = remainder
    self._properties['dstSubfolderSpec'] = subfolder
1928
1929
class PBXBuildRule(XCObject):
  """A custom rule describing how Xcode processes files of a given type."""

  _schema = XCObject._schema.copy()
  _schema.update({
    'compilerSpec': [0, str, 0, 1],
    'filePatterns': [0, str, 0, 0],
    'fileType': [0, str, 0, 1],
    'isEditable': [0, int, 0, 1, 1],
    'outputFiles': [1, str, 0, 1, []],
    'script': [0, str, 0, 0],
  })

  def Name(self):
    # Not very inspired, but it's what Xcode uses.
    return self.__class__.__name__

  def Hashables(self):
    # super
    hashables = XCObject.Hashables(self)

    # Mix in the identifying weak properties: fileType always, and
    # filePatterns only when present.
    hashables.append(self._properties['fileType'])
    if 'filePatterns' in self._properties:
      hashables.append(self._properties['filePatterns'])
    return hashables
1954
1955
class PBXContainerItemProxy(XCObject):
  # When referencing an item in this project file, containerPortal is the
  # PBXProject root object of this project file.  When referencing an item
  # in another project file, containerPortal is a PBXFileReference
  # identifying the other project file.
  #
  # proxyType is 1 when serving as a proxy to an XCTarget (in this project
  # file or another), and 2 when serving as a proxy to a PBXFileReference
  # (in another project file).  Type 2 is used for references to the
  # products of the other project file's targets.
  #
  # Xcode is weird about remoteGlobalIDString.  Usually it's printed without
  # a comment, indicating that it's tracked internally simply as a string,
  # but sometimes (usually when the object is initially created) it's
  # printed with a comment, indicating that it's tracked as a project file
  # object at least sometimes.  This module always tracks it as an object,
  # but contains a hack to prevent it from printing the comment in the
  # project file output.  See _XCKVPrint.
  _schema = XCObject._schema.copy()
  _schema.update({
    'containerPortal': [0, XCContainerPortal, 0, 1],
    'proxyType': [0, int, 0, 1],
    'remoteGlobalIDString': [0, XCRemoteObject, 0, 1],
    'remoteInfo': [0, str, 0, 1],
  })

  def __repr__(self):
    portal_name = self._properties['containerPortal'].Name()
    name = '%s.gyp:%s' % (portal_name, self._properties['remoteInfo'])
    return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))

  def Name(self):
    # Admittedly not the best name, but it's what Xcode uses.
    return self.__class__.__name__

  def Hashables(self):
    # super
    hashables = XCObject.Hashables(self)

    # Incorporate the hashables of the weakly-held containerPortal and
    # remoteGlobalIDString objects.
    hashables.extend(self._properties['containerPortal'].Hashables())
    hashables.extend(self._properties['remoteGlobalIDString'].Hashables())
    return hashables
1999
2000
class PBXTargetDependency(XCObject):
  # The "target" property accepts an XCTarget object, and obviously not
  # NoneType.  But XCTarget is defined below, so it can't be put into the
  # schema yet.  The definition of PBXTargetDependency can't be moved below
  # XCTarget because XCTarget's own schema references PBXTargetDependency.
  # Python doesn't deal well with this circular relationship, and doesn't
  # have a real way to do forward declarations.  To work around, the type of
  # the "target" property is reset below, after XCTarget is defined.
  #
  # At least one of "name" and "target" is required.
  _schema = XCObject._schema.copy()
  _schema.update({
    'name': [0, str, 0, 0],
    'target': [0, None.__class__, 0, 0],
    'targetProxy': [0, PBXContainerItemProxy, 1, 1],
  })

  def __repr__(self):
    props = self._properties
    # Prefer the explicit name; fall back to the target's own name.
    name = props.get('name') or props['target'].Name()
    return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))

  def Name(self):
    # Admittedly not the best name, but it's what Xcode uses.
    return self.__class__.__name__

  def Hashables(self):
    # super, plus the hashables of the weakly-held targetProxy.
    hashables = XCObject.Hashables(self)
    hashables.extend(self._properties['targetProxy'].Hashables())
    return hashables
2033
2034
class PBXReferenceProxy(XCFileLikeElement):
  # A file-like element whose backing file is reached through the
  # PBXContainerItemProxy in 'remoteRef' — presumably a product of another
  # project file's target (see the proxyType 2 notes on
  # PBXContainerItemProxy); confirm against callers.
  _schema = XCFileLikeElement._schema.copy()
  _schema.update({
    'fileType': [0, str, 0, 1],
    'path': [0, str, 0, 1],
    'remoteRef': [0, PBXContainerItemProxy, 1, 1],
  })
2042
2043
class XCTarget(XCRemoteObject):
  # An XCTarget is really just an XCObject; deriving from XCRemoteObject only
  # exists to allow PBXProject to be used in the remoteGlobalIDString property
  # of PBXContainerItemProxy.
  #
  # Supplying a "name" property at instantiation may also affect
  # "productName", which may in turn affect the "PRODUCT_NAME" build setting
  # in children of "buildConfigurationList".  See __init__ below.
  _schema = XCRemoteObject._schema.copy()
  _schema.update({
    'buildConfigurationList': [0, XCConfigurationList, 1, 1,
                               XCConfigurationList()],
    'buildPhases': [1, XCBuildPhase, 1, 1, []],
    'dependencies': [1, PBXTargetDependency, 1, 1, []],
    'name': [0, str, 0, 1],
    'productName': [0, str, 0, 1],
  })

  def __init__(self, properties=None, id=None, parent=None,
               force_outdir=None, force_prefix=None, force_extension=None):
    # super
    XCRemoteObject.__init__(self, properties, id, parent)

    # Fill in defaults the schema can't express: derive "productName" from
    # "name" when absent, and seed PRODUCT_NAME into the configurations when
    # no configuration sets it already.
    if 'name' in self._properties and \
       'productName' not in self._properties:
      self.SetProperty('productName', self._properties['name'])

    if 'productName' in self._properties and \
       'buildConfigurationList' in self._properties:
      configs = self._properties['buildConfigurationList']
      if configs.HasBuildSetting('PRODUCT_NAME') == 0:
        configs.SetBuildSetting('PRODUCT_NAME',
                                self._properties['productName'])

  def AddDependency(self, other):
    """Makes this target depend on the XCTarget |other|.

    Targets within the same project file are linked with a direct "target"
    reference; targets in other project files are routed through a project
    reference and linked by name.
    """
    pbxproject = self.PBXProjectAncestor()
    other_pbxproject = other.PBXProjectAncestor()
    if pbxproject == other_pbxproject:
      # Intra-project dependency: the proxy's containerPortal is this
      # project file's root object.
      proxy = PBXContainerItemProxy({'containerPortal': pbxproject,
                                     'proxyType': 1,
                                     'remoteGlobalIDString': other,
                                     'remoteInfo': other.Name()})
      self.AppendProperty('dependencies',
                          PBXTargetDependency({'target': other,
                                               'targetProxy': proxy}))
    else:
      # Cross-project dependency: reach the other target through a
      # reference to its project file.
      other_project_ref = \
          pbxproject.AddOrGetProjectReference(other_pbxproject)[1]
      proxy = PBXContainerItemProxy({
            'containerPortal': other_project_ref,
            'proxyType': 1,
            'remoteGlobalIDString': other,
            'remoteInfo': other.Name(),
          })
      self.AppendProperty('dependencies',
                          PBXTargetDependency({'name': other.Name(),
                                               'targetProxy': proxy}))

  # The methods below simply forward to the buildConfigurationList child.

  def ConfigurationNamed(self, name):
    return self._properties['buildConfigurationList'].ConfigurationNamed(name)

  def DefaultConfiguration(self):
    return self._properties['buildConfigurationList'].DefaultConfiguration()

  def HasBuildSetting(self, key):
    return self._properties['buildConfigurationList'].HasBuildSetting(key)

  def GetBuildSetting(self, key):
    return self._properties['buildConfigurationList'].GetBuildSetting(key)

  def SetBuildSetting(self, key, value):
    config_list = self._properties['buildConfigurationList']
    return config_list.SetBuildSetting(key, value)

  def AppendBuildSetting(self, key, value):
    config_list = self._properties['buildConfigurationList']
    return config_list.AppendBuildSetting(key, value)

  def DelBuildSetting(self, key):
    return self._properties['buildConfigurationList'].DelBuildSetting(key)
2134
2135
# Redefine the type of the "target" property.  See PBXTargetDependency._schema
# above: XCTarget was not yet defined when that schema was built, so the slot
# was declared as NoneType and is patched to the real type here.
PBXTargetDependency._schema['target'][1] = XCTarget
2139
2140
2141 class PBXNativeTarget(XCTarget):
2142 # buildPhases is overridden in the schema to be able to set defaults.
2143 #
2144 # NOTE: Contrary to most objects, it is advisable to set parent when
2145 # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject
2146 # object. A parent reference is required for a PBXNativeTarget during
2147 # construction to be able to set up the target defaults for productReference,
2148 # because a PBXBuildFile object must be created for the target and it must
2149 # be added to the PBXProject's mainGroup hierarchy.
2150 _schema = XCTarget._schema.copy()
2151 _schema.update({
2152 'buildPhases': [1, XCBuildPhase, 1, 1,
2153 [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()]],
2154 'buildRules': [1, PBXBuildRule, 1, 1, []],
2155 'productReference': [0, PBXFileReference, 0, 1],
2156 'productType': [0, str, 0, 1],
2157 })
2158
2159 # Mapping from Xcode product-types to settings. The settings are:
2160 # filetype : used for explicitFileType in the project file
2161 # prefix : the prefix for the file name
2162 # suffix : the suffix for the filen ame
2163 # set_xc_exe_prefix : bool to say if EXECUTABLE_PREFIX should be set to the
2164 # prefix value.
2165 _product_filetypes = {
2166 'com.apple.product-type.application': ['wrapper.application',
2167 '', '.app', False],
2168 'com.apple.product-type.bundle': ['wrapper.cfbundle',
2169 '', '.bundle', False],
2170 'com.apple.product-type.framework': ['wrapper.framework',
2171 '', '.framework', False],
2172 'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib',
2173 'lib', '.dylib', True],
2174 'com.apple.product-type.library.static': ['archive.ar',
2175 'lib', '.a', False],
2176 'com.apple.product-type.tool': ['compiled.mach-o.executable',
2177 '', '', False],
2178 }
2179
2180 def __init__(self, properties=None, id=None, parent=None,
2181 force_outdir=None, force_prefix=None, force_extension=None):
2182 # super
2183 XCTarget.__init__(self, properties, id, parent)
2184
2185 if 'productName' in self._properties and \
2186 'productType' in self._properties and \
2187 not 'productReference' in self._properties and \
2188 self._properties['productType'] in self._product_filetypes:
2189 products_group = None
2190 pbxproject = self.PBXProjectAncestor()
2191 if pbxproject != None:
2192 products_group = pbxproject.ProductsGroup()
2193
2194 if products_group != None:
2195 (filetype, prefix, suffix, set_xc_exe_prefix) = \
2196 self._product_filetypes[self._properties['productType']]
2197
2198 if force_extension is not None:
2199 # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
2200 if filetype.startswith('wrapper.'):
2201 self.SetBuildSetting('WRAPPER_EXTENSION', force_extension)
2202 else:
2203 # Extension override.
2204 suffix = '.' + force_extension
2205 self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension)
2206
2207 if filetype.startswith('compiled.mach-o.executable'):
2208 product_name = self._properties['productName']
2209 product_name += suffix
2210 suffix = ''
2211 self.SetProperty('productName', product_name)
2212 self.SetBuildSetting('PRODUCT_NAME', product_name)
2213
2214 # Xcode handles most prefixes based on the target type, however there
2215 # are exceptions. If a "BSD Dynamic Library" target is added in the
2216 # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that
2217 # behavior.
2218 if force_prefix is not None:
2219 prefix = force_prefix
2220 if filetype.startswith('wrapper.'):
2221 self.SetBuildSetting('WRAPPER_PREFIX', prefix)
2222 else:
2223 self.SetBuildSetting('EXECUTABLE_PREFIX', prefix)
2224
2225 if force_outdir is not None:
2226 self.SetBuildSetting('TARGET_BUILD_DIR', force_outdir)
2227
2228 # TODO(tvl): Remove the below hack.
2229 # http://code.google.com/p/gyp/issues/detail?id=122
2230
2231 # Some targets include the prefix in the target_name. These targets
2232 # really should just add a product_name setting that doesn't include
2233 # the prefix. For example:
2234 # target_name = 'libevent', product_name = 'event'
2235 # This check cleans up for them.
2236 product_name = self._properties['productName']
2237 prefix_len = len(prefix)
2238 if prefix_len and (product_name[:prefix_len] == prefix):
2239 product_name = product_name[prefix_len:]
2240 self.SetProperty('productName', product_name)
2241 self.SetBuildSetting('PRODUCT_NAME', product_name)
2242
2243 ref_props = {
2244 'explicitFileType': filetype,
2245 'includeInIndex': 0,
2246 'path': prefix + product_name + suffix,
2247 'sourceTree': 'BUILT_PRODUCTS_DIR',
2248 }
2249 file_ref = PBXFileReference(ref_props)
2250 products_group.AppendChild(file_ref)
2251 self.SetProperty('productReference', file_ref)
2252
2253 def GetBuildPhaseByType(self, type):
2254 if not 'buildPhases' in self._properties:
2255 return None
2256
2257 the_phase = None
2258 for phase in self._properties['buildPhases']:
2259 if isinstance(phase, type):
2260 # Some phases may be present in multiples in a well-formed project file,
2261 # but phases like PBXSourcesBuildPhase may only be present singly, and
2262 # this function is intended as an aid to GetBuildPhaseByType. Loop
2263 # over the entire list of phases and assert if more than one of the
2264 # desired type is found.
2265 assert the_phase == None
2266 the_phase = phase
2267
2268 return the_phase
2269
2270 def ResourcesPhase(self):
2271 resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase)
2272 if resources_phase == None:
2273 resources_phase = PBXResourcesBuildPhase()
2274
2275 # The resources phase should come before the sources and frameworks
2276 # phases, if any.
2277 insert_at = len(self._properties['buildPhases'])
2278 for index in xrange(0, len(self._properties['buildPhases'])):
2279 phase = self._properties['buildPhases'][index]
2280 if isinstance(phase, PBXSourcesBuildPhase) or \
2281 isinstance(phase, PBXFrameworksBuildPhase):
2282 insert_at = index
2283 break
2284
2285 self._properties['buildPhases'].insert(insert_at, resources_phase)
2286 resources_phase.parent = self
2287
2288 return resources_phase
2289
2290 def SourcesPhase(self):
2291 sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase)
2292 if sources_phase == None:
2293 sources_phase = PBXSourcesBuildPhase()
2294 self.AppendProperty('buildPhases', sources_phase)
2295
2296 return sources_phase
2297
2298 def FrameworksPhase(self):
2299 frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase)
2300 if frameworks_phase == None:
2301 frameworks_phase = PBXFrameworksBuildPhase()
2302 self.AppendProperty('buildPhases', frameworks_phase)
2303
2304 return frameworks_phase
2305
2306 def AddDependency(self, other):
2307 # super
2308 XCTarget.AddDependency(self, other)
2309
2310 static_library_type = 'com.apple.product-type.library.static'
2311 shared_library_type = 'com.apple.product-type.library.dynamic'
2312 framework_type = 'com.apple.product-type.framework'
2313 if isinstance(other, PBXNativeTarget) and \
2314 'productType' in self._properties and \
2315 self._properties['productType'] != static_library_type and \
2316 'productType' in other._properties and \
2317 (other._properties['productType'] == static_library_type or \
2318 ((other._properties['productType'] == shared_library_type or \
2319 other._properties['productType'] == framework_type) and \
2320 ((not other.HasBuildSetting('MACH_O_TYPE')) or
2321 other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))):
2322
2323 file_ref = other.GetProperty('productReference')
2324
2325 pbxproject = self.PBXProjectAncestor()
2326 other_pbxproject = other.PBXProjectAncestor()
2327 if pbxproject != other_pbxproject:
2328 other_project_product_group = \
2329 pbxproject.AddOrGetProjectReference(other_pbxproject)[0]
2330 file_ref = other_project_product_group.GetChildByRemoteObject(file_ref)
2331
2332 self.FrameworksPhase().AppendProperty('files',
2333 PBXBuildFile({'fileRef': file_ref}))
2334
2335
2336 class PBXAggregateTarget(XCTarget):
2337 pass
2338
2339
2340 class PBXProject(XCContainerPortal):
2341 # A PBXProject is really just an XCObject, the XCContainerPortal thing is
2342 # just to allow PBXProject to be used in the containerPortal property of
2343 # PBXContainerItemProxy.
2344 """
2345
2346 Attributes:
2347 path: "sample.xcodeproj". TODO(mark) Document me!
2348 _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each
2349 value is a reference to the dict in the
2350 projectReferences list associated with the keyed
2351 PBXProject.
2352 """
2353
2354 _schema = XCContainerPortal._schema.copy()
2355 _schema.update({
2356 'attributes': [0, dict, 0, 0],
2357 'buildConfigurationList': [0, XCConfigurationList, 1, 1,
2358 XCConfigurationList()],
2359 'compatibilityVersion': [0, str, 0, 1, 'Xcode 3.1'],
2360 'hasScannedForEncodings': [0, int, 0, 1, 1],
2361 'mainGroup': [0, PBXGroup, 1, 1, PBXGroup()],
2362 'projectDirPath': [0, str, 0, 1, ''],
2363 'projectReferences': [1, dict, 0, 0],
2364 'projectRoot': [0, str, 0, 1, ''],
2365 'targets': [1, XCTarget, 1, 1, []],
2366 })
2367
2368 def __init__(self, properties=None, id=None, parent=None, path=None):
2369 self.path = path
2370 self._other_pbxprojects = {}
2371 # super
2372 return XCContainerPortal.__init__(self, properties, id, parent)
2373
2374 def Name(self):
2375 name = self.path
2376 if name[-10:] == '.xcodeproj':
2377 name = name[:-10]
2378 return posixpath.basename(name)
2379
2380 def Path(self):
2381 return self.path
2382
2383 def Comment(self):
2384 return 'Project object'
2385
2386 def Children(self):
2387 # super
2388 children = XCContainerPortal.Children(self)
2389
2390 # Add children that the schema doesn't know about. Maybe there's a more
2391 # elegant way around this, but this is the only case where we need to own
2392 # objects in a dictionary (that is itself in a list), and three lines for
2393 # a one-off isn't that big a deal.
2394 if 'projectReferences' in self._properties:
2395 for reference in self._properties['projectReferences']:
2396 children.append(reference['ProductGroup'])
2397
2398 return children
2399
2400 def PBXProjectAncestor(self):
2401 return self
2402
2403 def _GroupByName(self, name):
2404 if not 'mainGroup' in self._properties:
2405 self.SetProperty('mainGroup', PBXGroup())
2406
2407 main_group = self._properties['mainGroup']
2408 group = main_group.GetChildByName(name)
2409 if group == None:
2410 group = PBXGroup({'name': name})
2411 main_group.AppendChild(group)
2412
2413 return group
2414
2415 # SourceGroup and ProductsGroup are created by default in Xcode's own
2416 # templates.
2417 def SourceGroup(self):
2418 return self._GroupByName('Source')
2419
2420 def ProductsGroup(self):
2421 return self._GroupByName('Products')
2422
2423 # IntermediatesGroup is used to collect source-like files that are generated
2424 # by rules or script phases and are placed in intermediate directories such
2425 # as DerivedSources.
2426 def IntermediatesGroup(self):
2427 return self._GroupByName('Intermediates')
2428
2429 # FrameworksGroup and ProjectsGroup are top-level groups used to collect
2430 # frameworks and projects.
2431 def FrameworksGroup(self):
2432 return self._GroupByName('Frameworks')
2433
2434 def ProjectsGroup(self):
2435 return self._GroupByName('Projects')
2436
2437 def RootGroupForPath(self, path):
2438 """Returns a PBXGroup child of this object to which path should be added.
2439
2440 This method is intended to choose between SourceGroup and
2441 IntermediatesGroup on the basis of whether path is present in a source
2442 directory or an intermediates directory. For the purposes of this
2443 determination, any path located within a derived file directory such as
2444 PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
2445 directory.
2446
2447 The returned value is a two-element tuple. The first element is the
2448 PBXGroup, and the second element specifies whether that group should be
2449 organized hierarchically (True) or as a single flat list (False).
2450 """
2451
2452 # TODO(mark): make this a class variable and bind to self on call?
2453 # Also, this list is nowhere near exhaustive.
2454 # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by
2455 # gyp.generator.xcode. There should probably be some way for that module
2456 # to push the names in, rather than having to hard-code them here.
2457 source_tree_groups = {
2458 'DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
2459 'INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
2460 'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
2461 'SHARED_INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
2462 }
2463
2464 (source_tree, path) = SourceTreeAndPathFromPath(path)
2465 if source_tree != None and source_tree in source_tree_groups:
2466 (group_func, hierarchical) = source_tree_groups[source_tree]
2467 group = group_func()
2468 return (group, hierarchical)
2469
2470 # TODO(mark): make additional choices based on file extension.
2471
2472 return (self.SourceGroup(), True)
2473
2474 def AddOrGetFileInRootGroup(self, path):
2475 """Returns a PBXFileReference corresponding to path in the correct group
2476 according to RootGroupForPath's heuristics.
2477
2478 If an existing PBXFileReference for path exists, it will be returned.
2479 Otherwise, one will be created and returned.
2480 """
2481
2482 (group, hierarchical) = self.RootGroupForPath(path)
2483 return group.AddOrGetFileByPath(path, hierarchical)
2484
2485 def RootGroupsTakeOverOnlyChildren(self, recurse=False):
2486 """Calls TakeOverOnlyChild for all groups in the main group."""
2487
2488 for group in self._properties['mainGroup']._properties['children']:
2489 if isinstance(group, PBXGroup):
2490 group.TakeOverOnlyChild(recurse)
2491
2492 def SortGroups(self):
2493 # Sort the children of the mainGroup (like "Source" and "Products")
2494 # according to their defined order.
2495 self._properties['mainGroup']._properties['children'] = \
2496 sorted(self._properties['mainGroup']._properties['children'],
2497 cmp=lambda x,y: x.CompareRootGroup(y))
2498
2499 # Sort everything else by putting group before files, and going
2500 # alphabetically by name within sections of groups and files. SortGroup
2501 # is recursive.
2502 for group in self._properties['mainGroup']._properties['children']:
2503 if not isinstance(group, PBXGroup):
2504 continue
2505
2506 if group.Name() == 'Products':
2507 # The Products group is a special case. Instead of sorting
2508 # alphabetically, sort things in the order of the targets that
2509 # produce the products. To do this, just build up a new list of
2510 # products based on the targets.
2511 products = []
2512 for target in self._properties['targets']:
2513 if not isinstance(target, PBXNativeTarget):
2514 continue
2515 product = target._properties['productReference']
2516 # Make sure that the product is already in the products group.
2517 assert product in group._properties['children']
2518 products.append(product)
2519
2520 # Make sure that this process doesn't miss anything that was already
2521 # in the products group.
2522 assert len(products) == len(group._properties['children'])
2523 group._properties['children'] = products
2524 else:
2525 group.SortGroup()
2526
2527 def AddOrGetProjectReference(self, other_pbxproject):
2528 """Add a reference to another project file (via PBXProject object) to this
2529 one.
2530
2531 Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in
2532 this project file that contains a PBXReferenceProxy object for each
2533 product of each PBXNativeTarget in the other project file. ProjectRef is
2534 a PBXFileReference to the other project file.
2535
2536 If this project file already references the other project file, the
2537 existing ProductGroup and ProjectRef are returned. The ProductGroup will
2538 still be updated if necessary.
2539 """
2540
2541 if not 'projectReferences' in self._properties:
2542 self._properties['projectReferences'] = []
2543
2544 product_group = None
2545 project_ref = None
2546
2547 if not other_pbxproject in self._other_pbxprojects:
2548 # This project file isn't yet linked to the other one. Establish the
2549 # link.
2550 product_group = PBXGroup({'name': 'Products'})
2551
2552 # ProductGroup is strong.
2553 product_group.parent = self
2554
2555 # There's nothing unique about this PBXGroup, and if left alone, it will
2556 # wind up with the same set of hashables as all other PBXGroup objects
2557 # owned by the projectReferences list. Add the hashables of the
2558 # remote PBXProject that it's related to.
2559 product_group._hashables.extend(other_pbxproject.Hashables())
2560
2561 # The other project reports its path as relative to the same directory
2562 # that this project's path is relative to. The other project's path
2563 # is not necessarily already relative to this project. Figure out the
2564 # pathname that this project needs to use to refer to the other one.
2565 this_path = posixpath.dirname(self.Path())
2566 projectDirPath = self.GetProperty('projectDirPath')
2567 if projectDirPath:
2568 if posixpath.isabs(projectDirPath[0]):
2569 this_path = projectDirPath
2570 else:
2571 this_path = posixpath.join(this_path, projectDirPath)
2572 other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
2573
2574 # ProjectRef is weak (it's owned by the mainGroup hierarchy).
2575 project_ref = PBXFileReference({
2576 'lastKnownFileType': 'wrapper.pb-project',
2577 'path': other_path,
2578 'sourceTree': 'SOURCE_ROOT',
2579 })
2580 self.ProjectsGroup().AppendChild(project_ref)
2581
2582 ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref}
2583 self._other_pbxprojects[other_pbxproject] = ref_dict
2584 self.AppendProperty('projectReferences', ref_dict)
2585
2586 # Xcode seems to sort this list case-insensitively
2587 self._properties['projectReferences'] = \
2588 sorted(self._properties['projectReferences'], cmp=lambda x,y:
2589 cmp(x['ProjectRef'].Name().lower(),
2590 y['ProjectRef'].Name().lower()))
2591 else:
2592 # The link already exists. Pull out the relevnt data.
2593 project_ref_dict = self._other_pbxprojects[other_pbxproject]
2594 product_group = project_ref_dict['ProductGroup']
2595 project_ref = project_ref_dict['ProjectRef']
2596
2597 self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
2598
2599 return [product_group, project_ref]
2600
2601 def _SetUpProductReferences(self, other_pbxproject, product_group,
2602 project_ref):
2603 # TODO(mark): This only adds references to products in other_pbxproject
2604 # when they don't exist in this pbxproject. Perhaps it should also
2605 # remove references from this pbxproject that are no longer present in
2606 # other_pbxproject. Perhaps it should update various properties if they
2607 # change.
2608 for target in other_pbxproject._properties['targets']:
2609 if not isinstance(target, PBXNativeTarget):
2610 continue
2611
2612 other_fileref = target._properties['productReference']
2613 if product_group.GetChildByRemoteObject(other_fileref) == None:
2614 # Xcode sets remoteInfo to the name of the target and not the name
2615 # of its product, despite this proxy being a reference to the product.
2616 container_item = PBXContainerItemProxy({
2617 'containerPortal': project_ref,
2618 'proxyType': 2,
2619 'remoteGlobalIDString': other_fileref,
2620 'remoteInfo': target.Name()
2621 })
2622 # TODO(mark): Does sourceTree get copied straight over from the other
2623 # project? Can the other project ever have lastKnownFileType here
2624 # instead of explicitFileType? (Use it if so?) Can path ever be
2625 # unset? (I don't think so.) Can other_fileref have name set, and
2626 # does it impact the PBXReferenceProxy if so? These are the questions
2627 # that perhaps will be answered one day.
2628 reference_proxy = PBXReferenceProxy({
2629 'fileType': other_fileref._properties['explicitFileType'],
2630 'path': other_fileref._properties['path'],
2631 'sourceTree': other_fileref._properties['sourceTree'],
2632 'remoteRef': container_item,
2633 })
2634
2635 product_group.AppendChild(reference_proxy)
2636
2637 def SortRemoteProductReferences(self):
2638 # For each remote project file, sort the associated ProductGroup in the
2639 # same order that the targets are sorted in the remote project file. This
2640 # is the sort order used by Xcode.
2641
2642 def CompareProducts(x, y, remote_products):
2643 # x and y are PBXReferenceProxy objects. Go through their associated
2644 # PBXContainerItem to get the remote PBXFileReference, which will be
2645 # present in the remote_products list.
2646 x_remote = x._properties['remoteRef']._properties['remoteGlobalIDString']
2647 y_remote = y._properties['remoteRef']._properties['remoteGlobalIDString']
2648 x_index = remote_products.index(x_remote)
2649 y_index = remote_products.index(y_remote)
2650
2651 # Use the order of each remote PBXFileReference in remote_products to
2652 # determine the sort order.
2653 return cmp(x_index, y_index)
2654
2655 for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems():
2656 # Build up a list of products in the remote project file, ordered the
2657 # same as the targets that produce them.
2658 remote_products = []
2659 for target in other_pbxproject._properties['targets']:
2660 if not isinstance(target, PBXNativeTarget):
2661 continue
2662 remote_products.append(target._properties['productReference'])
2663
2664 # Sort the PBXReferenceProxy children according to the list of remote
2665 # products.
2666 product_group = ref_dict['ProductGroup']
2667 product_group._properties['children'] = sorted(
2668 product_group._properties['children'],
2669 cmp=lambda x, y: CompareProducts(x, y, remote_products))
2670
2671
2672 class XCProjectFile(XCObject):
2673 _schema = XCObject._schema.copy()
2674 _schema.update({
2675 'archiveVersion': [0, int, 0, 1, 1],
2676 'classes': [0, dict, 0, 1, {}],
2677 'objectVersion': [0, int, 0, 1, 45],
2678 'rootObject': [0, PBXProject, 1, 1],
2679 })
2680
2681 def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
2682 # Although XCProjectFile is implemented here as an XCObject, it's not a
2683 # proper object in the Xcode sense, and it certainly doesn't have its own
2684 # ID. Pass through an attempt to update IDs to the real root object.
2685 if recursive:
2686 self._properties['rootObject'].ComputeIDs(recursive, overwrite, hash)
2687
2688 def Print(self, file=sys.stdout):
2689 self.VerifyHasRequiredProperties()
2690
2691 # Add the special "objects" property, which will be caught and handled
2692 # separately during printing. This structure allows a fairly standard
2693 # loop do the normal printing.
2694 self._properties['objects'] = {}
2695 self._XCPrint(file, 0, '// !$*UTF8*$!\n')
2696 if self._should_print_single_line:
2697 self._XCPrint(file, 0, '{ ')
2698 else:
2699 self._XCPrint(file, 0, '{\n')
2700 for property, value in sorted(self._properties.iteritems(),
2701 cmp=lambda x, y: cmp(x, y)):
2702 if property == 'objects':
2703 self._PrintObjects(file)
2704 else:
2705 self._XCKVPrint(file, 1, property, value)
2706 self._XCPrint(file, 0, '}\n')
2707 del self._properties['objects']
2708
2709 def _PrintObjects(self, file):
2710 if self._should_print_single_line:
2711 self._XCPrint(file, 0, 'objects = {')
2712 else:
2713 self._XCPrint(file, 1, 'objects = {\n')
2714
2715 objects_by_class = {}
2716 for object in self.Descendants():
2717 if object == self:
2718 continue
2719 class_name = object.__class__.__name__
2720 if not class_name in objects_by_class:
2721 objects_by_class[class_name] = []
2722 objects_by_class[class_name].append(object)
2723
2724 for class_name in sorted(objects_by_class):
2725 self._XCPrint(file, 0, '\n')
2726 self._XCPrint(file, 0, '/* Begin ' + class_name + ' section */\n')
2727 for object in sorted(objects_by_class[class_name],
2728 cmp=lambda x, y: cmp(x.id, y.id)):
2729 object.Print(file)
2730 self._XCPrint(file, 0, '/* End ' + class_name + ' section */\n')
2731
2732 if self._should_print_single_line:
2733 self._XCPrint(file, 0, '}; ')
2734 else:
2735 self._XCPrint(file, 1, '};\n')
+0
-81
mozc_build_tools/gyp/samples/samples less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import os.path
7 import shutil
8 import sys
9
10
11 gyps = [
12 'app/app.gyp',
13 'base/base.gyp',
14 'build/temp_gyp/googleurl.gyp',
15 'build/all.gyp',
16 'build/common.gypi',
17 'build/external_code.gypi',
18 'chrome/test/security_tests/security_tests.gyp',
19 'chrome/third_party/hunspell/hunspell.gyp',
20 'chrome/chrome.gyp',
21 'media/media.gyp',
22 'net/net.gyp',
23 'printing/printing.gyp',
24 'sdch/sdch.gyp',
25 'skia/skia.gyp',
26 'testing/gmock.gyp',
27 'testing/gtest.gyp',
28 'third_party/bzip2/bzip2.gyp',
29 'third_party/icu38/icu38.gyp',
30 'third_party/libevent/libevent.gyp',
31 'third_party/libjpeg/libjpeg.gyp',
32 'third_party/libpng/libpng.gyp',
33 'third_party/libxml/libxml.gyp',
34 'third_party/libxslt/libxslt.gyp',
35 'third_party/lzma_sdk/lzma_sdk.gyp',
36 'third_party/modp_b64/modp_b64.gyp',
37 'third_party/npapi/npapi.gyp',
38 'third_party/sqlite/sqlite.gyp',
39 'third_party/zlib/zlib.gyp',
40 'v8/tools/gyp/v8.gyp',
41 'webkit/activex_shim/activex_shim.gyp',
42 'webkit/activex_shim_dll/activex_shim_dll.gyp',
43 'webkit/build/action_csspropertynames.py',
44 'webkit/build/action_cssvaluekeywords.py',
45 'webkit/build/action_jsconfig.py',
46 'webkit/build/action_makenames.py',
47 'webkit/build/action_maketokenizer.py',
48 'webkit/build/action_useragentstylesheets.py',
49 'webkit/build/rule_binding.py',
50 'webkit/build/rule_bison.py',
51 'webkit/build/rule_gperf.py',
52 'webkit/tools/test_shell/test_shell.gyp',
53 'webkit/webkit.gyp',
54 ]
55
56
57 def Main(argv):
58 if len(argv) != 3 or argv[1] not in ['push', 'pull']:
59 print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0]
60 return 1
61
62 path_to_chrome = argv[2]
63
64 for g in gyps:
65 chrome_file = os.path.join(path_to_chrome, g)
66 local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1])
67 if argv[1] == 'push':
68 print 'Copying %s to %s' % (local_file, chrome_file)
69 shutil.copyfile(local_file, chrome_file)
70 elif argv[1] == 'pull':
71 print 'Copying %s to %s' % (chrome_file, local_file)
72 shutil.copyfile(chrome_file, local_file)
73 else:
74 assert False
75
76 return 0
77
78
79 if __name__ == '__main__':
80 sys.exit(Main(sys.argv))
+0
-5
mozc_build_tools/gyp/samples/samples.bat less more
0 @rem Copyright (c) 2009 Google Inc. All rights reserved.
1 @rem Use of this source code is governed by a BSD-style license that can be
2 @rem found in the LICENSE file.
3
4 @python %~dp0/samples %*
+0
-26
mozc_build_tools/gyp/setup.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 from distutils.core import setup
7 from distutils.command.install import install
8 from distutils.command.install_lib import install_lib
9 from distutils.command.install_scripts import install_scripts
10
11 setup(
12 name='gyp',
13 version='0.1',
14 description='Generate Your Projects',
15 author='Chromium Authors',
16 author_email='chromium-dev@googlegroups.com',
17 url='http://code.google.com/p/gyp',
18 package_dir = {'': 'pylib'},
19 packages=['gyp', 'gyp.generator'],
20
21 scripts = ['gyp'],
22 cmdclass = {'install': install,
23 'install_lib': install_lib,
24 'install_scripts': install_scripts},
25 )
+0
-94
mozc_build_tools/gyp/test/actions/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple actions when using an explicit build target of 'all'.
8 """
9
10 import glob
11 import os
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 test.run_gyp('actions.gyp', chdir='src')
17
18 test.relocate('src', 'relocate/src')
19
20 # Test that an "always run" action increases a counter on multiple invocations,
21 # and that a dependent action updates in step.
22 test.build('actions.gyp', test.ALL, chdir='relocate/src')
23 test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
24 test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
25 test.build('actions.gyp', test.ALL, chdir='relocate/src')
26 test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
27 test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
28
29 # The "always run" action only counts to 2, but the dependent target will count
30 # forever if it's allowed to run. This verifies that the dependent target only
31 # runs when the "always run" action generates new output, not just because the
32 # "always run" ran.
33 test.build('actions.gyp', test.ALL, chdir='relocate/src')
34 test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
35 test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
36
37 expect = """\
38 Hello from program.c
39 Hello from make-prog1.py
40 Hello from make-prog2.py
41 """
42
43 if test.format == 'xcode':
44 chdir = 'relocate/src/subdir1'
45 else:
46 chdir = 'relocate/src'
47 test.run_built_executable('program', chdir=chdir, stdout=expect)
48
49
50 test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
51
52
53 expect = "Hello from generate_main.py\n"
54
55 if test.format == 'xcode':
56 chdir = 'relocate/src/subdir3'
57 else:
58 chdir = 'relocate/src'
59 test.run_built_executable('null_input', chdir=chdir, stdout=expect)
60
61
62 # Clean out files which may have been created if test.ALL was run.
63 def clean_dep_files():
64 for file in (glob.glob('relocate/src/dep_*.txt') +
65 glob.glob('relocate/src/deps_all_done_*.txt')):
66 if os.path.exists(file):
67 os.remove(file)
68
69 # Confirm our clean.
70 clean_dep_files()
71 test.must_not_exist('relocate/src/dep_1.txt')
72 test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
73
74 # Make sure all deps finish before an action is run on a 'None' target.
75 # If using the Make builder, add -j to make things more difficult.
76 arguments = []
77 if test.format == 'make':
78 arguments = ['-j']
79 test.build('actions.gyp', 'action_with_dependencies_123', chdir='relocate/src',
80 arguments=arguments)
81 test.must_exist('relocate/src/deps_all_done_first_123.txt')
82
83 # Try again with a target that has deps in reverse. Output files from
84 # previous tests deleted. Confirm this execution did NOT run the ALL
85 # target which would mess up our dep tests.
86 clean_dep_files()
87 test.build('actions.gyp', 'action_with_dependencies_321', chdir='relocate/src',
88 arguments=arguments)
89 test.must_exist('relocate/src/deps_all_done_first_321.txt')
90 test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
91
92
93 test.pass_test()
+0
-61
mozc_build_tools/gyp/test/actions/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple actions when using the default build target.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('actions.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 # Test that an "always run" action increases a counter on multiple invocations,
19 # and that a dependent action updates in step.
20 test.build('actions.gyp', chdir='relocate/src')
21 test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
22 test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
23 test.build('actions.gyp', chdir='relocate/src')
24 test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
25 test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
26
27 # The "always run" action only counts to 2, but the dependent target will count
28 # forever if it's allowed to run. This verifies that the dependent target only
29 # runs when the "always run" action generates new output, not just because the
30 # "always run" ran.
31 test.build('actions.gyp', test.ALL, chdir='relocate/src')
32 test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
33 test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
34
35 expect = """\
36 Hello from program.c
37 Hello from make-prog1.py
38 Hello from make-prog2.py
39 """
40
41 if test.format == 'xcode':
42 chdir = 'relocate/src/subdir1'
43 else:
44 chdir = 'relocate/src'
45 test.run_built_executable('program', chdir=chdir, stdout=expect)
46
47
48 test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
49
50
51 expect = "Hello from generate_main.py\n"
52
53 if test.format == 'xcode':
54 chdir = 'relocate/src/subdir3'
55 else:
56 chdir = 'relocate/src'
57 test.run_built_executable('null_input', chdir=chdir, stdout=expect)
58
59
60 test.pass_test()
+0
-24
mozc_build_tools/gyp/test/actions/gyptest-errors.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies behavior for different action configuration errors:
8 exit status of 1, and the expected error message must be in stderr.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15
16 test.run_gyp('action_missing_name.gyp', chdir='src', status=1, stderr=None)
17 expect = [
18 "Anonymous action in target broken_actions2. An action must have an 'action_name' field.",
19 ]
20 test.must_contain_all_lines(test.stderr(), expect)
21
22
23 test.pass_test()
+0
-24
mozc_build_tools/gyp/test/actions/src/action_missing_name.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'broken_actions2',
8 'type': 'none',
9 'actions': [
10 {
11 'inputs': [
12 'no_name.input',
13 ],
14 'action': [
15 'python',
16 '-c',
17 'print \'missing name\'',
18 ],
19 },
20 ],
21 },
22 ],
23 }
+0
-114
mozc_build_tools/gyp/test/actions/src/actions.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'pull_in_all_actions',
8 'type': 'none',
9 'dependencies': [
10 'subdir1/executable.gyp:*',
11 'subdir2/none.gyp:*',
12 'subdir3/null_input.gyp:*',
13 ],
14 },
15 {
16 'target_name': 'depend_on_always_run_action',
17 'type': 'none',
18 'dependencies': [ 'subdir1/executable.gyp:counter' ],
19 'actions': [
20 {
21 'action_name': 'use_always_run_output',
22 'inputs': [
23 'subdir1/actions-out/action-counter.txt',
24 'subdir1/counter.py',
25 ],
26 'outputs': [
27 'subdir1/actions-out/action-counter_2.txt',
28 ],
29 'action': [
30 'python', 'subdir1/counter.py', '<(_outputs)',
31 ],
32 # Allows the test to run without hermetic cygwin on windows.
33 'msvs_cygwin_shell': 0,
34 },
35 ],
36 },
37
38 # Three deps which don't finish immediately.
39 # Each one has a small delay then creates a file.
40 # Delays are 1.0, 1.1, and 2.0 seconds.
41 {
42 'target_name': 'dep_1',
43 'type': 'none',
44 'actions': [{
45 'inputs': [ 'actions.gyp' ],
46 'outputs': [ 'dep_1.txt' ],
47 'action_name': 'dep_1',
48 'action': [ 'python', '-c',
49 'import time; time.sleep(1); open(\'dep_1.txt\', \'w\')' ],
50 # Allows the test to run without hermetic cygwin on windows.
51 'msvs_cygwin_shell': 0,
52 }],
53 },
54 {
55 'target_name': 'dep_2',
56 'type': 'none',
57 'actions': [{
58 'inputs': [ 'actions.gyp' ],
59 'outputs': [ 'dep_2.txt' ],
60 'action_name': 'dep_2',
61 'action': [ 'python', '-c',
62 'import time; time.sleep(1.1); open(\'dep_2.txt\', \'w\')' ],
63 # Allows the test to run without hermetic cygwin on windows.
64 'msvs_cygwin_shell': 0,
65 }],
66 },
67 {
68 'target_name': 'dep_3',
69 'type': 'none',
70 'actions': [{
71 'inputs': [ 'actions.gyp' ],
72 'outputs': [ 'dep_3.txt' ],
73 'action_name': 'dep_3',
74 'action': [ 'python', '-c',
75 'import time; time.sleep(2.0); open(\'dep_3.txt\', \'w\')' ],
76 # Allows the test to run without hermetic cygwin on windows.
77 'msvs_cygwin_shell': 0,
78 }],
79 },
80
81 # An action which assumes the deps have completed.
82 # Does NOT list the output files of it's deps as inputs.
83 # On success create the file deps_all_done_first.txt.
84 {
85 'target_name': 'action_with_dependencies_123',
86 'type': 'none',
87 'dependencies': [ 'dep_1', 'dep_2', 'dep_3' ],
88 'actions': [{
89 'inputs': [ 'actions.gyp' ],
90 'outputs': [ 'deps_all_done_first_123.txt' ],
91 'action_name': 'action_with_dependencies_123',
92 'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ],
93 # Allows the test to run without hermetic cygwin on windows.
94 'msvs_cygwin_shell': 0,
95 }],
96 },
97 # Same as above but with deps in reverse.
98 {
99 'target_name': 'action_with_dependencies_321',
100 'type': 'none',
101 'dependencies': [ 'dep_3', 'dep_2', 'dep_1' ],
102 'actions': [{
103 'inputs': [ 'actions.gyp' ],
104 'outputs': [ 'deps_all_done_first_321.txt' ],
105 'action_name': 'action_with_dependencies_321',
106 'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ],
107 # Allows the test to run without hermetic cygwin on windows.
108 'msvs_cygwin_shell': 0,
109 }],
110 },
111
112 ],
113 }
+0
-16
mozc_build_tools/gyp/test/actions/src/confirm-dep-files.py less more
0 #!/usr/bin/python
1
2 # Confirm presence of files generated by our targets we depend on.
3 # If they exist, create a new file.
4 #
5 # Note target's input files are explicitly NOT defined in the gyp file
6 # so they can't easily be passed to this script as args.
7
8 import os
9 import sys
10
11 outfile = sys.argv[1] # Example value we expect: deps_all_done_first_123.txt
12 if (os.path.exists("dep_1.txt") and
13 os.path.exists("dep_2.txt") and
14 os.path.exists("dep_3.txt")):
15 open(outfile, "w")
+0
-46
mozc_build_tools/gyp/test/actions/src/subdir1/counter.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2010 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7 import time
8
9 output = sys.argv[1]
10 persistoutput = "%s.persist" % sys.argv[1]
11
12 count = 0
13 try:
14 count = open(persistoutput, 'r').read()
15 except:
16 pass
17 count = int(count) + 1
18
19 if len(sys.argv) > 2:
20 max_count = int(sys.argv[2])
21 if count > max_count:
22 count = max_count
23
24 oldcount = 0
25 try:
26 oldcount = open(output, 'r').read()
27 except:
28 pass
29
30 # Save the count in a file that is undeclared, and thus hidden, to gyp. We need
31 # to do this because, prior to running commands, scons deletes any declared
32 # outputs, so we would lose our count if we just wrote to the given output file.
33 # (The other option is to use Precious() in the scons generator, but that seems
34 # too heavy-handed just to support this somewhat unrealistic test case, and
35 # might lead to unintended side-effects).
36 open(persistoutput, 'w').write('%d' % (count))
37
38 # Only write the given output file if the count has changed.
39 if int(oldcount) != count:
40 open(output, 'w').write('%d' % (count))
41 # Sleep so the next run changes the file time sufficiently to make the build
42 # detect the file as changed.
43 time.sleep(1)
44
45 sys.exit(0)
+0
-74
mozc_build_tools/gyp/test/actions/src/subdir1/executable.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program',
8 'type': 'executable',
9 'msvs_cygwin_shell': 0,
10 'sources': [
11 'program.c',
12 ],
13 'actions': [
14 {
15 'action_name': 'make-prog1',
16 'inputs': [
17 'make-prog1.py',
18 ],
19 'outputs': [
20 '<(INTERMEDIATE_DIR)/prog1.c',
21 ],
22 'action': [
23 'python', '<(_inputs)', '<@(_outputs)',
24 ],
25 'process_outputs_as_sources': 1,
26 },
27 {
28 'action_name': 'make-prog2',
29 'inputs': [
30 'make-prog2.py',
31 ],
32 'outputs': [
33 'actions-out/prog2.c',
34 ],
35 'action': [
36 'python', '<(_inputs)', '<@(_outputs)',
37 ],
38 'process_outputs_as_sources': 1,
39 # Allows the test to run without hermetic cygwin on windows.
40 'msvs_cygwin_shell': 0,
41 },
42 ],
43 },
44 {
45 'target_name': 'counter',
46 'type': 'none',
47 'actions': [
48 {
49 # This action should always run, regardless of whether or not it's
50 # inputs or the command-line change. We do this by creating a dummy
51 # first output, which is always missing, thus causing the build to
52 # always try to recreate it. Actual output files should be listed
53 # after the dummy one, and dependent targets should list the real
54 # output(s) in their inputs
55 # (see '../actions.gyp:depend_on_always_run_action').
56 'action_name': 'action_counter',
57 'inputs': [
58 'counter.py',
59 ],
60 'outputs': [
61 'actions-out/action-counter.txt.always',
62 'actions-out/action-counter.txt',
63 ],
64 'action': [
65 'python', '<(_inputs)', 'actions-out/action-counter.txt', '2',
66 ],
67 # Allows the test to run without hermetic cygwin on windows.
68 'msvs_cygwin_shell': 0,
69 },
70 ],
71 },
72 ],
73 }
+0
-20
mozc_build_tools/gyp/test/actions/src/subdir1/make-prog1.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = r"""
9 #include <stdio.h>
10
11 void prog1(void)
12 {
13 printf("Hello from make-prog1.py\n");
14 }
15 """
16
17 open(sys.argv[1], 'w').write(contents)
18
19 sys.exit(0)
+0
-20
mozc_build_tools/gyp/test/actions/src/subdir1/make-prog2.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = r"""
9 #include <stdio.h>
10
11 void prog2(void)
12 {
13 printf("Hello from make-prog2.py\n");
14 }
15 """
16
17 open(sys.argv[1], 'w').write(contents)
18
19 sys.exit(0)
+0
-12
mozc_build_tools/gyp/test/actions/src/subdir1/program.c less more
0 #include <stdio.h>
1
2 extern void prog1(void);
3 extern void prog2(void);
4
5 int main(int argc, char *argv[])
6 {
7 printf("Hello from program.c\n");
8 prog1();
9 prog2();
10 return 0;
11 }
+0
-11
mozc_build_tools/gyp/test/actions/src/subdir2/make-file.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = "Hello from make-file.py\n"
9
10 open(sys.argv[1], 'wb').write(contents)
+0
-33
mozc_build_tools/gyp/test/actions/src/subdir2/none.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'file',
8 'type': 'none',
9 'msvs_cygwin_shell': 0,
10 'actions': [
11 {
12 'action_name': 'make-file',
13 'inputs': [
14 'make-file.py',
15 ],
16 'outputs': [
17 'file.out',
18 # TODO: enhance testing infrastructure to test this
19 # without having to hard-code the intermediate dir paths.
20 #'<(INTERMEDIATE_DIR)/file.out',
21 ],
22 'action': [
23 'python', '<(_inputs)', '<@(_outputs)',
24 ],
25 'process_outputs_as_sources': 1,
26 # Allows the test to run without hermetic cygwin on windows.
27 'msvs_cygwin_shell': 0,
28 }
29 ],
30 },
31 ],
32 }
+0
-21
mozc_build_tools/gyp/test/actions/src/subdir3/generate_main.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = """
9 #include <stdio.h>
10
11 int main(int argc, char *argv[])
12 {
13 printf("Hello from generate_main.py\\n");
14 return 0;
15 }
16 """
17
18 open(sys.argv[1], 'w').write(contents)
19
20 sys.exit(0)
+0
-29
mozc_build_tools/gyp/test/actions/src/subdir3/null_input.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'null_input',
8 'type': 'executable',
9 'msvs_cygwin_shell': 0,
10 'actions': [
11 {
12 'action_name': 'generate_main',
13 'process_outputs_as_sources': 1,
14 'inputs': [],
15 'outputs': [
16 '<(INTERMEDIATE_DIR)/main.c',
17 ],
18 'action': [
19 # TODO: we can't just use <(_outputs) here?!
20 'python', 'generate_main.py', '<(INTERMEDIATE_DIR)/main.c',
21 ],
22 # Allows the test to run without hermetic cygwin on windows.
23 'msvs_cygwin_shell': 0,
24 },
25 ],
26 },
27 ],
28 }
+0
-23
mozc_build_tools/gyp/test/actions-bare/gyptest-bare.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies actions which are not depended on by other targets get executed.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('bare.gyp', chdir='src')
15 test.relocate('src', 'relocate/src')
16 test.build('bare.gyp', chdir='relocate/src')
17
18 file_content = 'Hello from bare.py\n'
19
20 test.built_file_must_match('out.txt', file_content, chdir='relocate/src')
21
22 test.pass_test()
+0
-25
mozc_build_tools/gyp/test/actions-bare/src/bare.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'bare',
8 'type': 'none',
9 'actions': [
10 {
11 'action_name': 'action1',
12 'inputs': [
13 'bare.py',
14 ],
15 'outputs': [
16 '<(PRODUCT_DIR)/out.txt',
17 ],
18 'action': ['python', 'bare.py', '<(PRODUCT_DIR)/out.txt'],
19 'msvs_cygwin_shell': 0,
20 },
21 ],
22 },
23 ],
24 }
+0
-11
mozc_build_tools/gyp/test/actions-bare/src/bare.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 f = open(sys.argv[1], 'wb')
9 f.write('Hello from bare.py\n')
10 f.close()
+0
-26
mozc_build_tools/gyp/test/actions-subdir/gyptest-action.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Test actions that output to PRODUCT_DIR.
8 """
9
10 import TestGyp
11
12 # TODO fix this for xcode: http://code.google.com/p/gyp/issues/detail?id=88
13 test = TestGyp.TestGyp(formats=['!xcode'])
14
15 test.run_gyp('none.gyp', chdir='src')
16
17 test.build('none.gyp', test.ALL, chdir='src')
18
19 file_content = 'Hello from make-file.py\n'
20 subdir_file_content = 'Hello from make-subdir-file.py\n'
21
22 test.built_file_must_match('file.out', file_content, chdir='src')
23 test.built_file_must_match('subdir_file.out', subdir_file_content, chdir='src')
24
25 test.pass_test()
+0
-11
mozc_build_tools/gyp/test/actions-subdir/src/make-file.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = 'Hello from make-file.py\n'
9
10 open(sys.argv[1], 'wb').write(contents)
+0
-31
mozc_build_tools/gyp/test/actions-subdir/src/none.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'file',
8 'type': 'none',
9 'msvs_cygwin_shell': 0,
10 'actions': [
11 {
12 'action_name': 'make-file',
13 'inputs': [
14 'make-file.py',
15 ],
16 'outputs': [
17 '<(PRODUCT_DIR)/file.out',
18 ],
19 'action': [
20 'python', '<(_inputs)', '<@(_outputs)',
21 ],
22 'process_outputs_as_sources': 1,
23 }
24 ],
25 'dependencies': [
26 'subdir/subdir.gyp:subdir_file',
27 ],
28 },
29 ],
30 }
+0
-11
mozc_build_tools/gyp/test/actions-subdir/src/subdir/make-subdir-file.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = 'Hello from make-subdir-file.py\n'
9
10 open(sys.argv[1], 'wb').write(contents)
+0
-28
mozc_build_tools/gyp/test/actions-subdir/src/subdir/subdir.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'subdir_file',
8 'type': 'none',
9 'msvs_cygwin_shell': 0,
10 'actions': [
11 {
12 'action_name': 'make-subdir-file',
13 'inputs': [
14 'make-subdir-file.py',
15 ],
16 'outputs': [
17 '<(PRODUCT_DIR)/subdir_file.out',
18 ],
19 'action': [
20 'python', '<(_inputs)', '<@(_outputs)',
21 ],
22 'process_outputs_as_sources': 1,
23 }
24 ],
25 },
26 ],
27 }
+0
-55
mozc_build_tools/gyp/test/additional-targets/gyptest-additional.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple actions when using an explicit build target of 'all'.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('all.gyp', chdir='src')
15 test.relocate('src', 'relocate/src')
16
17 # Build all.
18 test.build('all.gyp', chdir='relocate/src')
19
20 if test.format=='xcode':
21 chdir = 'relocate/src/dir1'
22 else:
23 chdir = 'relocate/src'
24
25 # Output is as expected.
26 file_content = 'Hello from emit.py\n'
27 test.built_file_must_match('out2.txt', file_content, chdir=chdir)
28
29 test.built_file_must_not_exist('out.txt', chdir='relocate/src')
30 test.built_file_must_not_exist('foolib1',
31 type=test.SHARED_LIB,
32 chdir=chdir)
33
34 # TODO(mmoss) Make consistent with scons, with 'dir1' before 'out/Default'?
35 if test.format == 'make':
36 chdir='relocate/src'
37 else:
38 chdir='relocate/src/dir1'
39
40 # Build the action explicitly.
41 test.build('actions.gyp', 'action1_target', chdir=chdir)
42
43 # Check that things got run.
44 file_content = 'Hello from emit.py\n'
45 test.built_file_must_exist('out.txt', chdir=chdir)
46
47 # Build the shared library explicitly.
48 test.build('actions.gyp', 'foolib1', chdir=chdir)
49
50 test.built_file_must_exist('foolib1',
51 type=test.SHARED_LIB,
52 chdir=chdir)
53
54 test.pass_test()
+0
-13
mozc_build_tools/gyp/test/additional-targets/src/all.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'all_targets',
8 'type': 'none',
9 'dependencies': ['dir1/actions.gyp:*'],
10 },
11 ],
12 }
+0
-56
mozc_build_tools/gyp/test/additional-targets/src/dir1/actions.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'action1_target',
8 'type': 'none',
9 'suppress_wildcard': 1,
10 'actions': [
11 {
12 'action_name': 'action1',
13 'inputs': [
14 'emit.py',
15 ],
16 'outputs': [
17 '<(PRODUCT_DIR)/out.txt',
18 ],
19 'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out.txt'],
20 'msvs_cygwin_shell': 0,
21 },
22 ],
23 },
24 {
25 'target_name': 'action2_target',
26 'type': 'none',
27 'actions': [
28 {
29 'action_name': 'action2',
30 'inputs': [
31 'emit.py',
32 ],
33 'outputs': [
34 '<(PRODUCT_DIR)/out2.txt',
35 ],
36 'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out2.txt'],
37 'msvs_cygwin_shell': 0,
38 },
39 ],
40 },
41 {
42 'target_name': 'foolib1',
43 'type': 'shared_library',
44 'suppress_wildcard': 1,
45 'sources': ['lib1.c'],
46 },
47 ],
48 'conditions': [
49 ['OS=="linux"', {
50 'target_defaults': {
51 'cflags': ['-fPIC'],
52 },
53 }],
54 ],
55 }
+0
-11
mozc_build_tools/gyp/test/additional-targets/src/dir1/emit.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 f = open(sys.argv[1], 'wb')
9 f.write('Hello from emit.py\n')
10 f.close()
+0
-6
mozc_build_tools/gyp/test/additional-targets/src/dir1/lib1.c less more
0 #ifdef _WIN32
1 __declspec(dllexport)
2 #endif
3 int func1(void) {
4 return 42;
5 }
+0
-31
mozc_build_tools/gyp/test/assembly/gyptest-assembly.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that .hpp files are ignored when included in the source list on all
8 platforms.
9 """
10
11 import sys
12 import TestGyp
13
14 # TODO(bradnelson): get this working for windows.
15 test = TestGyp.TestGyp(formats=['make', 'scons', 'xcode'])
16
17 test.run_gyp('assembly.gyp', chdir='src')
18
19 test.relocate('src', 'relocate/src')
20
21 test.build('assembly.gyp', test.ALL, chdir='relocate/src')
22
23 expect = """\
24 Hello from program.c
25 Got 42.
26 """
27 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
28
29
30 test.pass_test()
+0
-4
mozc_build_tools/gyp/test/assembly/src/as.bat less more
0 @echo off
1 :: Mock windows assembler.
2 cl /c %1 /Fo"%2"
3
+0
-59
mozc_build_tools/gyp/test/assembly/src/assembly.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'conditions': [
7 ['OS=="win"', {
8 'defines': ['PLATFORM_WIN'],
9 }],
10 ['OS=="mac"', {
11 'defines': ['PLATFORM_MAC'],
12 }],
13 ['OS=="linux"', {
14 'defines': ['PLATFORM_LINUX'],
15 }],
16 ],
17 },
18 'targets': [
19 {
20 'target_name': 'program',
21 'type': 'executable',
22 'dependencies': ['lib1'],
23 'sources': [
24 'program.c',
25 ],
26 },
27 {
28 'target_name': 'lib1',
29 'type': 'static_library',
30 'sources': [
31 'lib1.S',
32 ],
33 },
34 ],
35 'conditions': [
36 ['OS=="win"', {
37 'target_defaults': {
38 'rules': [
39 {
40 'rule_name': 'assembler',
41 'msvs_cygwin_shell': 0,
42 'extension': 'S',
43 'inputs': [
44 'as.bat',
45 ],
46 'outputs': [
47 '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).obj',
48 ],
49 'action':
50 ['as.bat', 'lib1.c', '<(_outputs)'],
51 'message': 'Building assembly file <(RULE_INPUT_PATH)',
52 'process_outputs_as_sources': 1,
53 },
54 ],
55 },
56 },],
57 ],
58 }
+0
-10
mozc_build_tools/gyp/test/assembly/src/lib1.S less more
0 #if PLATFORM_WINDOWS || PLATFORM_MAC
1 # define IDENTIFIER(n) _##n
2 #else /* Linux */
3 # define IDENTIFIER(n) n
4 #endif
5
6 .globl IDENTIFIER(lib1_function)
7 IDENTIFIER(lib1_function):
8 movl $42, %eax
9 ret
+0
-3
mozc_build_tools/gyp/test/assembly/src/lib1.c less more
0 int lib1_function(void) {
1 return 42;
2 }
+0
-12
mozc_build_tools/gyp/test/assembly/src/program.c less more
0 #include <stdio.h>
1
2 extern int lib1_function(void);
3
4 int main(int argc, char *argv[])
5 {
6 fprintf(stdout, "Hello from program.c\n");
7 fflush(stdout);
8 fprintf(stdout, "Got %d.\n", lib1_function());
9 fflush(stdout);
10 return 0;
11 }
+0
-77
mozc_build_tools/gyp/test/builddir/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verify the settings that cause a set of programs to be created in
8 a specific build directory, and that no intermediate built files
9 get created outside of that build directory hierarchy even when
10 referred to with deeply-nested ../../.. paths.
11 """
12
13 import TestGyp
14
15 # TODO(mmoss): Make only supports (theoretically) a single, global build
16 # directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
17 # gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
18 # generators support, so this doesn't work yet for make.
19 # TODO(mmoss) Make also has the issue that the top-level Makefile is written to
20 # the "--depth" location, which is one level above 'src', but then this test
21 # moves 'src' somewhere else, leaving the Makefile behind, so make can't find
22 # its sources. I'm not sure if make is wrong for writing outside the current
23 # directory, or if the test is wrong for assuming everything generated is under
24 # the current directory.
25 test = TestGyp.TestGyp(formats=['!make'])
26
27 test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
28
29 test.relocate('src', 'relocate/src')
30
31 test.subdir('relocate/builddir')
32
33 # Make sure that all the built ../../etc. files only get put under builddir,
34 # by making all of relocate read-only and then making only builddir writable.
35 test.writable('relocate', False)
36 test.writable('relocate/builddir', True)
37
38 # Suppress the test infrastructure's setting SYMROOT on the command line.
39 test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')
40
41 expect1 = """\
42 Hello from prog1.c
43 Hello from func1.c
44 """
45
46 expect2 = """\
47 Hello from subdir2/prog2.c
48 Hello from func2.c
49 """
50
51 expect3 = """\
52 Hello from subdir2/subdir3/prog3.c
53 Hello from func3.c
54 """
55
56 expect4 = """\
57 Hello from subdir2/subdir3/subdir4/prog4.c
58 Hello from func4.c
59 """
60
61 expect5 = """\
62 Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
63 Hello from func5.c
64 """
65
66 def run_builddir(prog, expect):
67 dir = 'relocate/builddir/Default/'
68 test.run(program=test.workpath(dir + prog), stdout=expect)
69
70 run_builddir('prog1', expect1)
71 run_builddir('prog2', expect2)
72 run_builddir('prog3', expect3)
73 run_builddir('prog4', expect4)
74 run_builddir('prog5', expect5)
75
76 test.pass_test()
+0
-77
mozc_build_tools/gyp/test/builddir/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verify the settings that cause a set of programs to be created in
8 a specific build directory, and that no intermediate built files
9 get created outside of that build directory hierarchy even when
10 referred to with deeply-nested ../../.. paths.
11 """
12
13 import TestGyp
14
15 # TODO(mmoss): Make only supports (theoretically) a single, global build
16 # directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
17 # gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
18 # generators support, so this doesn't work yet for make.
19 # TODO(mmoss) Make also has the issue that the top-level Makefile is written to
20 # the "--depth" location, which is one level above 'src', but then this test
21 # moves 'src' somewhere else, leaving the Makefile behind, so make can't find
22 # its sources. I'm not sure if make is wrong for writing outside the current
23 # directory, or if the test is wrong for assuming everything generated is under
24 # the current directory.
25 test = TestGyp.TestGyp(formats=['!make'])
26
27 test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
28
29 test.relocate('src', 'relocate/src')
30
31 test.subdir('relocate/builddir')
32
33 # Make sure that all the built ../../etc. files only get put under builddir,
34 # by making all of relocate read-only and then making only builddir writable.
35 test.writable('relocate', False)
36 test.writable('relocate/builddir', True)
37
38 # Suppress the test infrastructure's setting SYMROOT on the command line.
39 test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')
40
41 expect1 = """\
42 Hello from prog1.c
43 Hello from func1.c
44 """
45
46 expect2 = """\
47 Hello from subdir2/prog2.c
48 Hello from func2.c
49 """
50
51 expect3 = """\
52 Hello from subdir2/subdir3/prog3.c
53 Hello from func3.c
54 """
55
56 expect4 = """\
57 Hello from subdir2/subdir3/subdir4/prog4.c
58 Hello from func4.c
59 """
60
61 expect5 = """\
62 Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
63 Hello from func5.c
64 """
65
66 def run_builddir(prog, expect):
67 dir = 'relocate/builddir/Default/'
68 test.run(program=test.workpath(dir + prog), stdout=expect)
69
70 run_builddir('prog1', expect1)
71 run_builddir('prog2', expect2)
72 run_builddir('prog3', expect3)
73 run_builddir('prog4', expect4)
74 run_builddir('prog5', expect5)
75
76 test.pass_test()
+0
-21
mozc_build_tools/gyp/test/builddir/src/builddir.gypi less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'configurations': {
7 'Default': {
8 'msvs_configuration_attributes': {
9 'OutputDirectory': '<(DEPTH)\\builddir\Default',
10 },
11 },
12 },
13 },
14 'scons_settings': {
15 'sconsbuild_dir': '<(DEPTH)/builddir',
16 },
17 'xcode_settings': {
18 'SYMROOT': '<(DEPTH)/builddir',
19 },
20 }
+0
-6
mozc_build_tools/gyp/test/builddir/src/func1.c less more
0 #include <stdio.h>
1
2 void func1(void)
3 {
4 printf("Hello from func1.c\n");
5 }
+0
-6
mozc_build_tools/gyp/test/builddir/src/func2.c less more
0 #include <stdio.h>
1
2 void func2(void)
3 {
4 printf("Hello from func2.c\n");
5 }
+0
-6
mozc_build_tools/gyp/test/builddir/src/func3.c less more
0 #include <stdio.h>
1
2 void func3(void)
3 {
4 printf("Hello from func3.c\n");
5 }
+0
-6
mozc_build_tools/gyp/test/builddir/src/func4.c less more
0 #include <stdio.h>
1
2 void func4(void)
3 {
4 printf("Hello from func4.c\n");
5 }
+0
-6
mozc_build_tools/gyp/test/builddir/src/func5.c less more
0 #include <stdio.h>
1
2 void func5(void)
3 {
4 printf("Hello from func5.c\n");
5 }
+0
-10
mozc_build_tools/gyp/test/builddir/src/prog1.c less more
0 #include <stdio.h>
1
2 extern void func1(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from prog1.c\n");
7 func1();
8 return 0;
9 }
+0
-30
mozc_build_tools/gyp/test/builddir/src/prog1.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 'builddir.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'pull_in_all',
11 'type': 'none',
12 'dependencies': [
13 'prog1',
14 'subdir2/prog2.gyp:prog2',
15 'subdir2/subdir3/prog3.gyp:prog3',
16 'subdir2/subdir3/subdir4/prog4.gyp:prog4',
17 'subdir2/subdir3/subdir4/subdir5/prog5.gyp:prog5',
18 ],
19 },
20 {
21 'target_name': 'prog1',
22 'type': 'executable',
23 'sources': [
24 'prog1.c',
25 'func1.c',
26 ],
27 },
28 ],
29 }
+0
-10
mozc_build_tools/gyp/test/builddir/src/subdir2/prog2.c less more
0 #include <stdio.h>
1
2 extern void func2(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from subdir2/prog2.c\n");
7 func2();
8 return 0;
9 }
+0
-19
mozc_build_tools/gyp/test/builddir/src/subdir2/prog2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../builddir.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog2',
11 'type': 'executable',
12 'sources': [
13 'prog2.c',
14 '../func2.c',
15 ],
16 },
17 ],
18 }
+0
-10
mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/prog3.c less more
0 #include <stdio.h>
1
2 extern void func3(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from subdir2/subdir3/prog3.c\n");
7 func3();
8 return 0;
9 }
+0
-19
mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../../builddir.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog3',
11 'type': 'executable',
12 'sources': [
13 'prog3.c',
14 '../../func3.c',
15 ],
16 },
17 ],
18 }
+0
-10
mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c less more
0 #include <stdio.h>
1
2 extern void func4(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from subdir2/subdir3/subdir4/prog4.c\n");
7 func4();
8 return 0;
9 }
+0
-19
mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../../../builddir.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog4',
11 'type': 'executable',
12 'sources': [
13 'prog4.c',
14 '../../../func4.c',
15 ],
16 },
17 ],
18 }
+0
-10
mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c less more
0 #include <stdio.h>
1
2 extern void func5(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from subdir2/subdir3/subdir4/subdir5/prog5.c\n");
7 func5();
8 return 0;
9 }
+0
-19
mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../../../../builddir.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog5',
11 'type': 'executable',
12 'sources': [
13 'prog5.c',
14 '../../../../func5.c',
15 ],
16 },
17 ],
18 }
+0
-29
mozc_build_tools/gyp/test/compilable/gyptest-headers.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that .hpp files are ignored when included in the source list on all
8 platforms.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('headers.gyp', chdir='src')
16
17 test.relocate('src', 'relocate/src')
18
19 test.build('headers.gyp', test.ALL, chdir='relocate/src')
20
21 expect = """\
22 Hello from program.c
23 Hello from lib1.c
24 """
25 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
26
27
28 test.pass_test()
+0
-26
mozc_build_tools/gyp/test/compilable/src/headers.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program',
8 'type': 'executable',
9 'dependencies': [
10 'lib1'
11 ],
12 'sources': [
13 'program.cpp',
14 ],
15 },
16 {
17 'target_name': 'lib1',
18 'type': 'static_library',
19 'sources': [
20 'lib1.hpp',
21 'lib1.cpp',
22 ],
23 },
24 ],
25 }
+0
-7
mozc_build_tools/gyp/test/compilable/src/lib1.cpp less more
0 #include <stdio.h>
1 #include "lib1.hpp"
2
3 void lib1_function(void) {
4 fprintf(stdout, "Hello from lib1.c\n");
5 fflush(stdout);
6 }
+0
-6
mozc_build_tools/gyp/test/compilable/src/lib1.hpp less more
0 #ifndef _lib1_hpp
1 #define _lib1_hpp
2
3 extern void lib1_function(void);
4
5 #endif
+0
-9
mozc_build_tools/gyp/test/compilable/src/program.cpp less more
0 #include <stdio.h>
1 #include "lib1.hpp"
2
3 int main(int argc, char *argv[]) {
4 fprintf(stdout, "Hello from program.c\n");
5 fflush(stdout);
6 lib1_function();
7 return 0;
8 }
+0
-15
mozc_build_tools/gyp/test/configurations/basics/configurations.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 #ifdef FOO
5 printf("Foo configuration\n");
6 #endif
7 #ifdef DEBUG
8 printf("Debug configuration\n");
9 #endif
10 #ifdef RELEASE
11 printf("Release configuration\n");
12 #endif
13 return 0;
14 }
+0
-32
mozc_build_tools/gyp/test/configurations/basics/configurations.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'configurations',
8 'type': 'executable',
9 'sources': [
10 'configurations.c',
11 ],
12 'configurations': {
13 'Debug': {
14 'defines': [
15 'DEBUG',
16 ],
17 },
18 'Release': {
19 'defines': [
20 'RELEASE',
21 ],
22 },
23 'Foo': {
24 'defines': [
25 'FOO',
26 ],
27 },
28 }
29 },
30 ],
31 }
+0
-29
mozc_build_tools/gyp/test/configurations/basics/gyptest-configurations.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable in three different configurations.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('configurations.gyp')
15
16 test.set_configuration('Release')
17 test.build('configurations.gyp')
18 test.run_built_executable('configurations', stdout="Release configuration\n")
19
20 test.set_configuration('Debug')
21 test.build('configurations.gyp')
22 test.run_built_executable('configurations', stdout="Debug configuration\n")
23
24 test.set_configuration('Foo')
25 test.build('configurations.gyp')
26 test.run_built_executable('configurations', stdout="Foo configuration\n")
27
28 test.pass_test()
+0
-21
mozc_build_tools/gyp/test/configurations/inheritance/configurations.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 #ifdef BASE
5 printf("Base configuration\n");
6 #endif
7 #ifdef COMMON
8 printf("Common configuration\n");
9 #endif
10 #ifdef COMMON2
11 printf("Common2 configuration\n");
12 #endif
13 #ifdef DEBUG
14 printf("Debug configuration\n");
15 #endif
16 #ifdef RELEASE
17 printf("Release configuration\n");
18 #endif
19 return 0;
20 }
+0
-40
mozc_build_tools/gyp/test/configurations/inheritance/configurations.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'configurations': {
7 'Base': {
8 'abstract': 1,
9 'defines': ['BASE'],
10 },
11 'Common': {
12 'abstract': 1,
13 'inherit_from': ['Base'],
14 'defines': ['COMMON'],
15 },
16 'Common2': {
17 'abstract': 1,
18 'defines': ['COMMON2'],
19 },
20 'Debug': {
21 'inherit_from': ['Common', 'Common2'],
22 'defines': ['DEBUG'],
23 },
24 'Release': {
25 'inherit_from': ['Common', 'Common2'],
26 'defines': ['RELEASE'],
27 },
28 },
29 },
30 'targets': [
31 {
32 'target_name': 'configurations',
33 'type': 'executable',
34 'sources': [
35 'configurations.c',
36 ],
37 },
38 ],
39 }
+0
-33
mozc_build_tools/gyp/test/configurations/inheritance/gyptest-inheritance.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable in three different configurations.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('configurations.gyp')
15
16 test.set_configuration('Release')
17 test.build('configurations.gyp')
18 test.run_built_executable('configurations',
19 stdout=('Base configuration\n'
20 'Common configuration\n'
21 'Common2 configuration\n'
22 'Release configuration\n'))
23
24 test.set_configuration('Debug')
25 test.build('configurations.gyp')
26 test.run_built_executable('configurations',
27 stdout=('Base configuration\n'
28 'Common configuration\n'
29 'Common2 configuration\n'
30 'Debug configuration\n'))
31
32 test.pass_test()
+0
-58
mozc_build_tools/gyp/test/configurations/target_platform/configurations.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'configurations': {
7 'Debug_Win32': {
8 'msvs_configuration_platform': 'Win32',
9 },
10 'Debug_x64': {
11 'msvs_configuration_platform': 'x64',
12 },
13 },
14 },
15 'targets': [
16 {
17 'target_name': 'left',
18 'type': 'static_library',
19 'sources': [
20 'left.c',
21 ],
22 'configurations': {
23 'Debug_Win32': {
24 'msvs_target_platform': 'x64',
25 },
26 },
27 },
28 {
29 'target_name': 'right',
30 'type': 'static_library',
31 'sources': [
32 'right.c',
33 ],
34 },
35 {
36 'target_name': 'front_left',
37 'type': 'executable',
38 'dependencies': ['left'],
39 'sources': [
40 'front.c',
41 ],
42 'configurations': {
43 'Debug_Win32': {
44 'msvs_target_platform': 'x64',
45 },
46 },
47 },
48 {
49 'target_name': 'front_right',
50 'type': 'executable',
51 'dependencies': ['right'],
52 'sources': [
53 'front.c',
54 ],
55 },
56 ],
57 }
+0
-8
mozc_build_tools/gyp/test/configurations/target_platform/front.c less more
0 #include <stdio.h>
1
2 const char *message(void);
3
4 int main(int argc, char *argv[]) {
5 printf("%s\n", message());
6 return 0;
7 }
+0
-40
mozc_build_tools/gyp/test/configurations/target_platform/gyptest-target_platform.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Tests the msvs specific msvs_target_platform option.
8 """
9
10 import TestGyp
11 import TestCommon
12
13
14 def RunX64(exe, stdout):
15 try:
16 test.run_built_executable(exe, stdout=stdout)
17 except WindowsError, e:
18 # Assume the exe is 64-bit if it can't load on 32-bit systems.
19 # Both versions of the error are required because different versions
20 # of python seem to return different errors for invalid exe type.
21 if e.errno != 193 and '[Error 193]' not in str(e):
22 raise
23
24
25 test = TestGyp.TestGyp(formats=['msvs'])
26
27 test.run_gyp('configurations.gyp')
28
29 test.set_configuration('Debug|x64')
30 test.build('configurations.gyp', rebuild=True)
31 RunX64('front_left', stdout=('left\n'))
32 RunX64('front_right', stdout=('right\n'))
33
34 test.set_configuration('Debug|Win32')
35 test.build('configurations.gyp', rebuild=True)
36 RunX64('front_left', stdout=('left\n'))
37 test.run_built_executable('front_right', stdout=('right\n'))
38
39 test.pass_test()
+0
-3
mozc_build_tools/gyp/test/configurations/target_platform/left.c less more
0 const char *message(void) {
1 return "left";
2 }
+0
-3
mozc_build_tools/gyp/test/configurations/target_platform/right.c less more
0 const char *message(void) {
1 return "right";
2 }
+0
-12
mozc_build_tools/gyp/test/configurations/x64/configurations.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[]) {
3 if (sizeof(void*) == 4) {
4 printf("Running Win32\n");
5 } else if (sizeof(void*) == 8) {
6 printf("Running x64\n");
7 } else {
8 printf("Unexpected platform\n");
9 }
10 return 0;
11 }
+0
-26
mozc_build_tools/gyp/test/configurations/x64/configurations.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'configurations': {
7 'Debug': {
8 'msvs_configuration_platform': 'Win32',
9 },
10 'Debug_x64': {
11 'inherit_from': ['Debug'],
12 'msvs_configuration_platform': 'x64',
13 },
14 },
15 },
16 'targets': [
17 {
18 'target_name': 'configurations',
19 'type': 'executable',
20 'sources': [
21 'configurations.c',
22 ],
23 },
24 ],
25 }
+0
-29
mozc_build_tools/gyp/test/configurations/x64/gyptest-x86.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable in three different configurations.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp(formats=['msvs'])
13
14 test.run_gyp('configurations.gyp')
15
16 for platform in ['Win32', 'x64']:
17 test.set_configuration('Debug|%s' % platform)
18 test.build('configurations.gyp', rebuild=True)
19 try:
20 test.run_built_executable('configurations',
21 stdout=('Running %s\n' % platform))
22 except WindowsError, e:
23 # Assume the exe is 64-bit if it can't load on 32-bit systems.
24 if platform == 'x64' and (e.errno == 193 or '[Error 193]' in str(e)):
25 continue
26 raise
27
28 test.pass_test()
+0
-40
mozc_build_tools/gyp/test/copies/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies file copies using an explicit build target of 'all'.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('copies.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('copies.gyp', test.ALL, chdir='relocate/src')
19
20 test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
21
22 test.built_file_must_match('copies-out/file2',
23 'file2 contents\n',
24 chdir='relocate/src')
25
26 test.built_file_must_match('copies-out/directory/file3',
27 'file3 contents\n',
28 chdir='relocate/src')
29 test.built_file_must_match('copies-out/directory/file4',
30 'file4 contents\n',
31 chdir='relocate/src')
32 test.built_file_must_match('copies-out/directory/subdir/file5',
33 'file5 contents\n',
34 chdir='relocate/src')
35 test.built_file_must_match('copies-out/subdir/file6',
36 'file6 contents\n',
37 chdir='relocate/src')
38
39 test.pass_test()
+0
-40
mozc_build_tools/gyp/test/copies/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies file copies using the build tool default.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('copies.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('copies.gyp', chdir='relocate/src')
19
20 test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
21
22 test.built_file_must_match('copies-out/file2',
23 'file2 contents\n',
24 chdir='relocate/src')
25
26 test.built_file_must_match('copies-out/directory/file3',
27 'file3 contents\n',
28 chdir='relocate/src')
29 test.built_file_must_match('copies-out/directory/file4',
30 'file4 contents\n',
31 chdir='relocate/src')
32 test.built_file_must_match('copies-out/directory/subdir/file5',
33 'file5 contents\n',
34 chdir='relocate/src')
35 test.built_file_must_match('copies-out/subdir/file6',
36 'file6 contents\n',
37 chdir='relocate/src')
38
39 test.pass_test()
+0
-70
mozc_build_tools/gyp/test/copies/src/copies.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'copies1',
8 'type': 'none',
9 'copies': [
10 {
11 'destination': 'copies-out',
12 'files': [
13 'file1',
14 ],
15 },
16 ],
17 },
18 {
19 'target_name': 'copies2',
20 'type': 'none',
21 'copies': [
22 {
23 'destination': '<(PRODUCT_DIR)/copies-out',
24 'files': [
25 'file2',
26 ],
27 },
28 ],
29 },
30 # Copy a directory tree.
31 {
32 'target_name': 'copies_recursive',
33 'type': 'none',
34 'copies': [
35 {
36 'destination': '<(PRODUCT_DIR)/copies-out',
37 'files': [
38 'directory/',
39 ],
40 },
41 ],
42 },
43 # Copy a directory from deeper in the tree (this should not reproduce the
44 # entire directory path in the destination, only the final directory).
45 {
46 'target_name': 'copies_recursive_depth',
47 'type': 'none',
48 'copies': [
49 {
50 'destination': '<(PRODUCT_DIR)/copies-out',
51 'files': [
52 'parentdir/subdir/',
53 ],
54 },
55 ],
56 },
57 # Verify that a null 'files' list doesn't gag the generators.
58 {
59 'target_name': 'copies_null',
60 'type': 'none',
61 'copies': [
62 {
63 'destination': '<(PRODUCT_DIR)/copies-null',
64 'files': [],
65 },
66 ],
67 },
68 ],
69 }
+0
-1
mozc_build_tools/gyp/test/copies/src/directory/file3 less more
0 file3 contents
+0
-1
mozc_build_tools/gyp/test/copies/src/directory/file4 less more
0 file4 contents
+0
-1
mozc_build_tools/gyp/test/copies/src/directory/subdir/file5 less more
0 file5 contents
+0
-1
mozc_build_tools/gyp/test/copies/src/file1 less more
0 file1 contents
+0
-1
mozc_build_tools/gyp/test/copies/src/file2 less more
0 file2 contents
+0
-1
mozc_build_tools/gyp/test/copies/src/parentdir/subdir/file6 less more
0 file6 contents
+0
-21
mozc_build_tools/gyp/test/copies-link/gyptest-copies-link.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies file copies using the build tool default.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('copies-link.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('copies-link.gyp', chdir='relocate/src')
19
20 test.pass_test()
+0
-61
mozc_build_tools/gyp/test/copies-link/src/copies-link.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'func1',
8 'type': 'static_library',
9 'sources': ['func1.c'],
10 },
11 {
12 'target_name': 'clone_func1',
13 'type': 'none',
14 'dependencies': ['func1'],
15 'actions': [
16 {
17 'action_name': 'cloning library',
18 'inputs': [
19 '<(LIB_DIR)/<(STATIC_LIB_PREFIX)func1<(STATIC_LIB_SUFFIX)'
20 ],
21 'outputs': ['<(PRODUCT_DIR)/alternate/'
22 '<(STATIC_LIB_PREFIX)cloned<(STATIC_LIB_SUFFIX)'],
23 'destination': '<(PRODUCT_DIR)',
24 'action': ['python', 'copy.py', '<@(_inputs)', '<@(_outputs)'],
25 'msvs_cygwin_shell': 0,
26 },
27 ],
28 },
29 {
30 'target_name': 'copy_cloned',
31 'type': 'none',
32 'dependencies': ['clone_func1'],
33 'copies': [
34 {
35 'destination': '<(LIB_DIR)',
36 'files': [
37 '<(PRODUCT_DIR)/alternate/'
38 '<(STATIC_LIB_PREFIX)cloned<(STATIC_LIB_SUFFIX)',
39 ],
40 },
41 ],
42 },
43 {
44 'target_name': 'use_cloned',
45 'type': 'executable',
46 'sources': ['main.c'],
47 'dependencies': ['copy_cloned'],
48 'link_settings': {
49 'conditions': [
50 ['OS=="win"', {
51 'libraries': ['-l"<(LIB_DIR)/cloned.lib"'],
52 }, {
53 'libraries': ['-lcloned'],
54 'ldflags': ['-L <(LIB_DIR)'],
55 }],
56 ],
57 },
58 },
59 ],
60 }
+0
-21
mozc_build_tools/gyp/test/copies-link/src/copy.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import shutil
7 import sys
8
9
10 def main(argv):
11 if len(argv) != 3:
12 print 'USAGE: copy.py <src> <dst>'
13 return 1
14
15 shutil.copy(argv[1], argv[2])
16 return 0
17
18
19 if __name__ == '__main__':
20 sys.exit(main(sys.argv))
+0
-9
mozc_build_tools/gyp/test/copies-link/src/func1.c less more
0 #include <stdio.h>
1
2 extern void func1(void);
3
4 int main(int argc, char *argv[]) {
5 printf("hello from link1\n");
6 func1();
7 return 0;
8 }
+0
-5
mozc_build_tools/gyp/test/copies-link/src/main.c less more
0 #include <stdio.h>
1
2 void func1(void) {
3 printf("hello from func1\n");
4 }
+0
-22
mozc_build_tools/gyp/test/defines/defines-env.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'variables': {
6 'value%': '5',
7 },
8 'targets': [
9 {
10 'target_name': 'defines',
11 'type': 'executable',
12 'sources': [
13 'defines.c',
14 ],
15 'defines': [
16 'VALUE=<(value)',
17 ],
18 },
19 ],
20 }
21
+0
-14
mozc_build_tools/gyp/test/defines/defines.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 int main(int argc, char *argv[])
7 {
8 #ifdef FOO
9 printf("FOO is defined\n");
10 #endif
11 printf("VALUE is %d\n", VALUE);
12 return 0;
13 }
+0
-36
mozc_build_tools/gyp/test/defines/defines.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'defines',
8 'type': 'executable',
9 'sources': [
10 'defines.c',
11 ],
12 'defines': [
13 'FOO',
14 'VALUE=1',
15 ],
16 },
17 ],
18 'conditions': [
19 ['OS=="fakeos"', {
20 'targets': [
21 {
22 'target_name': 'fakeosprogram',
23 'type': 'executable',
24 'sources': [
25 'defines.c',
26 ],
27 'defines': [
28 'FOO',
29 'VALUE=1',
30 ],
31 },
32 ],
33 }],
34 ],
35 }
+0
-34
mozc_build_tools/gyp/test/defines/gyptest-define-override.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that a default gyp define can be overridden.
8 """
9
10 import os
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 # Command-line define
16 test.run_gyp('defines.gyp', '-D', 'OS=fakeos')
17 test.build('defines.gyp')
18 test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE)
19 # Clean up the exe so subsequent tests don't find an old exe.
20 os.remove(test.built_file_path('fakeosprogram', type=test.EXECUTABLE))
21
22 # Without "OS" override, fokeosprogram shouldn't be built.
23 test.run_gyp('defines.gyp')
24 test.build('defines.gyp')
25 test.built_file_must_not_exist('fakeosprogram', type=test.EXECUTABLE)
26
27 # Environment define
28 os.environ['GYP_DEFINES'] = 'OS=fakeos'
29 test.run_gyp('defines.gyp')
30 test.build('defines.gyp')
31 test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE)
32
33 test.pass_test()
+0
-49
mozc_build_tools/gyp/test/defines/gyptest-defines-env-regyp.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable with C++ define specified by a gyp define, and
8 the use of the environment during regeneration when the gyp file changes.
9 """
10
11 import os
12 import TestGyp
13
14 # Regenerating build files when a gyp file changes is currently only supported
15 # by the make generator.
16 test = TestGyp.TestGyp(formats=['make'])
17
18 try:
19 os.environ['GYP_DEFINES'] = 'value=50'
20 test.run_gyp('defines.gyp')
21 finally:
22 # We clear the environ after calling gyp. When the auto-regeneration happens,
23 # the same define should be reused anyway. Reset to empty string first in
24 # case the platform doesn't support unsetenv.
25 os.environ['GYP_DEFINES'] = ''
26 del os.environ['GYP_DEFINES']
27
28 test.build('defines.gyp')
29
30 expect = """\
31 FOO is defined
32 VALUE is 1
33 """
34 test.run_built_executable('defines', stdout=expect)
35
36 # Sleep so that the changed gyp file will have a newer timestamp than the
37 # previously generated build files.
38 test.sleep()
39 test.write('defines.gyp', test.read('defines-env.gyp'))
40
41 test.build('defines.gyp', test.ALL)
42
43 expect = """\
44 VALUE is 50
45 """
46 test.run_built_executable('defines', stdout=expect)
47
48 test.pass_test()
+0
-85
mozc_build_tools/gyp/test/defines/gyptest-defines-env.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable with C++ define specified by a gyp define.
8 """
9
10 import os
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 # With the value only given in environment, it should be used.
16 try:
17 os.environ['GYP_DEFINES'] = 'value=10'
18 test.run_gyp('defines-env.gyp')
19 finally:
20 del os.environ['GYP_DEFINES']
21
22 test.build('defines-env.gyp')
23
24 expect = """\
25 VALUE is 10
26 """
27 test.run_built_executable('defines', stdout=expect)
28
29
30 # With the value given in both command line and environment,
31 # command line should take precedence.
32 try:
33 os.environ['GYP_DEFINES'] = 'value=20'
34 test.run_gyp('defines-env.gyp', '-Dvalue=25')
35 finally:
36 del os.environ['GYP_DEFINES']
37
38 test.sleep()
39 test.touch('defines.c')
40 test.build('defines-env.gyp')
41
42 expect = """\
43 VALUE is 25
44 """
45 test.run_built_executable('defines', stdout=expect)
46
47
48 # With the value only given in environment, it should be ignored if
49 # --ignore-environment is specified.
50 try:
51 os.environ['GYP_DEFINES'] = 'value=30'
52 test.run_gyp('defines-env.gyp', '--ignore-environment')
53 finally:
54 del os.environ['GYP_DEFINES']
55
56 test.sleep()
57 test.touch('defines.c')
58 test.build('defines-env.gyp')
59
60 expect = """\
61 VALUE is 5
62 """
63 test.run_built_executable('defines', stdout=expect)
64
65
66 # With the value given in both command line and environment, and
67 # --ignore-environment also specified, command line should still be used.
68 try:
69 os.environ['GYP_DEFINES'] = 'value=40'
70 test.run_gyp('defines-env.gyp', '--ignore-environment', '-Dvalue=45')
71 finally:
72 del os.environ['GYP_DEFINES']
73
74 test.sleep()
75 test.touch('defines.c')
76 test.build('defines-env.gyp')
77
78 expect = """\
79 VALUE is 45
80 """
81 test.run_built_executable('defines', stdout=expect)
82
83
84 test.pass_test()
+0
-25
mozc_build_tools/gyp/test/defines/gyptest-defines.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable with C++ defines.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('defines.gyp')
15
16 test.build('defines.gyp')
17
18 expect = """\
19 FOO is defined
20 VALUE is 1
21 """
22 test.run_built_executable('defines', stdout=expect)
23
24 test.pass_test()
+0
-11
mozc_build_tools/gyp/test/defines-escaping/defines-escaping.c less more
0 /* Copyright (c) 2010 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 int main(int argc, char *argv[])
7 {
8 printf(TEST_FORMAT, TEST_ARGS);
9 return 0;
10 }
+0
-19
mozc_build_tools/gyp/test/defines-escaping/defines-escaping.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'defines_escaping',
8 'type': 'executable',
9 'sources': [
10 'defines-escaping.c',
11 ],
12 'defines': [
13 'TEST_FORMAT="<(test_format)"',
14 'TEST_ARGS=<(test_args)',
15 ],
16 },
17 ],
18 }
+0
-163
mozc_build_tools/gyp/test/defines-escaping/gyptest-defines-escaping.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2010 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable with C++ define specified by a gyp define using
8 various special characters such as quotes, commas, etc.
9 """
10
11 import os
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 # Tests string literals, percents, and backslash escapes.
17 try:
18 os.environ['GYP_DEFINES'] = \
19 """test_format='%s\\n' test_args='"Simple test of %s with a literal"'"""
20 test.run_gyp('defines-escaping.gyp')
21 finally:
22 del os.environ['GYP_DEFINES']
23
24 test.build('defines-escaping.gyp')
25
26 expect = """\
27 Simple test of %s with a literal
28 """
29 test.run_built_executable('defines_escaping', stdout=expect)
30
31
32 # Test multiple comma-and-space-separated string literals.
33 try:
34 os.environ['GYP_DEFINES'] = \
35 """test_format='%s and %s\\n' test_args='"foo", "bar"'"""
36 test.run_gyp('defines-escaping.gyp')
37 finally:
38 del os.environ['GYP_DEFINES']
39
40 test.sleep()
41 test.touch('defines-escaping.c')
42 test.build('defines-escaping.gyp')
43
44 expect = """\
45 foo and bar
46 """
47 test.run_built_executable('defines_escaping', stdout=expect)
48
49
50 # Test string literals containing quotes.
51 try:
52 os.environ['GYP_DEFINES'] = \
53 ("""test_format='%s %s %s %s %s\\n' """ +
54 """test_args='"\\"These,\\"",""" +
55 """ "\\"words,\\"","""
56 """ "\\"are,\\"",""" +
57 """ "\\"in,\\"",""" +
58 """ "\\"quotes.\\""'""")
59 test.run_gyp('defines-escaping.gyp')
60 finally:
61 del os.environ['GYP_DEFINES']
62
63 test.sleep()
64 test.touch('defines-escaping.c')
65 test.build('defines-escaping.gyp')
66
67 expect = """\
68 "These," "words," "are," "in," "quotes."
69 """
70 test.run_built_executable('defines_escaping', stdout=expect)
71
72
73 # Test string literals containing single quotes.
74 try:
75 os.environ['GYP_DEFINES'] = \
76 ("""test_format='%s %s %s %s %s\\n' """ +
77 """test_args="\\"'These,'\\",""" +
78 """ \\"'words,'\\","""
79 """ \\"'are,'\\",""" +
80 """ \\"'in,'\\",""" +
81 """ \\"'quotes.'\\"" """)
82 test.run_gyp('defines-escaping.gyp')
83 finally:
84 del os.environ['GYP_DEFINES']
85
86 test.sleep()
87 test.touch('defines-escaping.c')
88 test.build('defines-escaping.gyp')
89
90 expect = """\
91 'These,' 'words,' 'are,' 'in,' 'quotes.'
92 """
93 test.run_built_executable('defines_escaping', stdout=expect)
94
95
96 # Test string literals containing different numbers of backslashes before quotes
97 # (to exercise Windows' quoting behaviour).
98 try:
99 os.environ['GYP_DEFINES'] = \
100 ("""test_format='%s\\n%s\\n%s\\n' """ +
101 """test_args='"\\\\\\"1 visible slash\\\\\\"",""" +
102 """ "\\\\\\\\\\"2 visible slashes\\\\\\\\\\"","""
103 """ "\\\\\\\\\\\\\\"3 visible slashes\\\\\\\\\\\\\\""'""")
104 test.run_gyp('defines-escaping.gyp')
105 finally:
106 del os.environ['GYP_DEFINES']
107
108 test.sleep()
109 test.touch('defines-escaping.c')
110 test.build('defines-escaping.gyp')
111
112 expect = """\
113 \\"1 visible slash\\"
114 \\\\"2 visible slashes\\\\"
115 \\\\\\"3 visible slashes\\\\\\"
116 """
117 test.run_built_executable('defines_escaping', stdout=expect)
118
119
120 # Test that various scary sequences are passed unfettered.
121 try:
122 os.environ['GYP_DEFINES'] = \
123 ("""test_format='%s\\n' """ +
124 """test_args='"%PATH%, $foo, &quot; `foo`;"'""")
125 test.run_gyp('defines-escaping.gyp')
126 finally:
127 del os.environ['GYP_DEFINES']
128
129 test.sleep()
130 test.touch('defines-escaping.c')
131 test.build('defines-escaping.gyp')
132
133 expect = """\
134 %PATH%, $foo, &quot; `foo`;
135 """
136 test.run_built_executable('defines_escaping', stdout=expect)
137
138
139 # Test commas and semi-colons preceded by backslashes (to exercise Windows'
140 # quoting behaviour).
141 try:
142 os.environ['GYP_DEFINES'] = \
143 ("""test_format='%s\\n%s\\n' """ +
144 """test_args='"\\\\, \\\\\\\\;",""" +
145 # Same thing again, but enclosed in visible quotes.
146 """ "\\"\\\\, \\\\\\\\;\\""'""")
147 test.run_gyp('defines-escaping.gyp')
148 finally:
149 del os.environ['GYP_DEFINES']
150
151 test.sleep()
152 test.touch('defines-escaping.c')
153 test.build('defines-escaping.gyp')
154
155 expect = """\
156 \\, \\\\;
157 "\\, \\\\;"
158 """
159 test.run_built_executable('defines_escaping', stdout=expect)
160
161 # We deliberately do not test having an odd number of quotes in a string
162 # literal because that isn't feasible in MSVS.
+0
-9
mozc_build_tools/gyp/test/dependencies/a.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 extern int funcB();
5
6 int funcA() {
7 return funcB();
8 }
+0
-3
mozc_build_tools/gyp/test/dependencies/b/b.c less more
0 int funcB() {
1 return 2;
2 }
+0
-15
mozc_build_tools/gyp/test/dependencies/b/b.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'b',
8 'type': 'static_library',
9 'sources': [
10 'b.c',
11 ],
12 },
13 ],
14 }
+0
-4
mozc_build_tools/gyp/test/dependencies/c/c.c less more
0 int funcC() {
1 return 3
2 // Intentional syntax error. This file should never be compiled, so this
3 // shouldn't be a problem.
+0
-22
mozc_build_tools/gyp/test/dependencies/c/c.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'c_unused',
8 'type': 'static_library',
9 'sources': [
10 'c.c',
11 ],
12 },
13 {
14 'target_name': 'd',
15 'type': 'static_library',
16 'sources': [
17 'd.c',
18 ],
19 },
20 ],
21 }
+0
-3
mozc_build_tools/gyp/test/dependencies/c/d.c less more
0 int funcD() {
1 return 4;
2 }
+0
-18
mozc_build_tools/gyp/test/dependencies/extra_targets.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'a',
8 'type': 'static_library',
9 'sources': [
10 'a.c',
11 ],
12 # This only depends on the "d" target; other targets in c.gyp
13 # should not become part of the build (unlike with 'c/c.gyp:*').
14 'dependencies': ['c/c.gyp:d'],
15 },
16 ],
17 }
+0
-21
mozc_build_tools/gyp/test/dependencies/gyptest-extra-targets.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verify that dependencies don't pull unused targets into the build.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('extra_targets.gyp')
15
16 # This should fail if it tries to build 'c_unused' since 'c/c.c' has a syntax
17 # error and won't compile.
18 test.build('extra_targets.gyp', test.ALL)
19
20 test.pass_test()
+0
-33
mozc_build_tools/gyp/test/dependencies/gyptest-lib-only.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verify that a link time only dependency will get pulled into the set of built
8 targets, even if no executable uses it.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('lib_only.gyp')
16
17 test.build('lib_only.gyp', test.ALL)
18
19 # Make doesn't put static libs in a common 'lib' directory, like it does with
20 # shared libs, so check in the obj path corresponding to the source path.
21 test.built_file_must_exist('a', type=test.STATIC_LIB, libdir='obj.target')
22
23 # TODO(bradnelson/mark):
24 # On linux and windows a library target will at least pull its link dependencies
25 # into the generated sln/_main.scons, since not doing so confuses users.
26 # This is not currently implemented on mac, which has the opposite behavior.
27 if test.format == 'xcode':
28 test.built_file_must_not_exist('b', type=test.STATIC_LIB)
29 else:
30 test.built_file_must_exist('b', type=test.STATIC_LIB, libdir='obj.target/b')
31
32 test.pass_test()
+0
-16
mozc_build_tools/gyp/test/dependencies/lib_only.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'a',
8 'type': 'static_library',
9 'sources': [
10 'a.c',
11 ],
12 'dependencies': ['b/b.gyp:b'],
13 },
14 ],
15 }
+0
-26
mozc_build_tools/gyp/test/dependency-copy/gyptest-copy.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies dependencies do the copy step.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('copies.gyp', chdir='src')
15
16 test.build('copies.gyp', 'proj2', chdir='src')
17
18 test.run_built_executable('proj1',
19 chdir='src',
20 stdout="Hello from file1.c\n")
21 test.run_built_executable('proj2',
22 chdir='src',
23 stdout="Hello from file2.c\n")
24
25 test.pass_test()
+0
-25
mozc_build_tools/gyp/test/dependency-copy/src/copies.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'proj1',
8 'type': 'executable',
9 'sources': [
10 'file1.c',
11 ],
12 },
13 {
14 'target_name': 'proj2',
15 'type': 'executable',
16 'sources': [
17 'file2.c',
18 ],
19 'dependencies': [
20 'proj1',
21 ]
22 },
23 ],
24 }
+0
-7
mozc_build_tools/gyp/test/dependency-copy/src/file1.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from file1.c\n");
5 return 0;
6 }
+0
-7
mozc_build_tools/gyp/test/dependency-copy/src/file2.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from file2.c\n");
5 return 0;
6 }
+0
-16
mozc_build_tools/gyp/test/generator-output/actions/actions.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'pull_in_all_actions',
8 'type': 'none',
9 'dependencies': [
10 'subdir1/executable.gyp:*',
11 'subdir2/none.gyp:*',
12 ],
13 },
14 ],
15 }
+0
-4
mozc_build_tools/gyp/test/generator-output/actions/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
mozc_build_tools/gyp/test/generator-output/actions/subdir1/actions-out/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
mozc_build_tools/gyp/test/generator-output/actions/subdir1/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-44
mozc_build_tools/gyp/test/generator-output/actions/subdir1/executable.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program',
8 'type': 'executable',
9 'msvs_cygwin_shell': 0,
10 'sources': [
11 'program.c',
12 ],
13 'actions': [
14 {
15 'action_name': 'make-prog1',
16 'inputs': [
17 'make-prog1.py',
18 ],
19 'outputs': [
20 '<(INTERMEDIATE_DIR)/prog1.c',
21 ],
22 'action': [
23 'python', '<(_inputs)', '<@(_outputs)',
24 ],
25 'process_outputs_as_sources': 1,
26 },
27 {
28 'action_name': 'make-prog2',
29 'inputs': [
30 'make-prog2.py',
31 ],
32 'outputs': [
33 'actions-out/prog2.c',
34 ],
35 'action': [
36 'python', '<(_inputs)', '<@(_outputs)',
37 ],
38 'process_outputs_as_sources': 1,
39 },
40 ],
41 },
42 ],
43 }
+0
-20
mozc_build_tools/gyp/test/generator-output/actions/subdir1/make-prog1.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = r"""
9 #include <stdio.h>
10
11 void prog1(void)
12 {
13 printf("Hello from make-prog1.py\n");
14 }
15 """
16
17 open(sys.argv[1], 'w').write(contents)
18
19 sys.exit(0)
+0
-20
mozc_build_tools/gyp/test/generator-output/actions/subdir1/make-prog2.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = r"""
9 #include <stdio.h>
10
11 void prog2(void)
12 {
13 printf("Hello from make-prog2.py\n");
14 }
15 """
16
17 open(sys.argv[1], 'w').write(contents)
18
19 sys.exit(0)
+0
-12
mozc_build_tools/gyp/test/generator-output/actions/subdir1/program.c less more
0 #include <stdio.h>
1
2 extern void prog1(void);
3 extern void prog2(void);
4
5 int main(int argc, char *argv[])
6 {
7 printf("Hello from program.c\n");
8 prog1();
9 prog2();
10 return 0;
11 }
+0
-4
mozc_build_tools/gyp/test/generator-output/actions/subdir2/actions-out/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
mozc_build_tools/gyp/test/generator-output/actions/subdir2/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-11
mozc_build_tools/gyp/test/generator-output/actions/subdir2/make-file.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = "Hello from make-file.py\n"
9
10 open(sys.argv[1], 'wb').write(contents)
+0
-31
mozc_build_tools/gyp/test/generator-output/actions/subdir2/none.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'file',
8 'type': 'none',
9 'msvs_cygwin_shell': 0,
10 'actions': [
11 {
12 'action_name': 'make-file',
13 'inputs': [
14 'make-file.py',
15 ],
16 'outputs': [
17 'actions-out/file.out',
18 # TODO: enhance testing infrastructure to test this
19 # without having to hard-code the intermediate dir paths.
20 #'<(INTERMEDIATE_DIR)/file.out',
21 ],
22 'action': [
23 'python', '<(_inputs)', '<@(_outputs)',
24 ],
25 'process_outputs_as_sources': 1,
26 }
27 ],
28 },
29 ],
30 }
+0
-4
mozc_build_tools/gyp/test/generator-output/copies/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
mozc_build_tools/gyp/test/generator-output/copies/copies-out/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-50
mozc_build_tools/gyp/test/generator-output/copies/copies.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'pull_in_subdir',
8 'type': 'none',
9 'dependencies': [
10 'subdir/subdir.gyp:*',
11 ],
12 },
13 {
14 'target_name': 'copies1',
15 'type': 'none',
16 'copies': [
17 {
18 'destination': 'copies-out',
19 'files': [
20 'file1',
21 ],
22 },
23 ],
24 },
25 {
26 'target_name': 'copies2',
27 'type': 'none',
28 'copies': [
29 {
30 'destination': '<(PRODUCT_DIR)/copies-out',
31 'files': [
32 'file2',
33 ],
34 },
35 ],
36 },
37 # Verify that a null 'files' list doesn't gag the generators.
38 {
39 'target_name': 'copies_null',
40 'type': 'none',
41 'copies': [
42 {
43 'destination': '<(PRODUCT_DIR)/copies-null',
44 'files': [],
45 },
46 ],
47 },
48 ],
49 }
+0
-1
mozc_build_tools/gyp/test/generator-output/copies/file1 less more
0 file1 contents
+0
-1
mozc_build_tools/gyp/test/generator-output/copies/file2 less more
0 file2 contents
+0
-4
mozc_build_tools/gyp/test/generator-output/copies/subdir/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
mozc_build_tools/gyp/test/generator-output/copies/subdir/copies-out/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-1
mozc_build_tools/gyp/test/generator-output/copies/subdir/file3 less more
0 file3 contents
+0
-1
mozc_build_tools/gyp/test/generator-output/copies/subdir/file4 less more
0 file4 contents
+0
-32
mozc_build_tools/gyp/test/generator-output/copies/subdir/subdir.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'copies3',
8 'type': 'none',
9 'copies': [
10 {
11 'destination': 'copies-out',
12 'files': [
13 'file3',
14 ],
15 },
16 ],
17 },
18 {
19 'target_name': 'copies4',
20 'type': 'none',
21 'copies': [
22 {
23 'destination': '<(PRODUCT_DIR)/copies-out',
24 'files': [
25 'file4',
26 ],
27 },
28 ],
29 },
30 ],
31 }
+0
-57
mozc_build_tools/gyp/test/generator-output/gyptest-actions.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies --generator-output= behavior when using actions.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 # All the generated files should go under 'gypfiles'. The source directory
15 # ('actions') should be untouched.
16 test.writable(test.workpath('actions'), False)
17 test.run_gyp('actions.gyp',
18 '--generator-output=' + test.workpath('gypfiles'),
19 chdir='actions')
20
21 test.writable(test.workpath('actions'), True)
22
23 test.relocate('actions', 'relocate/actions')
24 test.relocate('gypfiles', 'relocate/gypfiles')
25
26 test.writable(test.workpath('relocate/actions'), False)
27
28 # Some of the action outputs use "pure" relative paths (i.e. without prefixes
29 # like <(INTERMEDIATE_DIR) or <(PROGRAM_DIR)). Even though we are building under
30 # 'gypfiles', such outputs will still be created relative to the original .gyp
31 # sources. Projects probably wouldn't normally do this, since it kind of defeats
32 # the purpose of '--generator-output', but it is supported behaviour.
33 test.writable(test.workpath('relocate/actions/build'), True)
34 test.writable(test.workpath('relocate/actions/subdir1/build'), True)
35 test.writable(test.workpath('relocate/actions/subdir1/actions-out'), True)
36 test.writable(test.workpath('relocate/actions/subdir2/build'), True)
37 test.writable(test.workpath('relocate/actions/subdir2/actions-out'), True)
38
39 test.build('actions.gyp', test.ALL, chdir='relocate/gypfiles')
40
41 expect = """\
42 Hello from program.c
43 Hello from make-prog1.py
44 Hello from make-prog2.py
45 """
46
47 if test.format == 'xcode':
48 chdir = 'relocate/actions/subdir1'
49 else:
50 chdir = 'relocate/gypfiles'
51 test.run_built_executable('program', chdir=chdir, stdout=expect)
52
53 test.must_match('relocate/actions/subdir2/actions-out/file.out',
54 "Hello from make-file.py\n")
55
56 test.pass_test()
+0
-57
mozc_build_tools/gyp/test/generator-output/gyptest-copies.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies file copies using an explicit build target of 'all'.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.writable(test.workpath('copies'), False)
15
16 test.run_gyp('copies.gyp',
17 '--generator-output=' + test.workpath('gypfiles'),
18 chdir='copies')
19
20 test.writable(test.workpath('copies'), True)
21
22 test.relocate('copies', 'relocate/copies')
23 test.relocate('gypfiles', 'relocate/gypfiles')
24
25 test.writable(test.workpath('relocate/copies'), False)
26
27 test.writable(test.workpath('relocate/copies/build'), True)
28 test.writable(test.workpath('relocate/copies/copies-out'), True)
29 test.writable(test.workpath('relocate/copies/subdir/build'), True)
30 test.writable(test.workpath('relocate/copies/subdir/copies-out'), True)
31
32 test.build('copies.gyp', test.ALL, chdir='relocate/gypfiles')
33
34 test.must_match(['relocate', 'copies', 'copies-out', 'file1'],
35 "file1 contents\n")
36
37 if test.format == 'xcode':
38 chdir = 'relocate/copies/build'
39 elif test.format == 'make':
40 chdir = 'relocate/gypfiles/out'
41 else:
42 chdir = 'relocate/gypfiles'
43 test.must_match([chdir, 'Default', 'copies-out', 'file2'], "file2 contents\n")
44
45 test.must_match(['relocate', 'copies', 'subdir', 'copies-out', 'file3'],
46 "file3 contents\n")
47
48 if test.format == 'xcode':
49 chdir = 'relocate/copies/subdir/build'
50 elif test.format == 'make':
51 chdir = 'relocate/gypfiles/out'
52 else:
53 chdir = 'relocate/gypfiles'
54 test.must_match([chdir, 'Default', 'copies-out', 'file4'], "file4 contents\n")
55
56 test.pass_test()
+0
-59
mozc_build_tools/gyp/test/generator-output/gyptest-relocate.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that a project hierarchy created with the --generator-output=
8 option can be built even when it's relocated to a different path.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.writable(test.workpath('src'), False)
16
17 test.run_gyp('prog1.gyp',
18 '-Dset_symroot=1',
19 '--generator-output=' + test.workpath('gypfiles'),
20 chdir='src')
21
22 test.writable(test.workpath('src'), True)
23
24 test.relocate('src', 'relocate/src')
25 test.relocate('gypfiles', 'relocate/gypfiles')
26
27 test.writable(test.workpath('relocate/src'), False)
28
29 test.writable(test.workpath('relocate/src/build'), True)
30 test.writable(test.workpath('relocate/src/subdir2/build'), True)
31 test.writable(test.workpath('relocate/src/subdir3/build'), True)
32
33 test.build('prog1.gyp', test.ALL, chdir='relocate/gypfiles')
34
35 chdir = 'relocate/gypfiles'
36
37 expect = """\
38 Hello from %s
39 Hello from inc.h
40 Hello from inc1/include1.h
41 Hello from inc2/include2.h
42 Hello from inc3/include3.h
43 Hello from subdir2/deeper/deeper.h
44 """
45
46 if test.format == 'xcode':
47 chdir = 'relocate/src'
48 test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
49
50 if test.format == 'xcode':
51 chdir = 'relocate/src/subdir2'
52 test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
53
54 if test.format == 'xcode':
55 chdir = 'relocate/src/subdir3'
56 test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
57
58 test.pass_test()
+0
-58
mozc_build_tools/gyp/test/generator-output/gyptest-rules.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies --generator-output= behavior when using rules.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.writable(test.workpath('rules'), False)
15
16 test.run_gyp('rules.gyp',
17 '--generator-output=' + test.workpath('gypfiles'),
18 chdir='rules')
19
20 test.writable(test.workpath('rules'), True)
21
22 test.relocate('rules', 'relocate/rules')
23 test.relocate('gypfiles', 'relocate/gypfiles')
24
25 test.writable(test.workpath('relocate/rules'), False)
26
27 test.writable(test.workpath('relocate/rules/build'), True)
28 test.writable(test.workpath('relocate/rules/subdir1/build'), True)
29 test.writable(test.workpath('relocate/rules/subdir2/build'), True)
30 test.writable(test.workpath('relocate/rules/subdir2/rules-out'), True)
31
32 test.build('rules.gyp', test.ALL, chdir='relocate/gypfiles')
33
34 expect = """\
35 Hello from program.c
36 Hello from function1.in1
37 Hello from function2.in1
38 Hello from define3.in0
39 Hello from define4.in0
40 """
41
42 if test.format == 'xcode':
43 chdir = 'relocate/rules/subdir1'
44 else:
45 chdir = 'relocate/gypfiles'
46 test.run_built_executable('program', chdir=chdir, stdout=expect)
47
48 test.must_match('relocate/rules/subdir2/rules-out/file1.out',
49 "Hello from file1.in0\n")
50 test.must_match('relocate/rules/subdir2/rules-out/file2.out',
51 "Hello from file2.in0\n")
52 test.must_match('relocate/rules/subdir2/rules-out/file3.out',
53 "Hello from file3.in1\n")
54 test.must_match('relocate/rules/subdir2/rules-out/file4.out',
55 "Hello from file4.in1\n")
56
57 test.pass_test()
+0
-36
mozc_build_tools/gyp/test/generator-output/gyptest-subdir2-deep.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a target from a .gyp file a few subdirectories
8 deep when the --generator-output= option is used to put the build
9 configuration files in a separate directory tree.
10 """
11
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 test.writable(test.workpath('src'), False)
17
18 test.writable(test.workpath('src/subdir2/deeper/build'), True)
19
20 test.run_gyp('deeper.gyp',
21 '-Dset_symroot=1',
22 '--generator-output=' + test.workpath('gypfiles'),
23 chdir='src/subdir2/deeper')
24
25 test.build('deeper.gyp', test.ALL, chdir='gypfiles')
26
27 chdir = 'gypfiles'
28
29 if test.format == 'xcode':
30 chdir = 'src/subdir2/deeper'
31 test.run_built_executable('deeper',
32 chdir=chdir,
33 stdout="Hello from deeper.c\n")
34
35 test.pass_test()
+0
-53
mozc_build_tools/gyp/test/generator-output/gyptest-top-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a project hierarchy created when the --generator-output=
8 option is used to put the build configuration files in a separate
9 directory tree.
10 """
11
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 test.writable(test.workpath('src'), False)
17
18 test.run_gyp('prog1.gyp',
19 '-Dset_symroot=1',
20 '--generator-output=' + test.workpath('gypfiles'),
21 chdir='src')
22
23 test.writable(test.workpath('src/build'), True)
24 test.writable(test.workpath('src/subdir2/build'), True)
25 test.writable(test.workpath('src/subdir3/build'), True)
26
27 test.build('prog1.gyp', test.ALL, chdir='gypfiles')
28
29 chdir = 'gypfiles'
30
31 expect = """\
32 Hello from %s
33 Hello from inc.h
34 Hello from inc1/include1.h
35 Hello from inc2/include2.h
36 Hello from inc3/include3.h
37 Hello from subdir2/deeper/deeper.h
38 """
39
40 if test.format == 'xcode':
41 chdir = 'src'
42 test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
43
44 if test.format == 'xcode':
45 chdir = 'src/subdir2'
46 test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
47
48 if test.format == 'xcode':
49 chdir = 'src/subdir3'
50 test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
51
52 test.pass_test()
+0
-4
mozc_build_tools/gyp/test/generator-output/rules/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-12
mozc_build_tools/gyp/test/generator-output/rules/copy-file.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = open(sys.argv[1], 'r').read()
9 open(sys.argv[2], 'wb').write(contents)
10
11 sys.exit(0)
+0
-16
mozc_build_tools/gyp/test/generator-output/rules/rules.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'pull_in_all_actions',
8 'type': 'none',
9 'dependencies': [
10 'subdir1/executable.gyp:*',
11 'subdir2/none.gyp:*',
12 ],
13 },
14 ],
15 }
+0
-4
mozc_build_tools/gyp/test/generator-output/rules/subdir1/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-1
mozc_build_tools/gyp/test/generator-output/rules/subdir1/define3.in0 less more
0 #define STRING3 "Hello from define3.in0\n"
+0
-1
mozc_build_tools/gyp/test/generator-output/rules/subdir1/define4.in0 less more
0 #define STRING4 "Hello from define4.in0\n"
+0
-59
mozc_build_tools/gyp/test/generator-output/rules/subdir1/executable.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program',
8 'type': 'executable',
9 'msvs_cygwin_shell': 0,
10 'sources': [
11 'program.c',
12 'function1.in1',
13 'function2.in1',
14 'define3.in0',
15 'define4.in0',
16 ],
17 'include_dirs': [
18 '<(INTERMEDIATE_DIR)',
19 ],
20 'rules': [
21 {
22 'rule_name': 'copy_file_0',
23 'extension': 'in0',
24 'inputs': [
25 '../copy-file.py',
26 ],
27 'outputs': [
28 # TODO: fix SCons and Make to support generated files not
29 # in a variable-named path like <(INTERMEDIATE_DIR)
30 #'<(RULE_INPUT_ROOT).c',
31 '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h',
32 ],
33 'action': [
34 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
35 ],
36 'process_outputs_as_sources': 0,
37 },
38 {
39 'rule_name': 'copy_file_1',
40 'extension': 'in1',
41 'inputs': [
42 '../copy-file.py',
43 ],
44 'outputs': [
45 # TODO: fix SCons and Make to support generated files not
46 # in a variable-named path like <(INTERMEDIATE_DIR)
47 #'<(RULE_INPUT_ROOT).c',
48 '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
49 ],
50 'action': [
51 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
52 ],
53 'process_outputs_as_sources': 1,
54 },
55 ],
56 },
57 ],
58 }
+0
-6
mozc_build_tools/gyp/test/generator-output/rules/subdir1/function1.in1 less more
0 #include <stdio.h>
1
2 void function1(void)
3 {
4 printf("Hello from function1.in1\n");
5 }
+0
-6
mozc_build_tools/gyp/test/generator-output/rules/subdir1/function2.in1 less more
0 #include <stdio.h>
1
2 void function2(void)
3 {
4 printf("Hello from function2.in1\n");
5 }
+0
-18
mozc_build_tools/gyp/test/generator-output/rules/subdir1/program.c less more
0 #include <stdio.h>
1 #include "define3.h"
2 #include "define4.h"
3
4 extern void function1(void);
5 extern void function2(void);
6 extern void function3(void);
7 extern void function4(void);
8
9 int main(int argc, char *argv[])
10 {
11 printf("Hello from program.c\n");
12 function1();
13 function2();
14 printf("%s", STRING3);
15 printf("%s", STRING4);
16 return 0;
17 }
+0
-4
mozc_build_tools/gyp/test/generator-output/rules/subdir2/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-1
mozc_build_tools/gyp/test/generator-output/rules/subdir2/file1.in0 less more
0 Hello from file1.in0
+0
-1
mozc_build_tools/gyp/test/generator-output/rules/subdir2/file2.in0 less more
0 Hello from file2.in0
+0
-1
mozc_build_tools/gyp/test/generator-output/rules/subdir2/file3.in1 less more
0 Hello from file3.in1
+0
-1
mozc_build_tools/gyp/test/generator-output/rules/subdir2/file4.in1 less more
0 Hello from file4.in1
+0
-49
mozc_build_tools/gyp/test/generator-output/rules/subdir2/none.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'files',
8 'type': 'none',
9 'msvs_cygwin_shell': 0,
10 'sources': [
11 'file1.in0',
12 'file2.in0',
13 'file3.in1',
14 'file4.in1',
15 ],
16 'rules': [
17 {
18 'rule_name': 'copy_file_0',
19 'extension': 'in0',
20 'inputs': [
21 '../copy-file.py',
22 ],
23 'outputs': [
24 'rules-out/<(RULE_INPUT_ROOT).out',
25 ],
26 'action': [
27 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
28 ],
29 'process_outputs_as_sources': 0,
30 },
31 {
32 'rule_name': 'copy_file_1',
33 'extension': 'in1',
34 'inputs': [
35 '../copy-file.py',
36 ],
37 'outputs': [
38 'rules-out/<(RULE_INPUT_ROOT).out',
39 ],
40 'action': [
41 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
42 ],
43 'process_outputs_as_sources': 1,
44 },
45 ],
46 },
47 ],
48 }
+0
-4
mozc_build_tools/gyp/test/generator-output/rules/subdir2/rules-out/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
mozc_build_tools/gyp/test/generator-output/src/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-1
mozc_build_tools/gyp/test/generator-output/src/inc.h less more
0 #define INC_STRING "inc.h"
+0
-1
mozc_build_tools/gyp/test/generator-output/src/inc1/include1.h less more
0 #define INCLUDE1_STRING "inc1/include1.h"
+0
-18
mozc_build_tools/gyp/test/generator-output/src/prog1.c less more
0 #include <stdio.h>
1
2 #include "inc.h"
3 #include "include1.h"
4 #include "include2.h"
5 #include "include3.h"
6 #include "deeper.h"
7
8 int main(int argc, char *argv[])
9 {
10 printf("Hello from prog1.c\n");
11 printf("Hello from %s\n", INC_STRING);
12 printf("Hello from %s\n", INCLUDE1_STRING);
13 printf("Hello from %s\n", INCLUDE2_STRING);
14 printf("Hello from %s\n", INCLUDE3_STRING);
15 printf("Hello from %s\n", DEEPER_STRING);
16 return 0;
17 }
+0
-28
mozc_build_tools/gyp/test/generator-output/src/prog1.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 'symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog1',
11 'type': 'executable',
12 'dependencies': [
13 'subdir2/prog2.gyp:prog2',
14 ],
15 'include_dirs': [
16 '.',
17 'inc1',
18 'subdir2/inc2',
19 'subdir3/inc3',
20 'subdir2/deeper',
21 ],
22 'sources': [
23 'prog1.c',
24 ],
25 },
26 ],
27 }
+0
-4
mozc_build_tools/gyp/test/generator-output/src/subdir2/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-7
mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/deeper.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from deeper.c\n");
5 return 0;
6 }
+0
-18
mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../../symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'deeper',
11 'type': 'executable',
12 'sources': [
13 'deeper.c',
14 ],
15 },
16 ],
17 }
+0
-1
mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/deeper.h less more
0 #define DEEPER_STRING "subdir2/deeper/deeper.h"
+0
-1
mozc_build_tools/gyp/test/generator-output/src/subdir2/inc2/include2.h less more
0 #define INCLUDE2_STRING "inc2/include2.h"
+0
-18
mozc_build_tools/gyp/test/generator-output/src/subdir2/prog2.c less more
0 #include <stdio.h>
1
2 #include "inc.h"
3 #include "include1.h"
4 #include "include2.h"
5 #include "include3.h"
6 #include "deeper.h"
7
8 int main(int argc, char *argv[])
9 {
10 printf("Hello from prog2.c\n");
11 printf("Hello from %s\n", INC_STRING);
12 printf("Hello from %s\n", INCLUDE1_STRING);
13 printf("Hello from %s\n", INCLUDE2_STRING);
14 printf("Hello from %s\n", INCLUDE3_STRING);
15 printf("Hello from %s\n", DEEPER_STRING);
16 return 0;
17 }
+0
-28
mozc_build_tools/gyp/test/generator-output/src/subdir2/prog2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog2',
11 'type': 'executable',
12 'include_dirs': [
13 '..',
14 '../inc1',
15 'inc2',
16 '../subdir3/inc3',
17 'deeper',
18 ],
19 'dependencies': [
20 '../subdir3/prog3.gyp:prog3',
21 ],
22 'sources': [
23 'prog2.c',
24 ],
25 },
26 ],
27 }
+0
-4
mozc_build_tools/gyp/test/generator-output/src/subdir3/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-1
mozc_build_tools/gyp/test/generator-output/src/subdir3/inc3/include3.h less more
0 #define INCLUDE3_STRING "inc3/include3.h"
+0
-18
mozc_build_tools/gyp/test/generator-output/src/subdir3/prog3.c less more
0 #include <stdio.h>
1
2 #include "inc.h"
3 #include "include1.h"
4 #include "include2.h"
5 #include "include3.h"
6 #include "deeper.h"
7
8 int main(int argc, char *argv[])
9 {
10 printf("Hello from prog3.c\n");
11 printf("Hello from %s\n", INC_STRING);
12 printf("Hello from %s\n", INCLUDE1_STRING);
13 printf("Hello from %s\n", INCLUDE2_STRING);
14 printf("Hello from %s\n", INCLUDE3_STRING);
15 printf("Hello from %s\n", DEEPER_STRING);
16 return 0;
17 }
+0
-25
mozc_build_tools/gyp/test/generator-output/src/subdir3/prog3.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog3',
11 'type': 'executable',
12 'include_dirs': [
13 '..',
14 '../inc1',
15 '../subdir2/inc2',
16 'inc3',
17 '../subdir2/deeper',
18 ],
19 'sources': [
20 'prog3.c',
21 ],
22 },
23 ],
24 }
+0
-16
mozc_build_tools/gyp/test/generator-output/src/symroot.gypi less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'variables': {
6 'set_symroot%': 0,
7 },
8 'conditions': [
9 ['set_symroot == 1', {
10 'xcode_settings': {
11 'SYMROOT': '<(DEPTH)/build',
12 },
13 }],
14 ],
15 }
+0
-24
mozc_build_tools/gyp/test/hello/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simplest-possible build of a "Hello, world!" program
8 using an explicit build target of 'all'.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('hello.gyp')
16
17 test.build('hello.gyp', test.ALL)
18
19 test.run_built_executable('hello', stdout="Hello, world!\n")
20
21 test.up_to_date('hello.gyp', test.ALL)
22
23 test.pass_test()
+0
-24
mozc_build_tools/gyp/test/hello/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simplest-possible build of a "Hello, world!" program
8 using the default build target.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('hello.gyp')
16
17 test.build('hello.gyp')
18
19 test.run_built_executable('hello', stdout="Hello, world!\n")
20
21 test.up_to_date('hello.gyp', test.DEFAULT)
22
23 test.pass_test()
+0
-32
mozc_build_tools/gyp/test/hello/gyptest-disable-regyp.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that Makefiles don't get rebuilt when a source gyp file changes and
8 the disable_regeneration generator flag is set.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('hello.gyp', '-Gauto_regeneration=0')
16
17 test.build('hello.gyp', test.ALL)
18
19 test.run_built_executable('hello', stdout="Hello, world!\n")
20
21 # Sleep so that the changed gyp file will have a newer timestamp than the
22 # previously generated build files.
23 test.sleep()
24 test.write('hello.gyp', test.read('hello2.gyp'))
25
26 test.build('hello.gyp', test.ALL)
27
28 # Should still be the old executable, as regeneration was disabled.
29 test.run_built_executable('hello', stdout="Hello, world!\n")
30
31 test.pass_test()
+0
-32
mozc_build_tools/gyp/test/hello/gyptest-regyp.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that Makefiles get rebuilt when a source gyp file changes.
8 """
9
10 import TestGyp
11
12 # Regenerating build files when a gyp file changes is currently only supported
13 # by the make generator.
14 test = TestGyp.TestGyp(formats=['make'])
15
16 test.run_gyp('hello.gyp')
17
18 test.build('hello.gyp', test.ALL)
19
20 test.run_built_executable('hello', stdout="Hello, world!\n")
21
22 # Sleep so that the changed gyp file will have a newer timestamp than the
23 # previously generated build files.
24 test.sleep()
25 test.write('hello.gyp', test.read('hello2.gyp'))
26
27 test.build('hello.gyp', test.ALL)
28
29 test.run_built_executable('hello', stdout="Hello, two!\n")
30
31 test.pass_test()
+0
-24
mozc_build_tools/gyp/test/hello/gyptest-target.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simplest-possible build of a "Hello, world!" program
8 using an explicit build target of 'hello'.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('hello.gyp')
16
17 test.build('hello.gyp', 'hello')
18
19 test.run_built_executable('hello', stdout="Hello, world!\n")
20
21 test.up_to_date('hello.gyp', 'hello')
22
23 test.pass_test()
+0
-11
mozc_build_tools/gyp/test/hello/hello.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 int main(int argc, char *argv[])
7 {
8 printf("Hello, world!\n");
9 return 0;
10 }
+0
-15
mozc_build_tools/gyp/test/hello/hello.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'hello',
8 'type': 'executable',
9 'sources': [
10 'hello.c',
11 ],
12 },
13 ],
14 }
+0
-11
mozc_build_tools/gyp/test/hello/hello2.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 int main(int argc, char *argv[])
7 {
8 printf("Hello, two!\n");
9 return 0;
10 }
+0
-15
mozc_build_tools/gyp/test/hello/hello2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'hello',
8 'type': 'executable',
9 'sources': [
10 'hello2.c',
11 ],
12 },
13 ],
14 }
+0
-44
mozc_build_tools/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies inclusion of $HOME/.gyp/includes.gypi works properly with relocation
8 and with regeneration.
9 """
10
11 import os
12 import TestGyp
13
14 # Regenerating build files when a gyp file changes is currently only supported
15 # by the make generator.
16 test = TestGyp.TestGyp(formats=['make'])
17
18 os.environ['HOME'] = os.path.abspath('home')
19
20 test.run_gyp('all.gyp', chdir='src')
21
22 # After relocating, we should still be able to build (build file shouldn't
23 # contain relative reference to ~/.gyp/includes.gypi)
24 test.relocate('src', 'relocate/src')
25
26 test.build('all.gyp', test.ALL, chdir='relocate/src')
27
28 test.run_built_executable('printfoo',
29 chdir='relocate/src',
30 stdout="FOO is fromhome\n");
31
32 # Building should notice any changes to ~/.gyp/includes.gypi and regyp.
33 test.sleep()
34
35 test.write('home/.gyp/include.gypi', test.read('home2/.gyp/include.gypi'))
36
37 test.build('all.gyp', test.ALL, chdir='relocate/src')
38
39 test.run_built_executable('printfoo',
40 chdir='relocate/src',
41 stdout="FOO is fromhome2\n");
42
43 test.pass_test()
+0
-30
mozc_build_tools/gyp/test/home_dot_gyp/gyptest-home-includes.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies inclusion of $HOME/.gyp/includes.gypi works.
8 """
9
10 import os
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 os.environ['HOME'] = os.path.abspath('home')
16
17 test.run_gyp('all.gyp', chdir='src')
18
19 # After relocating, we should still be able to build (build file shouldn't
20 # contain relative reference to ~/.gyp/includes.gypi)
21 test.relocate('src', 'relocate/src')
22
23 test.build('all.gyp', test.ALL, chdir='relocate/src')
24
25 test.run_built_executable('printfoo',
26 chdir='relocate/src',
27 stdout="FOO is fromhome\n");
28
29 test.pass_test()
+0
-5
mozc_build_tools/gyp/test/home_dot_gyp/home/.gyp/include.gypi less more
0 {
1 'variables': {
2 'foo': '"fromhome"',
3 },
4 }
+0
-5
mozc_build_tools/gyp/test/home_dot_gyp/home2/.gyp/include.gypi less more
0 {
1 'variables': {
2 'foo': '"fromhome2"',
3 },
4 }
+0
-22
mozc_build_tools/gyp/test/home_dot_gyp/src/all.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'variables': {
6 'foo%': '"fromdefault"',
7 },
8 'targets': [
9 {
10 'target_name': 'printfoo',
11 'type': 'executable',
12 'sources': [
13 'printfoo.c',
14 ],
15 'defines': [
16 'FOO=<(foo)',
17 ],
18 },
19 ],
20 }
21
+0
-7
mozc_build_tools/gyp/test/home_dot_gyp/src/printfoo.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("FOO is %s\n", FOO);
5 return 0;
6 }
+0
-42
mozc_build_tools/gyp/test/include_dirs/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies use of include_dirs when using an explicit build target of 'all'.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('includes.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('includes.gyp', test.ALL, chdir='relocate/src')
19
20 expect = """\
21 Hello from includes.c
22 Hello from inc.h
23 Hello from include1.h
24 Hello from subdir/inc2/include2.h
25 """
26 test.run_built_executable('includes', stdout=expect, chdir='relocate/src')
27
28 if test.format == 'xcode':
29 chdir='relocate/src/subdir'
30 else:
31 chdir='relocate/src'
32
33 expect = """\
34 Hello from subdir/subdir_includes.c
35 Hello from subdir/inc.h
36 Hello from include1.h
37 Hello from subdir/inc2/include2.h
38 """
39 test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir)
40
41 test.pass_test()
+0
-42
mozc_build_tools/gyp/test/include_dirs/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies use of include_dirs when using the default build target.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('includes.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('includes.gyp', test.ALL, chdir='relocate/src')
19
20 expect = """\
21 Hello from includes.c
22 Hello from inc.h
23 Hello from include1.h
24 Hello from subdir/inc2/include2.h
25 """
26 test.run_built_executable('includes', stdout=expect, chdir='relocate/src')
27
28 if test.format == 'xcode':
29 chdir='relocate/src/subdir'
30 else:
31 chdir='relocate/src'
32
33 expect = """\
34 Hello from subdir/subdir_includes.c
35 Hello from subdir/inc.h
36 Hello from include1.h
37 Hello from subdir/inc2/include2.h
38 """
39 test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir)
40
41 test.pass_test()
+0
-1
mozc_build_tools/gyp/test/include_dirs/src/inc.h less more
0 #define INC_STRING "inc.h"
+0
-1
mozc_build_tools/gyp/test/include_dirs/src/inc1/include1.h less more
0 #define INCLUDE1_STRING "include1.h"
+0
-14
mozc_build_tools/gyp/test/include_dirs/src/includes.c less more
0 #include <stdio.h>
1
2 #include "inc.h"
3 #include "include1.h"
4 #include "include2.h"
5
6 int main(int argc, char *argv[])
7 {
8 printf("Hello from includes.c\n");
9 printf("Hello from %s\n", INC_STRING);
10 printf("Hello from %s\n", INCLUDE1_STRING);
11 printf("Hello from %s\n", INCLUDE2_STRING);
12 return 0;
13 }
+0
-23
mozc_build_tools/gyp/test/include_dirs/src/includes.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'includes',
8 'type': 'executable',
9 'dependencies': [
10 'subdir/subdir_includes.gyp:subdir_includes',
11 ],
12 'include_dirs': [
13 '.',
14 'inc1',
15 'subdir/inc2',
16 ],
17 'sources': [
18 'includes.c',
19 ],
20 },
21 ],
22 }
+0
-1
mozc_build_tools/gyp/test/include_dirs/src/subdir/inc.h less more
0 #define INC_STRING "subdir/inc.h"
+0
-1
mozc_build_tools/gyp/test/include_dirs/src/subdir/inc2/include2.h less more
0 #define INCLUDE2_STRING "subdir/inc2/include2.h"
+0
-14
mozc_build_tools/gyp/test/include_dirs/src/subdir/subdir_includes.c less more
0 #include <stdio.h>
1
2 #include "inc.h"
3 #include "include1.h"
4 #include "include2.h"
5
6 int main(int argc, char *argv[])
7 {
8 printf("Hello from subdir/subdir_includes.c\n");
9 printf("Hello from %s\n", INC_STRING);
10 printf("Hello from %s\n", INCLUDE1_STRING);
11 printf("Hello from %s\n", INCLUDE2_STRING);
12 return 0;
13 }
+0
-20
mozc_build_tools/gyp/test/include_dirs/src/subdir/subdir_includes.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'subdir_includes',
8 'type': 'executable',
9 'include_dirs': [
10 '.',
11 '../inc1',
12 'inc2',
13 ],
14 'sources': [
15 'subdir_includes.c',
16 ],
17 },
18 ],
19 }
+0
-17
mozc_build_tools/gyp/test/lib/README.txt less more
0 Supporting modules for GYP testing.
1
2 TestCmd.py
3 TestCommon.py
4
5 Modules for generic testing of command-line utilities,
6 specifically including the ability to copy a test configuration
7 to temporary directories (with default cleanup on exit) as part
8 of running test scripts that invoke commands, compare actual
9 against expected output, etc.
10
11 Our copies of these come from the SCons project,
12 http://www.scons.org/.
13
14 TestGyp.py
15
16 Modules for GYP-specific tests, of course.
+0
-1591
mozc_build_tools/gyp/test/lib/TestCmd.py less more
0 """
1 TestCmd.py: a testing framework for commands and scripts.
2
3 The TestCmd module provides a framework for portable automated testing
4 of executable commands and scripts (in any language, not just Python),
5 especially commands and scripts that require file system interaction.
6
7 In addition to running tests and evaluating conditions, the TestCmd
8 module manages and cleans up one or more temporary workspace
9 directories, and provides methods for creating files and directories in
10 those workspace directories from in-line data, here-documents), allowing
11 tests to be completely self-contained.
12
13 A TestCmd environment object is created via the usual invocation:
14
15 import TestCmd
16 test = TestCmd.TestCmd()
17
18 There are a bunch of keyword arguments available at instantiation:
19
20 test = TestCmd.TestCmd(description = 'string',
21 program = 'program_or_script_to_test',
22 interpreter = 'script_interpreter',
23 workdir = 'prefix',
24 subdir = 'subdir',
25 verbose = Boolean,
26 match = default_match_function,
27 diff = default_diff_function,
28 combine = Boolean)
29
30 There are a bunch of methods that let you do different things:
31
32 test.verbose_set(1)
33
34 test.description_set('string')
35
36 test.program_set('program_or_script_to_test')
37
38 test.interpreter_set('script_interpreter')
39 test.interpreter_set(['script_interpreter', 'arg'])
40
41 test.workdir_set('prefix')
42 test.workdir_set('')
43
44 test.workpath('file')
45 test.workpath('subdir', 'file')
46
47 test.subdir('subdir', ...)
48
49 test.rmdir('subdir', ...)
50
51 test.write('file', "contents\n")
52 test.write(['subdir', 'file'], "contents\n")
53
54 test.read('file')
55 test.read(['subdir', 'file'])
56 test.read('file', mode)
57 test.read(['subdir', 'file'], mode)
58
59 test.writable('dir', 1)
60 test.writable('dir', None)
61
62 test.preserve(condition, ...)
63
64 test.cleanup(condition)
65
66 test.command_args(program = 'program_or_script_to_run',
67 interpreter = 'script_interpreter',
68 arguments = 'arguments to pass to program')
69
70 test.run(program = 'program_or_script_to_run',
71 interpreter = 'script_interpreter',
72 arguments = 'arguments to pass to program',
73 chdir = 'directory_to_chdir_to',
74 stdin = 'input to feed to the program\n')
75 universal_newlines = True)
76
77 p = test.start(program = 'program_or_script_to_run',
78 interpreter = 'script_interpreter',
79 arguments = 'arguments to pass to program',
80 universal_newlines = None)
81
82 test.finish(self, p)
83
84 test.pass_test()
85 test.pass_test(condition)
86 test.pass_test(condition, function)
87
88 test.fail_test()
89 test.fail_test(condition)
90 test.fail_test(condition, function)
91 test.fail_test(condition, function, skip)
92
93 test.no_result()
94 test.no_result(condition)
95 test.no_result(condition, function)
96 test.no_result(condition, function, skip)
97
98 test.stdout()
99 test.stdout(run)
100
101 test.stderr()
102 test.stderr(run)
103
104 test.symlink(target, link)
105
106 test.banner(string)
107 test.banner(string, width)
108
109 test.diff(actual, expected)
110
111 test.match(actual, expected)
112
113 test.match_exact("actual 1\nactual 2\n", "expected 1\nexpected 2\n")
114 test.match_exact(["actual 1\n", "actual 2\n"],
115 ["expected 1\n", "expected 2\n"])
116
117 test.match_re("actual 1\nactual 2\n", regex_string)
118 test.match_re(["actual 1\n", "actual 2\n"], list_of_regexes)
119
120 test.match_re_dotall("actual 1\nactual 2\n", regex_string)
121 test.match_re_dotall(["actual 1\n", "actual 2\n"], list_of_regexes)
122
123 test.tempdir()
124 test.tempdir('temporary-directory')
125
126 test.sleep()
127 test.sleep(seconds)
128
129 test.where_is('foo')
130 test.where_is('foo', 'PATH1:PATH2')
131 test.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4')
132
133 test.unlink('file')
134 test.unlink('subdir', 'file')
135
136 The TestCmd module provides pass_test(), fail_test(), and no_result()
137 unbound functions that report test results for use with the Aegis change
138 management system. These methods terminate the test immediately,
139 reporting PASSED, FAILED, or NO RESULT respectively, and exiting with
140 status 0 (success), 1 or 2 respectively. This allows for a distinction
141 between an actual failed test and a test that could not be properly
142 evaluated because of an external condition (such as a full file system
143 or incorrect permissions).
144
145 import TestCmd
146
147 TestCmd.pass_test()
148 TestCmd.pass_test(condition)
149 TestCmd.pass_test(condition, function)
150
151 TestCmd.fail_test()
152 TestCmd.fail_test(condition)
153 TestCmd.fail_test(condition, function)
154 TestCmd.fail_test(condition, function, skip)
155
156 TestCmd.no_result()
157 TestCmd.no_result(condition)
158 TestCmd.no_result(condition, function)
159 TestCmd.no_result(condition, function, skip)
160
161 The TestCmd module also provides unbound functions that handle matching
162 in the same way as the match_*() methods described above.
163
164 import TestCmd
165
166 test = TestCmd.TestCmd(match = TestCmd.match_exact)
167
168 test = TestCmd.TestCmd(match = TestCmd.match_re)
169
170 test = TestCmd.TestCmd(match = TestCmd.match_re_dotall)
171
172 The TestCmd module provides unbound functions that can be used for the
173 "diff" argument to TestCmd.TestCmd instantiation:
174
175 import TestCmd
176
177 test = TestCmd.TestCmd(match = TestCmd.match_re,
178 diff = TestCmd.diff_re)
179
180 test = TestCmd.TestCmd(diff = TestCmd.simple_diff)
181
182 The "diff" argument can also be used with standard difflib functions:
183
184 import difflib
185
186 test = TestCmd.TestCmd(diff = difflib.context_diff)
187
188 test = TestCmd.TestCmd(diff = difflib.unified_diff)
189
190 Lastly, the where_is() method also exists in an unbound function
191 version.
192
193 import TestCmd
194
195 TestCmd.where_is('foo')
196 TestCmd.where_is('foo', 'PATH1:PATH2')
197 TestCmd.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4')
198 """
199
200 # Copyright 2000-2010 Steven Knight
201 # This module is free software, and you may redistribute it and/or modify
202 # it under the same terms as Python itself, so long as this copyright message
203 # and disclaimer are retained in their original form.
204 #
205 # IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
206 # SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
207 # THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
208 # DAMAGE.
209 #
210 # THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
211 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
212 # PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
213 # AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
214 # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
215
216 __author__ = "Steven Knight <knight at baldmt dot com>"
217 __revision__ = "TestCmd.py 0.37.D001 2010/01/11 16:55:50 knight"
218 __version__ = "0.37"
219
220 import errno
221 import os
222 import os.path
223 import re
224 import shutil
225 import stat
226 import string
227 import sys
228 import tempfile
229 import time
230 import traceback
231 import types
232 import UserList
233
234 __all__ = [
235 'diff_re',
236 'fail_test',
237 'no_result',
238 'pass_test',
239 'match_exact',
240 'match_re',
241 'match_re_dotall',
242 'python_executable',
243 'TestCmd'
244 ]
245
246 try:
247 import difflib
248 except ImportError:
249 __all__.append('simple_diff')
250
251 def is_List(e):
252 return type(e) is types.ListType \
253 or isinstance(e, UserList.UserList)
254
255 try:
256 from UserString import UserString
257 except ImportError:
258 class UserString:
259 pass
260
261 if hasattr(types, 'UnicodeType'):
262 def is_String(e):
263 return type(e) is types.StringType \
264 or type(e) is types.UnicodeType \
265 or isinstance(e, UserString)
266 else:
267 def is_String(e):
268 return type(e) is types.StringType or isinstance(e, UserString)
269
270 tempfile.template = 'testcmd.'
271 if os.name in ('posix', 'nt'):
272 tempfile.template = 'testcmd.' + str(os.getpid()) + '.'
273 else:
274 tempfile.template = 'testcmd.'
275
276 re_space = re.compile('\s')
277
278 _Cleanup = []
279
280 _chain_to_exitfunc = None
281
282 def _clean():
283 global _Cleanup
284 cleanlist = filter(None, _Cleanup)
285 del _Cleanup[:]
286 cleanlist.reverse()
287 for test in cleanlist:
288 test.cleanup()
289 if _chain_to_exitfunc:
290 _chain_to_exitfunc()
291
292 try:
293 import atexit
294 except ImportError:
295 # TODO(1.5): atexit requires python 2.0, so chain sys.exitfunc
296 try:
297 _chain_to_exitfunc = sys.exitfunc
298 except AttributeError:
299 pass
300 sys.exitfunc = _clean
301 else:
302 atexit.register(_clean)
303
304 try:
305 zip
306 except NameError:
307 def zip(*lists):
308 result = []
309 for i in xrange(min(map(len, lists))):
310 result.append(tuple(map(lambda l, i=i: l[i], lists)))
311 return result
312
313 class Collector:
314 def __init__(self, top):
315 self.entries = [top]
316 def __call__(self, arg, dirname, names):
317 pathjoin = lambda n, d=dirname: os.path.join(d, n)
318 self.entries.extend(map(pathjoin, names))
319
320 def _caller(tblist, skip):
321 string = ""
322 arr = []
323 for file, line, name, text in tblist:
324 if file[-10:] == "TestCmd.py":
325 break
326 arr = [(file, line, name, text)] + arr
327 atfrom = "at"
328 for file, line, name, text in arr[skip:]:
329 if name in ("?", "<module>"):
330 name = ""
331 else:
332 name = " (" + name + ")"
333 string = string + ("%s line %d of %s%s\n" % (atfrom, line, file, name))
334 atfrom = "\tfrom"
335 return string
336
337 def fail_test(self = None, condition = 1, function = None, skip = 0):
338 """Cause the test to fail.
339
340 By default, the fail_test() method reports that the test FAILED
341 and exits with a status of 1. If a condition argument is supplied,
342 the test fails only if the condition is true.
343 """
344 if not condition:
345 return
346 if not function is None:
347 function()
348 of = ""
349 desc = ""
350 sep = " "
351 if not self is None:
352 if self.program:
353 of = " of " + self.program
354 sep = "\n\t"
355 if self.description:
356 desc = " [" + self.description + "]"
357 sep = "\n\t"
358
359 at = _caller(traceback.extract_stack(), skip)
360 sys.stderr.write("FAILED test" + of + desc + sep + at)
361
362 sys.exit(1)
363
364 def no_result(self = None, condition = 1, function = None, skip = 0):
365 """Causes a test to exit with no valid result.
366
367 By default, the no_result() method reports NO RESULT for the test
368 and exits with a status of 2. If a condition argument is supplied,
369 the test fails only if the condition is true.
370 """
371 if not condition:
372 return
373 if not function is None:
374 function()
375 of = ""
376 desc = ""
377 sep = " "
378 if not self is None:
379 if self.program:
380 of = " of " + self.program
381 sep = "\n\t"
382 if self.description:
383 desc = " [" + self.description + "]"
384 sep = "\n\t"
385
386 at = _caller(traceback.extract_stack(), skip)
387 sys.stderr.write("NO RESULT for test" + of + desc + sep + at)
388
389 sys.exit(2)
390
391 def pass_test(self = None, condition = 1, function = None):
392 """Causes a test to pass.
393
394 By default, the pass_test() method reports PASSED for the test
395 and exits with a status of 0. If a condition argument is supplied,
396 the test passes only if the condition is true.
397 """
398 if not condition:
399 return
400 if not function is None:
401 function()
402 sys.stderr.write("PASSED\n")
403 sys.exit(0)
404
405 def match_exact(lines = None, matches = None):
406 """
407 """
408 if not is_List(lines):
409 lines = string.split(lines, "\n")
410 if not is_List(matches):
411 matches = string.split(matches, "\n")
412 if len(lines) != len(matches):
413 return
414 for i in range(len(lines)):
415 if lines[i] != matches[i]:
416 return
417 return 1
418
419 def match_re(lines = None, res = None):
420 """
421 """
422 if not is_List(lines):
423 lines = string.split(lines, "\n")
424 if not is_List(res):
425 res = string.split(res, "\n")
426 if len(lines) != len(res):
427 return
428 for i in range(len(lines)):
429 s = "^" + res[i] + "$"
430 try:
431 expr = re.compile(s)
432 except re.error, e:
433 msg = "Regular expression error in %s: %s"
434 raise re.error, msg % (repr(s), e[0])
435 if not expr.search(lines[i]):
436 return
437 return 1
438
439 def match_re_dotall(lines = None, res = None):
440 """
441 """
442 if not type(lines) is type(""):
443 lines = string.join(lines, "\n")
444 if not type(res) is type(""):
445 res = string.join(res, "\n")
446 s = "^" + res + "$"
447 try:
448 expr = re.compile(s, re.DOTALL)
449 except re.error, e:
450 msg = "Regular expression error in %s: %s"
451 raise re.error, msg % (repr(s), e[0])
452 if expr.match(lines):
453 return 1
454
try:
    import difflib
except ImportError:
    # Very old Pythons have no difflib; the diff-style helpers below
    # simply won't be defined and callers fall back accordingly.
    pass
else:
    def simple_diff(a, b, fromfile='', tofile='',
                    fromfiledate='', tofiledate='', n=3, lineterm='\n'):
        """
        A function with the same calling signature as difflib.context_diff
        (diff -c) and difflib.unified_diff (diff -u) but which prints
        output like the simple, unadorned 'diff' command.
        """
        sm = difflib.SequenceMatcher(None, a, b)
        def comma(x1, x2):
            # Render a classic ed-style line range: "N" or "N,M".
            return x1+1 == x2 and str(x2) or '%s,%s' % (x1+1, x2)
        result = []
        for op, a1, a2, b1, b2 in sm.get_opcodes():
            if op == 'delete':
                result.append("%sd%d" % (comma(a1, a2), b1))
                result.extend(map(lambda l: '< ' + l, a[a1:a2]))
            elif op == 'insert':
                result.append("%da%s" % (a1, comma(b1, b2)))
                result.extend(map(lambda l: '> ' + l, b[b1:b2]))
            elif op == 'replace':
                result.append("%sc%s" % (comma(a1, a2), comma(b1, b2)))
                result.extend(map(lambda l: '< ' + l, a[a1:a2]))
                result.append('---')
                result.extend(map(lambda l: '> ' + l, b[b1:b2]))
        return result

    def diff_re(a, b, fromfile='', tofile='',
                fromfiledate='', tofiledate='', n=3, lineterm='\n'):
        """
        A simple "diff" of two sets of lines when the expected lines
        are regular expressions.  This is a really dumb thing that
        just compares each line in turn, so it doesn't look for
        chunks of matching lines and the like--but at least it lets
        you know exactly which line first didn't compare correctly.
        """
        result = []
        diff = len(a) - len(b)
        # Pad the shorter list so zip() below walks every line of the
        # longer one.
        if diff < 0:
            a = a + ['']*(-diff)
        elif diff > 0:
            b = b + ['']*diff
        i = 0
        for aline, bline in zip(a, b):
            s = "^" + aline + "$"
            try:
                expr = re.compile(s)
            except re.error, e:
                msg = "Regular expression error in %s: %s"
                raise re.error, msg % (repr(s), e[0])
            if not expr.search(bline):
                # Report the mismatch in ed-style "NcN" change format.
                result.append("%sc%s" % (i+1, i+1))
                result.append('< ' + repr(a[i]))
                result.append('---')
                result.append('> ' + repr(b[i]))
            i = i+1
        return result
515
# Path of the Python interpreter to use when running test scripts.
# Jython identifies itself with os.name == 'java' and is launched via
# the 'jython' executable under sys.prefix; every other implementation
# exposes itself directly through sys.executable.
if os.name != 'java':
    python_executable = sys.executable
else:
    python_executable = os.path.join(sys.prefix, 'jython')
523
if sys.platform == 'win32':

    # Windows file timestamps have coarse resolution, so tests must
    # sleep longer to guarantee an observable mtime change.
    default_sleep_seconds = 2

    def where_is(file, path=None, pathext=None):
        # Search 'path' (default: $PATH) for 'file', trying each
        # executable extension in 'pathext' (default: $PATHEXT).
        # Returns the full path of the first match, or None.
        if path is None:
            path = os.environ['PATH']
        if is_String(path):
            path = string.split(path, os.pathsep)
        if pathext is None:
            pathext = os.environ['PATHEXT']
        if is_String(pathext):
            pathext = string.split(pathext, os.pathsep)
        for ext in pathext:
            # If the name already ends in one of the known extensions,
            # search for it as-is instead of appending another one.
            if string.lower(ext) == string.lower(file[-len(ext):]):
                pathext = ['']
                break
        for dir in path:
            f = os.path.join(dir, file)
            for ext in pathext:
                fext = f + ext
                if os.path.isfile(fext):
                    return fext
        return None

else:

    def where_is(file, path=None, pathext=None):
        # POSIX version: search 'path' (default: $PATH) for a regular
        # file named 'file' with at least one execute bit set; the
        # 'pathext' argument is accepted for API symmetry but unused.
        if path is None:
            path = os.environ['PATH']
        if is_String(path):
            path = string.split(path, os.pathsep)
        for dir in path:
            f = os.path.join(dir, file)
            if os.path.isfile(f):
                try:
                    st = os.stat(f)
                except OSError:
                    # os.path.isfile() can race with removal, or the
                    # entry may be a dangling symlink; just skip it.
                    continue
                # 0111 = any of the user/group/other execute bits.
                if stat.S_IMODE(st[stat.ST_MODE]) & 0111:
                    return f
        return None

    default_sleep_seconds = 1
568
569
570
try:
    import subprocess
except ImportError:
    # The subprocess module doesn't exist in this version of Python,
    # so we're going to cobble up something that looks just enough
    # like its API for our purposes below.
    import new

    subprocess = new.module('subprocess')

    # Sentinels standing in for the real subprocess.PIPE / STDOUT.
    subprocess.PIPE = 'PIPE'
    subprocess.STDOUT = 'STDOUT'
    subprocess.mswindows = (sys.platform == 'win32')

    try:
        import popen2
        popen2.Popen3
    except AttributeError:
        # popen2 exists but predates Popen3: wrap os.popen3() by hand.
        class Popen3:
            universal_newlines = 1
            def __init__(self, command, **kw):
                # Extra quoting works around cmd.exe stripping the
                # outer quotes of an already-quoted command.
                if sys.platform == 'win32' and command[0] == '"':
                    command = '"' + command + '"'
                (stdin, stdout, stderr) = os.popen3(' ' + command)
                self.stdin = stdin
                self.stdout = stdout
                self.stderr = stderr
            def close_output(self):
                # Closing the last popen3() pipe yields the exit status.
                self.stdout.close()
                self.resultcode = self.stderr.close()
            def wait(self):
                # Decode the raw wait status into an exit code or the
                # terminating signal number.
                resultcode = self.resultcode
                if os.WIFEXITED(resultcode):
                    return os.WEXITSTATUS(resultcode)
                elif os.WIFSIGNALED(resultcode):
                    return os.WTERMSIG(resultcode)
                else:
                    return None

    else:
        try:
            popen2.Popen4
        except AttributeError:
            # A cribbed Popen4 class, with some retrofitted code from
            # the Python 1.5 Popen3 class methods to do certain things
            # by hand.
            class Popen4(popen2.Popen3):
                childerr = None

                def __init__(self, cmd, bufsize=-1):
                    p2cread, p2cwrite = os.pipe()
                    c2pread, c2pwrite = os.pipe()
                    self.pid = os.fork()
                    if self.pid == 0:
                        # Child: wire stdin to the parent's write pipe
                        # and both stdout and stderr to the read pipe
                        # (that combination is what makes this Popen4).
                        os.dup2(p2cread, 0)
                        os.dup2(c2pwrite, 1)
                        os.dup2(c2pwrite, 2)
                        for i in range(3, popen2.MAXFD):
                            try:
                                os.close(i)
                            except: pass
                        try:
                            os.execvp(cmd[0], cmd)
                        finally:
                            os._exit(1)
                        # Shouldn't come here, I guess
                        os._exit(1)
                    os.close(p2cread)
                    self.tochild = os.fdopen(p2cwrite, 'w', bufsize)
                    os.close(c2pwrite)
                    self.fromchild = os.fdopen(c2pread, 'r', bufsize)
                    popen2._active.append(self)

            popen2.Popen4 = Popen4

        # Adapter exposing popen2's Popen3/Popen4 through the
        # subprocess-style stdin/stdout/stderr attribute names.
        class Popen3(popen2.Popen3, popen2.Popen4):
            universal_newlines = 1
            def __init__(self, command, **kw):
                # stderr='STDOUT' selects the combined-output Popen4.
                if kw.get('stderr') == 'STDOUT':
                    apply(popen2.Popen4.__init__, (self, command, 1))
                else:
                    apply(popen2.Popen3.__init__, (self, command, 1))
                self.stdin = self.tochild
                self.stdout = self.fromchild
                self.stderr = self.childerr
            def wait(self, *args, **kw):
                # Decode the raw wait status (see Popen3.wait above).
                resultcode = apply(popen2.Popen3.wait, (self,)+args, kw)
                if os.WIFEXITED(resultcode):
                    return os.WEXITSTATUS(resultcode)
                elif os.WIFSIGNALED(resultcode):
                    return os.WTERMSIG(resultcode)
                else:
                    return None

    subprocess.Popen = Popen3
667
668
669
# From Josiah Carlson,
# ASPN : Python Cookbook : Module to allow Asynchronous subprocess use on Windows and Posix platforms
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/440554

PIPE = subprocess.PIPE

if subprocess.mswindows:
    # On Windows, non-blocking pipe I/O requires the pywin32
    # extensions to peek at and read the pipe handles directly.
    from win32file import ReadFile, WriteFile
    from win32pipe import PeekNamedPipe
    import msvcrt
else:
    import select
    import fcntl

    # Some platforms' fcntl modules lack these constants; fall back
    # to the traditional POSIX values.
    try: fcntl.F_GETFL
    except AttributeError: fcntl.F_GETFL = 3

    try: fcntl.F_SETFL
    except AttributeError: fcntl.F_SETFL = 4
689
class Popen(subprocess.Popen):
    """subprocess.Popen extended with non-blocking send()/recv()
    helpers so callers can interact with a running child process
    without deadlocking on full or empty pipes."""
    def recv(self, maxsize=None):
        # Non-blocking read of up to 'maxsize' bytes from stdout.
        return self._recv('stdout', maxsize)

    def recv_err(self, maxsize=None):
        # Non-blocking read of up to 'maxsize' bytes from stderr.
        return self._recv('stderr', maxsize)

    def send_recv(self, input='', maxsize=None):
        # Convenience: write 'input', then poll both output streams.
        return self.send(input), self.recv(maxsize), self.recv_err(maxsize)

    def get_conn_maxsize(self, which, maxsize):
        # Normalize maxsize (default 1024, minimum 1) and fetch the
        # named stream attribute ('stdout' or 'stderr').
        if maxsize is None:
            maxsize = 1024
        elif maxsize < 1:
            maxsize = 1
        return getattr(self, which), maxsize

    def _close(self, which):
        # Close the named stream and null the attribute so later
        # calls report the disconnect (by returning None).
        getattr(self, which).close()
        setattr(self, which, None)

    if subprocess.mswindows:
        def send(self, input):
            # Returns the number of bytes written, or None once stdin
            # is closed (locally or because the child went away).
            if not self.stdin:
                return None

            try:
                x = msvcrt.get_osfhandle(self.stdin.fileno())
                (errCode, written) = WriteFile(x, input)
            except ValueError:
                return self._close('stdin')
            except (subprocess.pywintypes.error, Exception), why:
                # 109 is ERROR_BROKEN_PIPE: treat as a disconnect.
                if why[0] in (109, errno.ESHUTDOWN):
                    return self._close('stdin')
                raise

            return written

        def _recv(self, which, maxsize):
            conn, maxsize = self.get_conn_maxsize(which, maxsize)
            if conn is None:
                return None

            try:
                x = msvcrt.get_osfhandle(conn.fileno())
                # Peek first so the ReadFile below can never block.
                (read, nAvail, nMessage) = PeekNamedPipe(x, 0)
                if maxsize < nAvail:
                    nAvail = maxsize
                if nAvail > 0:
                    (errCode, read) = ReadFile(x, nAvail, None)
            except ValueError:
                return self._close(which)
            except (subprocess.pywintypes.error, Exception), why:
                if why[0] in (109, errno.ESHUTDOWN):
                    return self._close(which)
                raise

            #if self.universal_newlines:
            #    read = self._translate_newlines(read)
            return read

    else:
        def send(self, input):
            # POSIX version: write only when select() reports stdin
            # is writable, so this call can never block.
            if not self.stdin:
                return None

            if not select.select([], [self.stdin], [], 0)[1]:
                return 0

            try:
                written = os.write(self.stdin.fileno(), input)
            except OSError, why:
                if why[0] == errno.EPIPE: #broken pipe
                    return self._close('stdin')
                raise

            return written

        def _recv(self, which, maxsize):
            conn, maxsize = self.get_conn_maxsize(which, maxsize)
            if conn is None:
                return None

            # Temporarily switch the fd to non-blocking mode so the
            # read below returns whatever is available immediately.
            try:
                flags = fcntl.fcntl(conn, fcntl.F_GETFL)
            except TypeError:
                flags = None
            else:
                if not conn.closed:
                    fcntl.fcntl(conn, fcntl.F_SETFL, flags| os.O_NONBLOCK)

            try:
                if not select.select([conn], [], [], 0)[0]:
                    return ''

                r = conn.read(maxsize)
                if not r:
                    # Empty read after select() said readable: EOF.
                    return self._close(which)

                #if self.universal_newlines:
                #    r = self._translate_newlines(r)
                return r
            finally:
                # Always restore the original blocking mode.
                if not conn.closed and not flags is None:
                    fcntl.fcntl(conn, fcntl.F_SETFL, flags)
795
disconnect_message = "Other end disconnected!"

def recv_some(p, t=.1, e=1, tr=5, stderr=0):
    """Poll a Popen object for output for roughly 't' seconds.

    Repeatedly calls p.recv() (or p.recv_err() when 'stderr' is
    true) and accumulates whatever arrives.  A None read means the
    other end hung up: raise an Exception when 'e' is true, else
    stop collecting.  'tr' bounds how finely the remaining wait time
    is sliced while the pipe is idle.  Returns the collected output
    joined into a single string.
    """
    if tr < 1:
        tr = 1
    deadline = time.time() + t
    chunks = []
    reader = stderr and p.recv_err or p.recv
    chunk = ''
    while time.time() < deadline or chunk:
        chunk = reader()
        if chunk is None:
            if not e:
                break
            raise Exception(disconnect_message)
        if chunk:
            chunks.append(chunk)
        else:
            time.sleep(max((deadline - time.time()) / tr, 0))
    return ''.join(chunks)
819
# TODO(3.0: rewrite to use memoryview()
def send_all(p, data):
    """Write all of 'data' to the Popen object 'p', retrying partial
    sends.  p.send() may accept only a prefix of the data, so keep
    sending the unsent tail (as a buffer() view, to avoid copying)
    until nothing remains.  A None result from p.send() means the
    other end disconnected.
    """
    remaining = data
    while len(remaining):
        sent = p.send(remaining)
        if sent is None:
            raise Exception(disconnect_message)
        remaining = buffer(remaining, sent)
827
828
829
try:
    object
except NameError:
    # Pre-2.2 Pythons have no 'object' built-in; supply a stand-in
    # so the new-style class definitions below still parse and run.
    class object:
        pass
835
836
837
class TestCmd(object):
    """Class TestCmd

    High-level interface for testing external commands and scripts:
    manages a temporary working directory, runs programs, and records
    their exit status and output for later matching and diffing.
    """

    def __init__(self, description = None,
                       program = None,
                       interpreter = None,
                       workdir = None,
                       subdir = None,
                       verbose = None,
                       match = None,
                       diff = None,
                       combine = 0,
                       universal_newlines = 1):
        # Remember the invoking directory so relative program paths
        # can be resolved later, after we chdir into the workdir.
        self._cwd = os.getcwd()
        self.description_set(description)
        self.program_set(program)
        self.interpreter_set(interpreter)
        if verbose is None:
            # TESTCMD_VERBOSE in the environment overrides the default.
            try:
                verbose = max( 0, int(os.environ.get('TESTCMD_VERBOSE', 0)) )
            except ValueError:
                verbose = 0
        self.verbose_set(verbose)
        self.combine = combine
        self.universal_newlines = universal_newlines
        if not match is None:
            self.match_function = match
        else:
            self.match_function = match_re
        if not diff is None:
            self.diff_function = diff
        else:
            try:
                difflib
            except NameError:
                pass
            else:
                self.diff_function = simple_diff
                #self.diff_function = difflib.context_diff
                #self.diff_function = difflib.unified_diff
        self._dirlist = []
        # Which test outcomes should keep their temp directories.
        self._preserve = {'pass_test': 0, 'fail_test': 0, 'no_result': 0}
        # NOTE(review): "is ''" compares identity, not equality; it
        # happens to work for interned empty strings but "!= ''" is
        # what's meant here.
        if os.environ.has_key('PRESERVE') and not os.environ['PRESERVE'] is '':
            self._preserve['pass_test'] = os.environ['PRESERVE']
            self._preserve['fail_test'] = os.environ['PRESERVE']
            self._preserve['no_result'] = os.environ['PRESERVE']
        else:
            try:
                self._preserve['pass_test'] = os.environ['PRESERVE_PASS']
            except KeyError:
                pass
            try:
                self._preserve['fail_test'] = os.environ['PRESERVE_FAIL']
            except KeyError:
                pass
            try:
                self._preserve['no_result'] = os.environ['PRESERVE_NO_RESULT']
            except KeyError:
                pass
        self._stdout = []
        self._stderr = []
        self.status = None
        self.condition = 'no_result'
        self.workdir_set(workdir)
        self.subdir(subdir)

    def __del__(self):
        # Remove (or preserve) temporary directories on destruction.
        self.cleanup()

    def __repr__(self):
        return "%x" % id(self)

    # Formatting used by the banner() separators in diff output.
    banner_char = '='
    banner_width = 80

    def banner(self, s, width=None):
        # Pad 's' with banner_char out to 'width' columns.
        if width is None:
            width = self.banner_width
        return s + self.banner_char * (width - len(s))

    if os.name == 'posix':

        def escape(self, arg):
            "escape shell special characters"
            slash = '\\'
            special = '"$'

            arg = string.replace(arg, slash, slash+slash)
            for c in special:
                arg = string.replace(arg, c, slash+c)

            if re_space.search(arg):
                arg = '"' + arg + '"'
            return arg

    else:

        # Windows does not allow special characters in file names
        # anyway, so no need for an escape function, we will just quote
        # the arg.
        def escape(self, arg):
            if re_space.search(arg):
                arg = '"' + arg + '"'
            return arg

    def canonicalize(self, path):
        # Join list-form paths and make relative paths absolute under
        # the temporary working directory.
        if is_List(path):
            path = apply(os.path.join, tuple(path))
        if not os.path.isabs(path):
            path = os.path.join(self.workdir, path)
        return path

    def chmod(self, path, mode):
        """Changes permissions on the specified file or directory
        path name."""
        path = self.canonicalize(path)
        os.chmod(path, mode)

    def cleanup(self, condition = None):
        """Removes any temporary working directories for the specified
        TestCmd environment.  If the environment variable PRESERVE was
        set when the TestCmd environment was created, temporary working
        directories are not removed.  If any of the environment variables
        PRESERVE_PASS, PRESERVE_FAIL, or PRESERVE_NO_RESULT were set
        when the TestCmd environment was created, then temporary working
        directories are not removed if the test passed, failed, or had
        no result, respectively.  Temporary working directories are also
        preserved for conditions specified via the preserve method.

        Typically, this method is not called directly, but is used when
        the script exits to clean up temporary working directories as
        appropriate for the exit status.
        """
        if not self._dirlist:
            return
        os.chdir(self._cwd)
        self.workdir = None
        if condition is None:
            condition = self.condition
        if self._preserve[condition]:
            for dir in self._dirlist:
                print "Preserved directory", dir
        else:
            # Remove deepest directories first.
            list = self._dirlist[:]
            list.reverse()
            for dir in list:
                self.writable(dir, 1)
                shutil.rmtree(dir, ignore_errors = 1)
            self._dirlist = []

        try:
            global _Cleanup
            _Cleanup.remove(self)
        except (AttributeError, ValueError):
            pass

    def command_args(self, program = None,
                           interpreter = None,
                           arguments = None):
        # Build the full command-line list from the program, optional
        # interpreter prefix, and optional extra arguments, falling
        # back to the values given at construction time.
        if program:
            if type(program) == type('') and not os.path.isabs(program):
                program = os.path.join(self._cwd, program)
        else:
            program = self.program
        if not interpreter:
            interpreter = self.interpreter
        if not type(program) in [type([]), type(())]:
            program = [program]
        cmd = list(program)
        if interpreter:
            if not type(interpreter) in [type([]), type(())]:
                interpreter = [interpreter]
            cmd = list(interpreter) + cmd
        if arguments:
            if type(arguments) == type(''):
                arguments = string.split(arguments)
            cmd.extend(arguments)
        return cmd

    def description_set(self, description):
        """Set the description of the functionality being tested.
        """
        self.description = description

    try:
        difflib
    except NameError:
        # No difflib available: just print both texts under banners.
        def diff(self, a, b, name, *args, **kw):
            print self.banner('Expected %s' % name)
            print a
            print self.banner('Actual %s' % name)
            print b
    else:
        def diff(self, a, b, name, *args, **kw):
            # Print a labeled diff of expected vs. actual output,
            # using the diff function chosen at construction time.
            print self.banner(name)
            args = (a.splitlines(), b.splitlines()) + args
            lines = apply(self.diff_function, args, kw)
            for l in lines:
                print l

    def fail_test(self, condition = 1, function = None, skip = 0):
        """Cause the test to fail.
        """
        if not condition:
            return
        self.condition = 'fail_test'
        fail_test(self = self,
                  condition = condition,
                  function = function,
                  skip = skip)

    def interpreter_set(self, interpreter):
        """Set the program to be used to interpret the program
        under test as a script.
        """
        self.interpreter = interpreter

    def match(self, lines, matches):
        """Compare actual and expected file contents.
        """
        return self.match_function(lines, matches)

    def match_exact(self, lines, matches):
        """Compare actual and expected file contents.
        """
        return match_exact(lines, matches)

    def match_re(self, lines, res):
        """Compare actual and expected file contents.
        """
        return match_re(lines, res)

    def match_re_dotall(self, lines, res):
        """Compare actual and expected file contents.
        """
        return match_re_dotall(lines, res)

    def no_result(self, condition = 1, function = None, skip = 0):
        """Report that the test could not be run.
        """
        if not condition:
            return
        self.condition = 'no_result'
        no_result(self = self,
                  condition = condition,
                  function = function,
                  skip = skip)

    def pass_test(self, condition = 1, function = None):
        """Cause the test to pass.
        """
        if not condition:
            return
        self.condition = 'pass_test'
        pass_test(self = self, condition = condition, function = function)

    def preserve(self, *conditions):
        """Arrange for the temporary working directories for the
        specified TestCmd environment to be preserved for one or more
        conditions.  If no conditions are specified, arranges for
        the temporary working directories to be preserved for all
        conditions.
        """
        if conditions is ():
            conditions = ('pass_test', 'fail_test', 'no_result')
        for cond in conditions:
            self._preserve[cond] = 1

    def program_set(self, program):
        """Set the executable program or script to be tested.
        """
        if program and not os.path.isabs(program):
            program = os.path.join(self._cwd, program)
        self.program = program

    def read(self, file, mode = 'rb'):
        """Reads and returns the contents of the specified file name.
        The file name may be a list, in which case the elements are
        concatenated with the os.path.join() method.  The file is
        assumed to be under the temporary working directory unless it
        is an absolute path name.  The I/O mode for the file may
        be specified; it must begin with an 'r'.  The default is
        'rb' (binary read).
        """
        file = self.canonicalize(file)
        if mode[0] != 'r':
            raise ValueError, "mode must begin with 'r'"
        return open(file, mode).read()

    def rmdir(self, dir):
        """Removes the specified dir name.
        The dir name may be a list, in which case the elements are
        concatenated with the os.path.join() method.  The dir is
        assumed to be under the temporary working directory unless it
        is an absolute path name.
        The dir must be empty.
        """
        dir = self.canonicalize(dir)
        os.rmdir(dir)

    def start(self, program = None,
                    interpreter = None,
                    arguments = None,
                    universal_newlines = None,
                    **kw):
        """
        Starts a program or script for the test environment.

        The specified program will have the original directory
        prepended unless it is enclosed in a [list].
        """
        cmd = self.command_args(program, interpreter, arguments)
        cmd_string = string.join(map(self.escape, cmd), ' ')
        if self.verbose:
            sys.stderr.write(cmd_string + "\n")
        if universal_newlines is None:
            universal_newlines = self.universal_newlines

        # On Windows, if we make stdin a pipe when we plan to send
        # no input, and the test program exits before
        # Popen calls msvcrt.open_osfhandle, that call will fail.
        # So don't use a pipe for stdin if we don't need one.
        stdin = kw.get('stdin', None)
        if stdin is not None:
            stdin = subprocess.PIPE

        combine = kw.get('combine', self.combine)
        if combine:
            stderr_value = subprocess.STDOUT
        else:
            stderr_value = subprocess.PIPE

        return Popen(cmd,
                     stdin=stdin,
                     stdout=subprocess.PIPE,
                     stderr=stderr_value,
                     universal_newlines=universal_newlines)

    def finish(self, popen, **kw):
        """
        Finishes and waits for the process being run under control of
        the specified popen argument, recording the exit status,
        standard output and error output.
        """
        popen.stdin.close()
        self.status = popen.wait()
        if not self.status:
            self.status = 0
        self._stdout.append(popen.stdout.read())
        if popen.stderr:
            stderr = popen.stderr.read()
        else:
            stderr = ''
        self._stderr.append(stderr)

    def run(self, program = None,
                  interpreter = None,
                  arguments = None,
                  chdir = None,
                  stdin = None,
                  universal_newlines = None):
        """Runs a test of the program or script for the test
        environment.  Standard output and error output are saved for
        future retrieval via the stdout() and stderr() methods.

        The specified program will have the original directory
        prepended unless it is enclosed in a [list].
        """
        if chdir:
            oldcwd = os.getcwd()
            if not os.path.isabs(chdir):
                chdir = os.path.join(self.workpath(chdir))
            if self.verbose:
                sys.stderr.write("chdir(" + chdir + ")\n")
            os.chdir(chdir)
        p = self.start(program,
                       interpreter,
                       arguments,
                       universal_newlines,
                       stdin=stdin)
        if stdin:
            if is_List(stdin):
                for line in stdin:
                    p.stdin.write(line)
            else:
                p.stdin.write(stdin)
            p.stdin.close()

        out = p.stdout.read()
        if p.stderr is None:
            err = ''
        else:
            err = p.stderr.read()
        # The popen2-based fallback Popen exposes close_output();
        # the real subprocess.Popen does not, so close by hand there.
        try:
            close_output = p.close_output
        except AttributeError:
            p.stdout.close()
            if not p.stderr is None:
                p.stderr.close()
        else:
            close_output()

        self._stdout.append(out)
        self._stderr.append(err)

        self.status = p.wait()
        if not self.status:
            self.status = 0

        if chdir:
            os.chdir(oldcwd)
        if self.verbose >= 2:
            write = sys.stdout.write
            write('============ STATUS: %d\n' % self.status)
            out = self.stdout()
            if out or self.verbose >= 3:
                write('============ BEGIN STDOUT (len=%d):\n' % len(out))
                write(out)
                write('============ END STDOUT\n')
            err = self.stderr()
            if err or self.verbose >= 3:
                write('============ BEGIN STDERR (len=%d)\n' % len(err))
                write(err)
                write('============ END STDERR\n')

    def sleep(self, seconds = default_sleep_seconds):
        """Sleeps at least the specified number of seconds.  If no
        number is specified, sleeps at least the minimum number of
        seconds necessary to advance file time stamps on the current
        system.  Sleeping more seconds is all right.
        """
        time.sleep(seconds)

    def stderr(self, run = None):
        """Returns the error output from the specified run number.
        If there is no specified run number, then returns the error
        output of the last run.  If the run number is less than zero,
        then returns the error output from that many runs back from the
        current run.
        """
        if not run:
            run = len(self._stderr)
        elif run < 0:
            run = len(self._stderr) + run
        run = run - 1
        return self._stderr[run]

    def stdout(self, run = None):
        """Returns the standard output from the specified run number.
        If there is no specified run number, then returns the standard
        output of the last run.  If the run number is less than zero,
        then returns the standard output from that many runs back from
        the current run.
        """
        if not run:
            run = len(self._stdout)
        elif run < 0:
            run = len(self._stdout) + run
        run = run - 1
        return self._stdout[run]

    def subdir(self, *subdirs):
        """Create new subdirectories under the temporary working
        directory, one for each argument.  An argument may be a list,
        in which case the list elements are concatenated using the
        os.path.join() method.  Subdirectories multiple levels deep
        must be created using a separate argument for each level:

        test.subdir('sub', ['sub', 'dir'], ['sub', 'dir', 'ectory'])

        Returns the number of subdirectories actually created.
        """
        count = 0
        for sub in subdirs:
            if sub is None:
                continue
            if is_List(sub):
                sub = apply(os.path.join, tuple(sub))
            new = os.path.join(self.workdir, sub)
            try:
                os.mkdir(new)
            except OSError:
                # Already exists (or not creatable): don't count it.
                pass
            else:
                count = count + 1
        return count

    def symlink(self, target, link):
        """Creates a symlink to the specified target.
        The link name may be a list, in which case the elements are
        concatenated with the os.path.join() method.  The link is
        assumed to be under the temporary working directory unless it
        is an absolute path name.  The target is *not* assumed to be
        under the temporary working directory.
        """
        link = self.canonicalize(link)
        os.symlink(target, link)

    def tempdir(self, path=None):
        """Creates a temporary directory.
        A unique directory name is generated if no path name is specified.
        The directory is created, and will be removed when the TestCmd
        object is destroyed.
        """
        if path is None:
            try:
                path = tempfile.mktemp(prefix=tempfile.template)
            except TypeError:
                path = tempfile.mktemp()
            os.mkdir(path)

        # Symlinks in the path will report things
        # differently from os.getcwd(), so chdir there
        # and back to fetch the canonical path.
        cwd = os.getcwd()
        try:
            os.chdir(path)
            path = os.getcwd()
        finally:
            os.chdir(cwd)

        # Uppercase the drive letter since the case of drive
        # letters is pretty much random on win32:
        drive,rest = os.path.splitdrive(path)
        if drive:
            path = string.upper(drive) + rest

        #
        self._dirlist.append(path)
        global _Cleanup
        try:
            _Cleanup.index(self)
        except ValueError:
            _Cleanup.append(self)

        return path

    def touch(self, path, mtime=None):
        """Updates the modification time on the specified file or
        directory path name.  The default is to update to the
        current time if no explicit modification time is specified.
        """
        path = self.canonicalize(path)
        atime = os.path.getatime(path)
        if mtime is None:
            mtime = time.time()
        os.utime(path, (atime, mtime))

    def unlink(self, file):
        """Unlinks the specified file name.
        The file name may be a list, in which case the elements are
        concatenated with the os.path.join() method.  The file is
        assumed to be under the temporary working directory unless it
        is an absolute path name.
        """
        file = self.canonicalize(file)
        os.unlink(file)

    def verbose_set(self, verbose):
        """Set the verbose level.
        """
        self.verbose = verbose

    def where_is(self, file, path=None, pathext=None):
        """Find an executable file.
        """
        if is_List(file):
            file = apply(os.path.join, tuple(file))
        if not os.path.isabs(file):
            # Delegate to the module-level, platform-specific where_is().
            file = where_is(file, path, pathext)
        return file

    def workdir_set(self, path):
        """Creates a temporary working directory with the specified
        path name.  If the path is a null string (''), a unique
        directory name is created.
        """
        if (path != None):
            if path == '':
                path = None
            path = self.tempdir(path)
        self.workdir = path

    def workpath(self, *args):
        """Returns the absolute path name to a subdirectory or file
        within the current temporary working directory.  Concatenates
        the temporary working directory name with the specified
        arguments using the os.path.join() method.
        """
        return apply(os.path.join, (self.workdir,) + tuple(args))

    def readable(self, top, read=1):
        """Make the specified directory tree readable (read == 1)
        or not (read == None).

        This method has no effect on Windows systems, which use a
        completely different mechanism to control file readability.
        """

        if sys.platform == 'win32':
            return

        if read:
            def do_chmod(fname):
                try: st = os.stat(fname)
                except OSError: pass
                else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IREAD))
        else:
            def do_chmod(fname):
                try: st = os.stat(fname)
                except OSError: pass
                else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IREAD))

        if os.path.isfile(top):
            # If it's a file, that's easy, just chmod it.
            do_chmod(top)
        elif read:
            # It's a directory and we're trying to turn on read
            # permission, so it's also pretty easy, just chmod the
            # directory and then chmod every entry on our walk down the
            # tree.  Because os.path.walk() is top-down, we'll enable
            # read permission on any directories that have it disabled
            # before os.path.walk() tries to list their contents.
            do_chmod(top)

            def chmod_entries(arg, dirname, names, do_chmod=do_chmod):
                for n in names:
                    do_chmod(os.path.join(dirname, n))

            os.path.walk(top, chmod_entries, None)
        else:
            # It's a directory and we're trying to turn off read
            # permission, which means we have to chmod the directories
            # in the tree bottom-up, lest disabling read permission from
            # the top down get in the way of being able to get at lower
            # parts of the tree.  But os.path.walk() visits things top
            # down, so we just use an object to collect a list of all
            # of the entries in the tree, reverse the list, and then
            # chmod the reversed (bottom-up) list.
            col = Collector(top)
            os.path.walk(top, col, None)
            col.entries.reverse()
            for d in col.entries: do_chmod(d)

    def writable(self, top, write=1):
        """Make the specified directory tree writable (write == 1)
        or not (write == None).
        """

        if sys.platform == 'win32':

            if write:
                def do_chmod(fname):
                    try: os.chmod(fname, stat.S_IWRITE)
                    except OSError: pass
            else:
                def do_chmod(fname):
                    try: os.chmod(fname, stat.S_IREAD)
                    except OSError: pass

        else:

            if write:
                def do_chmod(fname):
                    try: st = os.stat(fname)
                    except OSError: pass
                    else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|0200))
            else:
                def do_chmod(fname):
                    try: st = os.stat(fname)
                    except OSError: pass
                    else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~0200))

        if os.path.isfile(top):
            do_chmod(top)
        else:
            col = Collector(top)
            os.path.walk(top, col, None)
            for d in col.entries: do_chmod(d)

    def executable(self, top, execute=1):
        """Make the specified directory tree executable (execute == 1)
        or not (execute == None).

        This method has no effect on Windows systems, which use a
        completely different mechanism to control file executability.
        """

        if sys.platform == 'win32':
            return

        if execute:
            def do_chmod(fname):
                try: st = os.stat(fname)
                except OSError: pass
                else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IEXEC))
        else:
            def do_chmod(fname):
                try: st = os.stat(fname)
                except OSError: pass
                else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IEXEC))

        if os.path.isfile(top):
            # If it's a file, that's easy, just chmod it.
            do_chmod(top)
        elif execute:
            # It's a directory and we're trying to turn on execute
            # permission, so it's also pretty easy, just chmod the
            # directory and then chmod every entry on our walk down the
            # tree.  Because os.path.walk() is top-down, we'll enable
            # execute permission on any directories that have it disabled
            # before os.path.walk() tries to list their contents.
            do_chmod(top)

            def chmod_entries(arg, dirname, names, do_chmod=do_chmod):
                for n in names:
                    do_chmod(os.path.join(dirname, n))

            os.path.walk(top, chmod_entries, None)
        else:
            # It's a directory and we're trying to turn off execute
            # permission, which means we have to chmod the directories
            # in the tree bottom-up, lest disabling execute permission from
            # the top down get in the way of being able to get at lower
            # parts of the tree.  But os.path.walk() visits things top
            # down, so we just use an object to collect a list of all
            # of the entries in the tree, reverse the list, and then
            # chmod the reversed (bottom-up) list.
            col = Collector(top)
            os.path.walk(top, col, None)
            col.entries.reverse()
            for d in col.entries: do_chmod(d)

    def write(self, file, content, mode = 'wb'):
        """Writes the specified content text (second argument) to the
        specified file name (first argument).  The file name may be
        a list, in which case the elements are concatenated with the
        os.path.join() method.  The file is created under the temporary
        working directory.  Any subdirectories in the path must already
        exist.  The I/O mode for the file may be specified; it must
        begin with a 'w'.  The default is 'wb' (binary write).
        """
        file = self.canonicalize(file)
        if mode[0] != 'w':
            raise ValueError, "mode must begin with 'w'"
        open(file, mode).write(content)
1585
1586 # Local Variables:
1587 # tab-width:4
1588 # indent-tabs-mode:nil
1589 # End:
1590 # vim: set expandtab tabstop=4 shiftwidth=4:
+0
-581
mozc_build_tools/gyp/test/lib/TestCommon.py less more
0 """
1 TestCommon.py: a testing framework for commands and scripts
2 with commonly useful error handling
3
4 The TestCommon module provides a simple, high-level interface for writing
5 tests of executable commands and scripts, especially commands and scripts
6 that interact with the file system. All methods throw exceptions and
7 exit on failure, with useful error messages. This makes a number of
8 explicit checks unnecessary, making the test scripts themselves simpler
9 to write and easier to read.
10
11 The TestCommon class is a subclass of the TestCmd class. In essence,
12 TestCommon is a wrapper that handles common TestCmd error conditions in
13 useful ways. You can use TestCommon directly, or subclass it for your
14 program and add additional (or override) methods to tailor it to your
15 program's specific needs. Alternatively, the TestCommon class serves
16 as a useful example of how to define your own TestCmd subclass.
17
18 As a subclass of TestCmd, TestCommon provides access to all of the
19 variables and methods from the TestCmd module. Consequently, you can
20 use any variable or method documented in the TestCmd module without
21 having to explicitly import TestCmd.
22
23 A TestCommon environment object is created via the usual invocation:
24
25 import TestCommon
26 test = TestCommon.TestCommon()
27
28 You can use all of the TestCmd keyword arguments when instantiating a
29 TestCommon object; see the TestCmd documentation for details.
30
31 Here is an overview of the methods and keyword arguments that are
32 provided by the TestCommon class:
33
34 test.must_be_writable('file1', ['file2', ...])
35
36 test.must_contain('file', 'required text\n')
37
38 test.must_contain_all_lines(output, lines, ['title', find])
39
40 test.must_contain_any_line(output, lines, ['title', find])
41
42 test.must_exist('file1', ['file2', ...])
43
44 test.must_match('file', "expected contents\n")
45
46 test.must_not_be_writable('file1', ['file2', ...])
47
48 test.must_not_contain('file', 'banned text\n')
49
50 test.must_not_contain_any_line(output, lines, ['title', find])
51
52 test.must_not_exist('file1', ['file2', ...])
53
54 test.run(options = "options to be prepended to arguments",
55 stdout = "expected standard output from the program",
56 stderr = "expected error output from the program",
57 status = expected_status,
58 match = match_function)
59
60 The TestCommon module also provides the following variables
61
62 TestCommon.python_executable
63 TestCommon.exe_suffix
64 TestCommon.obj_suffix
65 TestCommon.shobj_prefix
66 TestCommon.shobj_suffix
67 TestCommon.lib_prefix
68 TestCommon.lib_suffix
69 TestCommon.dll_prefix
70 TestCommon.dll_suffix
71
72 """
73
74 # Copyright 2000-2010 Steven Knight
75 # This module is free software, and you may redistribute it and/or modify
76 # it under the same terms as Python itself, so long as this copyright message
77 # and disclaimer are retained in their original form.
78 #
79 # IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
80 # SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
81 # THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
82 # DAMAGE.
83 #
84 # THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
85 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
86 # PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
87 # AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
88 # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
89
90 __author__ = "Steven Knight <knight at baldmt dot com>"
91 __revision__ = "TestCommon.py 0.37.D001 2010/01/11 16:55:50 knight"
92 __version__ = "0.37"
93
94 import copy
95 import os
96 import os.path
97 import stat
98 import string
99 import sys
100 import types
101 import UserList
102
103 from TestCmd import *
104 from TestCmd import __all__
105
# Re-export TestCommon plus the platform prefix/suffix variables on top
# of everything TestCmd already exports (its __all__ was imported above).
__all__.extend([ 'TestCommon',
                 'exe_suffix',
                 'obj_suffix',
                 'shobj_prefix',
                 'shobj_suffix',
                 'lib_prefix',
                 'lib_suffix',
                 'dll_prefix',
                 'dll_suffix',
               ])
116
# Variables that describe the prefixes and suffixes on this system:
# executable, object file, shared object and static/dynamic library
# naming conventions for each supported platform.
if sys.platform == 'win32':
    exe_suffix = '.exe'
    obj_suffix = '.obj'
    shobj_suffix = '.obj'
    shobj_prefix = ''
    lib_prefix = ''
    lib_suffix = '.lib'
    dll_prefix = ''
    dll_suffix = '.dll'
elif sys.platform == 'cygwin':
    exe_suffix = '.exe'
    obj_suffix = '.o'
    shobj_suffix = '.os'
    shobj_prefix = ''
    lib_prefix = 'lib'
    lib_suffix = '.a'
    dll_prefix = ''
    dll_suffix = '.dll'
elif 'irix' in sys.platform:
    exe_suffix = ''
    obj_suffix = '.o'
    shobj_suffix = '.o'
    shobj_prefix = ''
    lib_prefix = 'lib'
    lib_suffix = '.a'
    dll_prefix = 'lib'
    dll_suffix = '.so'
elif 'darwin' in sys.platform:
    exe_suffix = ''
    obj_suffix = '.o'
    shobj_suffix = '.os'
    shobj_prefix = ''
    lib_prefix = 'lib'
    lib_suffix = '.a'
    dll_prefix = 'lib'
    dll_suffix = '.dylib'
elif 'sunos' in sys.platform:
    exe_suffix = ''
    obj_suffix = '.o'
    shobj_suffix = '.os'
    shobj_prefix = 'so_'
    lib_prefix = 'lib'
    lib_suffix = '.a'
    # Fixed: Solaris shared libraries are ELF '.so' files; '.dylib'
    # (previously here) is the Mach-O convention and macOS-only --
    # an apparent copy-paste from the darwin branch above.
    dll_prefix = 'lib'
    dll_suffix = '.so'
else:
    exe_suffix = ''
    obj_suffix = '.o'
    shobj_suffix = '.os'
    shobj_prefix = ''
    lib_prefix = 'lib'
    lib_suffix = '.a'
    dll_prefix = 'lib'
    dll_suffix = '.so'
172
def is_List(e):
    # A "list" here is either exactly the built-in list type (note:
    # deliberately NOT list subclasses) or any UserList instance.
    if type(e) is types.ListType:
        return True
    return isinstance(e, UserList.UserList)
176
def is_writable(f):
    # Truthy when the owner-write bit is set in the file's permission
    # bits; checks the mode, not actual access rights.
    return os.stat(f).st_mode & stat.S_IWUSR
180
def separate_files(flist):
    # Partition the path list into (existing, missing), preserving
    # the original relative order within each group.
    existing = [f for f in flist if os.path.exists(f)]
    missing = [f for f in flist if not os.path.exists(f)]
    return existing, missing
190
# Platform-specific helpers used by TestCommon._complete() to decide
# whether a finished command counts as a failure.  NOTE the deliberate
# asymmetry: the POSIX _failed() returns None (tri-state "cannot tell")
# when either status is unknown, while the NT variant returns a plain
# boolean False in that case.
if os.name == 'posix':
    def _failed(self, status = 0):
        # None means "no comparison possible", not "did not fail".
        if self.status is None or status is None:
            return None
        return _status(self) != status
    def _status(self):
        return self.status
elif os.name == 'nt':
    def _failed(self, status = 0):
        return not (self.status is None or status is None) and \
               self.status != status
    def _status(self):
        return self.status
204
class TestCommon(TestCmd):
    """TestCmd subclass layering common error handling on top of the
    base class:  the must_*() methods verify file and output conditions
    and fail the test with useful diagnostics, while run()/start()/
    finish() check a command's exit status, stdout and stderr
    automatically.
    """

    # Additional methods from the Perl Test::Cmd::Common module
    # that we may wish to add in the future:
    #
    # $test->subdir('subdir', ...);
    #
    # $test->copy('src_file', 'dst_file');

    def __init__(self, **kw):
        """Initialize a new TestCommon instance.  This involves just
        calling the base class initialization, and then changing directory
        to the workdir.
        """
        apply(TestCmd.__init__, [self], kw)
        os.chdir(self.workdir)

    def must_be_writable(self, *files):
        """Ensures that the specified file(s) exist and are writable.
        An individual file can be specified as a list of directory names,
        in which case the pathname will be constructed by concatenating
        them.  Exits FAILED if any of the files does not exist or is
        not writable.
        """
        # A list element is treated as path components to be joined.
        files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
        existing, missing = separate_files(files)
        unwritable = filter(lambda x, iw=is_writable: not iw(x), existing)
        if missing:
            print "Missing files: `%s'" % string.join(missing, "', `")
        if unwritable:
            print "Unwritable files: `%s'" % string.join(unwritable, "', `")
        self.fail_test(missing + unwritable)

    def must_contain(self, file, required, mode = 'rb'):
        """Ensures that the specified file contains the required text.
        """
        file_contents = self.read(file, mode)
        contains = (string.find(file_contents, required) != -1)
        if not contains:
            print "File `%s' does not contain required string." % file
            print self.banner('Required string ')
            print required
            print self.banner('%s contents ' % file)
            print file_contents
        self.fail_test(not contains)

    def must_contain_all_lines(self, output, lines, title=None, find=None):
        """Ensures that the specified output string (first argument)
        contains all of the specified lines (second argument).

        An optional third argument can be used to describe the type
        of output being searched, and only shows up in failure output.

        An optional fourth argument can be used to supply a different
        function, of the form "find(line, output)", to use when searching
        for lines in the output.
        """
        if find is None:
            find = lambda o, l: string.find(o, l) != -1
        missing = []
        for line in lines:
            if not find(output, line):
                missing.append(line)

        if missing:
            if title is None:
                title = 'output'
            sys.stdout.write("Missing expected lines from %s:\n" % title)
            for line in missing:
                sys.stdout.write('    ' + repr(line) + '\n')
            sys.stdout.write(self.banner(title + ' '))
            sys.stdout.write(output)
            self.fail_test()

    def must_contain_any_line(self, output, lines, title=None, find=None):
        """Ensures that the specified output string (first argument)
        contains at least one of the specified lines (second argument).

        An optional third argument can be used to describe the type
        of output being searched, and only shows up in failure output.

        An optional fourth argument can be used to supply a different
        function, of the form "find(line, output)", to use when searching
        for lines in the output.
        """
        if find is None:
            find = lambda o, l: string.find(o, l) != -1
        for line in lines:
            if find(output, line):
                return

        if title is None:
            title = 'output'
        sys.stdout.write("Missing any expected line from %s:\n" % title)
        for line in lines:
            sys.stdout.write('    ' + repr(line) + '\n')
        sys.stdout.write(self.banner(title + ' '))
        sys.stdout.write(output)
        self.fail_test()

    def must_contain_lines(self, lines, output, title=None):
        # Deprecated; retained for backwards compatibility (note the
        # reversed argument order relative to must_contain_all_lines).
        return self.must_contain_all_lines(output, lines, title)

    def must_exist(self, *files):
        """Ensures that the specified file(s) must exist.  An individual
        file may be specified as a list of directory names, in which case
        the pathname will be constructed by concatenating them.  Exits
        FAILED if any of the files does not exist.
        """
        files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
        missing = filter(lambda x: not os.path.exists(x), files)
        if missing:
            print "Missing files: `%s'" % string.join(missing, "', `")
        self.fail_test(missing)

    def must_match(self, file, expect, mode = 'rb'):
        """Matches the contents of the specified file (first argument)
        against the expected contents (second argument).  The expected
        contents are a list of lines or a string which will be split
        on newlines.
        """
        file_contents = self.read(file, mode)
        try:
            self.fail_test(not self.match(file_contents, expect))
        except KeyboardInterrupt:
            raise
        except:
            # Show a diff on any failure (including fail_test's own
            # exit exception) before re-raising.
            print "Unexpected contents of `%s'" % file
            self.diff(expect, file_contents, 'contents ')
            raise

    def must_not_contain(self, file, banned, mode = 'rb'):
        """Ensures that the specified file doesn't contain the banned text.
        """
        file_contents = self.read(file, mode)
        contains = (string.find(file_contents, banned) != -1)
        if contains:
            print "File `%s' contains banned string." % file
            print self.banner('Banned string ')
            print banned
            print self.banner('%s contents ' % file)
            print file_contents
        self.fail_test(contains)

    def must_not_contain_any_line(self, output, lines, title=None, find=None):
        """Ensures that the specified output string (first argument)
        does not contain any of the specified lines (second argument).

        An optional third argument can be used to describe the type
        of output being searched, and only shows up in failure output.

        An optional fourth argument can be used to supply a different
        function, of the form "find(line, output)", to use when searching
        for lines in the output.
        """
        if find is None:
            find = lambda o, l: string.find(o, l) != -1
        unexpected = []
        for line in lines:
            if find(output, line):
                unexpected.append(line)

        if unexpected:
            if title is None:
                title = 'output'
            sys.stdout.write("Unexpected lines in %s:\n" % title)
            for line in unexpected:
                sys.stdout.write('    ' + repr(line) + '\n')
            sys.stdout.write(self.banner(title + ' '))
            sys.stdout.write(output)
            self.fail_test()

    def must_not_contain_lines(self, lines, output, title=None):
        # Deprecated; retained for backwards compatibility (note the
        # reversed argument order relative to must_not_contain_any_line).
        return self.must_not_contain_any_line(output, lines, title)

    def must_not_exist(self, *files):
        """Ensures that the specified file(s) must not exist.
        An individual file may be specified as a list of directory names,
        in which case the pathname will be constructed by concatenating
        them.  Exits FAILED if any of the files exists.
        """
        files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
        existing = filter(os.path.exists, files)
        if existing:
            print "Unexpected files exist: `%s'" % string.join(existing, "', `")
        self.fail_test(existing)


    def must_not_be_writable(self, *files):
        """Ensures that the specified file(s) exist and are not writable.
        An individual file can be specified as a list of directory names,
        in which case the pathname will be constructed by concatenating
        them.  Exits FAILED if any of the files does not exist or is
        writable.
        """
        files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
        existing, missing = separate_files(files)
        writable = filter(is_writable, existing)
        if missing:
            print "Missing files: `%s'" % string.join(missing, "', `")
        if writable:
            print "Writable files: `%s'" % string.join(writable, "', `")
        self.fail_test(missing + writable)

    def _complete(self, actual_stdout, expected_stdout,
                  actual_stderr, expected_stderr, status, match):
        """
        Post-processes running a subcommand, checking for failure
        status and displaying output appropriately.
        """
        if _failed(self, status):
            expect = ''
            if status != 0:
                expect = " (expected %s)" % str(status)
            print "%s returned %s%s" % (self.program, str(_status(self)), expect)
            print self.banner('STDOUT ')
            print actual_stdout
            print self.banner('STDERR ')
            print actual_stderr
            self.fail_test()
        if not expected_stdout is None and not match(actual_stdout, expected_stdout):
            self.diff(expected_stdout, actual_stdout, 'STDOUT ')
            if actual_stderr:
                print self.banner('STDERR ')
                print actual_stderr
            self.fail_test()
        if not expected_stderr is None and not match(actual_stderr, expected_stderr):
            print self.banner('STDOUT ')
            print actual_stdout
            self.diff(expected_stderr, actual_stderr, 'STDERR ')
            self.fail_test()

    def start(self, program = None,
              interpreter = None,
              arguments = None,
              universal_newlines = None,
              **kw):
        """
        Starts a program or script for the test environment.

        This handles the "options" keyword argument and exceptions.
        """
        try:
            options = kw['options']
            del kw['options']
        except KeyError:
            pass
        else:
            # Prepend any "options" string to the arguments.
            if options:
                if arguments is None:
                    arguments = options
                else:
                    arguments = options + " " + arguments
        try:
            return apply(TestCmd.start,
                         (self, program, interpreter, arguments, universal_newlines),
                         kw)
        except KeyboardInterrupt:
            raise
        except Exception, e:
            # Dump whatever output we captured so far before re-raising.
            print self.banner('STDOUT ')
            try:
                print self.stdout()
            except IndexError:
                pass
            print self.banner('STDERR ')
            try:
                print self.stderr()
            except IndexError:
                pass
            cmd_args = self.command_args(program, interpreter, arguments)
            sys.stderr.write('Exception trying to execute: %s\n' % cmd_args)
            raise e

    def finish(self, popen, stdout = None, stderr = '', status = 0, **kw):
        """
        Finishes and waits for the process being run under control of
        the specified popen argument.  Additional arguments are similar
        to those of the run() method:

                stdout  The expected standard output from
                        the command.  A value of None means
                        don't test standard output.

                stderr  The expected error output from
                        the command.  A value of None means
                        don't test error output.

                status  The expected exit status from the
                        command.  A value of None means don't
                        test exit status.
        """
        apply(TestCmd.finish, (self, popen,), kw)
        match = kw.get('match', self.match)
        self._complete(self.stdout(), stdout,
                       self.stderr(), stderr, status, match)

    def run(self, options = None, arguments = None,
                  stdout = None, stderr = '', status = 0, **kw):
        """Runs the program under test, checking that the test succeeded.

        The arguments are the same as the base TestCmd.run() method,
        with the addition of:

                options Extra options that get appended to the beginning
                        of the arguments.

                stdout  The expected standard output from
                        the command.  A value of None means
                        don't test standard output.

                stderr  The expected error output from
                        the command.  A value of None means
                        don't test error output.

                status  The expected exit status from the
                        command.  A value of None means don't
                        test exit status.

        By default, this expects a successful exit (status = 0), does
        not test standard output (stdout = None), and expects that error
        output is empty (stderr = "").
        """
        if options:
            if arguments is None:
                arguments = options
            else:
                arguments = options + " " + arguments
        kw['arguments'] = arguments
        try:
            match = kw['match']
            del kw['match']
        except KeyError:
            match = self.match
        apply(TestCmd.run, [self], kw)
        self._complete(self.stdout(), stdout,
                       self.stderr(), stderr, status, match)

    def skip_test(self, message="Skipping test.\n"):
        """Skips a test.

        Proper test-skipping behavior is dependent on the external
        TESTCOMMON_PASS_SKIPS environment variable.  If set, we treat
        the skip as a PASS (exit 0), and otherwise treat it as NO RESULT.
        In either case, we print the specified message as an indication
        that the substance of the test was skipped.

        (This was originally added to support development under Aegis.
        Technically, skipping a test is a NO RESULT, but Aegis would
        treat that as a test failure and prevent the change from going to
        the next step.  Since we didn't want to force anyone using Aegis
        to have to install absolutely every tool used by the tests, we
        would actually report to Aegis that a skipped test has PASSED
        so that the workflow isn't held up.)
        """
        if message:
            sys.stdout.write(message)
            sys.stdout.flush()
        pass_skips = os.environ.get('TESTCOMMON_PASS_SKIPS')
        if pass_skips in [None, 0, '0']:
            # skip=1 means skip this function when showing where this
            # result came from.  They only care about the line where the
            # script called test.skip_test(), not the line number where
            # we call test.no_result().
            self.no_result(skip=1)
        else:
            # We're under the development directory for this change,
            # so this is an Aegis invocation; pass the test (exit 0).
            self.pass_test()
575
576 # Local Variables:
577 # tab-width:4
578 # indent-tabs-mode:nil
579 # End:
580 # vim: set expandtab tabstop=4 shiftwidth=4:
+0
-686
mozc_build_tools/gyp/test/lib/TestGyp.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 TestGyp.py: a testing framework for GYP integration tests.
8 """
9
10 import os
11 import re
12 import shutil
13 import stat
14 import sys
15
16 import TestCommon
17 from TestCommon import __all__
18
# Export the TestGyp factory class on top of everything TestCommon
# already exports (its __all__ was imported above).
__all__.extend([
  'TestGyp',
])
22
23
class TestGypBase(TestCommon.TestCommon):
  """
  Class for controlling end-to-end tests of gyp generators.

  Instantiating this class will create a temporary directory and
  arrange for its destruction (via the TestCmd superclass) and
  copy all of the non-gyptest files in the directory hierarchy of the
  executing script.

  The default behavior is to test the 'gyp' or 'gyp.bat' file in the
  current directory.  An alternative may be specified explicitly on
  instantiation, or by setting the TESTGYP_GYP environment variable.

  This class should be subclassed for each supported gyp generator
  (format).  Various abstract methods below define calling signatures
  used by the test scripts to invoke builds on the generated build
  configuration and to run executables generated by those builds.
  """

  build_tool = None
  build_tool_list = []

  # Platform-specific prefix/suffix shorthands (e.g. name + _exe).
  _exe = TestCommon.exe_suffix
  _obj = TestCommon.obj_suffix
  shobj_ = TestCommon.shobj_prefix
  _shobj = TestCommon.shobj_suffix
  lib_ = TestCommon.lib_prefix
  _lib = TestCommon.lib_suffix
  dll_ = TestCommon.dll_prefix
  _dll = TestCommon.dll_suffix

  # Constants to represent different targets.
  ALL = '__all__'
  DEFAULT = '__default__'

  # Constants for different target types.
  EXECUTABLE = '__executable__'
  STATIC_LIB = '__static_lib__'
  SHARED_LIB = '__shared_lib__'

  def __init__(self, gyp=None, *args, **kw):
    self.origin_cwd = os.path.abspath(os.path.dirname(sys.argv[0]))

    # Locate the gyp entry point: explicit argument, then the
    # TESTGYP_GYP environment variable, then a platform default.
    if not gyp:
      gyp = os.environ.get('TESTGYP_GYP')
      if not gyp:
        if sys.platform == 'win32':
          gyp = 'gyp.bat'
        else:
          gyp = 'gyp'
    self.gyp = os.path.abspath(gyp)

    self.initialize_build_tool()

    if not kw.has_key('match'):
      kw['match'] = TestCommon.match_exact

    if not kw.has_key('workdir'):
      # Default behavior: the null string causes TestCmd to create
      # a temporary directory for us.
      kw['workdir'] = ''

    formats = kw.get('formats', [])
    if kw.has_key('formats'):
      del kw['formats']

    super(TestGypBase, self).__init__(*args, **kw)

    # A leading '!' in a formats entry excludes that format; skip the
    # test when this generator's format is excluded or not included.
    excluded_formats = set([f for f in formats if f[0] == '!'])
    included_formats = set(formats) - excluded_formats
    if ('!'+self.format in excluded_formats or
        included_formats and self.format not in included_formats):
      msg = 'Invalid test for %r format; skipping test.\n'
      self.skip_test(msg % self.format)

    self.copy_test_configuration(self.origin_cwd, self.workdir)
    self.set_configuration(None)

  def built_file_must_exist(self, name, type=None, **kw):
    """
    Fails the test if the specified built file name does not exist.
    """
    return self.must_exist(self.built_file_path(name, type, **kw))

  def built_file_must_not_exist(self, name, type=None, **kw):
    """
    Fails the test if the specified built file name exists.
    """
    return self.must_not_exist(self.built_file_path(name, type, **kw))

  def built_file_must_match(self, name, contents, **kw):
    """
    Fails the test if the contents of the specified built file name
    do not match the specified contents.
    """
    return self.must_match(self.built_file_path(name, **kw), contents)

  def built_file_must_not_match(self, name, contents, **kw):
    """
    Fails the test if the contents of the specified built file name
    match the specified contents.
    """
    return self.must_not_match(self.built_file_path(name, **kw), contents)

  def copy_test_configuration(self, source_dir, dest_dir):
    """
    Copies the test configuration from the specified source_dir
    (the directory in which the test script lives) to the
    specified dest_dir (a temporary working directory).

    This ignores all files and directories that begin with
    the string 'gyptest', and all '.svn' subdirectories.
    """
    for root, dirs, files in os.walk(source_dir):
      if '.svn' in dirs:
        dirs.remove('.svn')
      dirs = [ d for d in dirs if not d.startswith('gyptest') ]
      files = [ f for f in files if not f.startswith('gyptest') ]
      for dirname in dirs:
        source = os.path.join(root, dirname)
        destination = source.replace(source_dir, dest_dir)
        os.mkdir(destination)
        if sys.platform != 'win32':
          shutil.copystat(source, destination)
      for filename in files:
        source = os.path.join(root, filename)
        destination = source.replace(source_dir, dest_dir)
        shutil.copy2(source, destination)

  def initialize_build_tool(self):
    """
    Initializes the .build_tool attribute.

    Searches the .build_tool_list for an executable name on the user's
    $PATH.  The first tool on the list is used as-is if nothing is found
    on the current $PATH.
    """
    for build_tool in self.build_tool_list:
      if not build_tool:
        continue
      if os.path.isabs(build_tool):
        self.build_tool = build_tool
        return
      build_tool = self.where_is(build_tool)
      if build_tool:
        self.build_tool = build_tool
        return

    if self.build_tool_list:
      self.build_tool = self.build_tool_list[0]

  def relocate(self, source, destination):
    """
    Renames (relocates) the specified source (usually a directory)
    to the specified destination, creating the destination directory
    first if necessary.

    Note:  Don't use this as a generic "rename" operation.  In the
    future, "relocating" parts of a GYP tree may affect the state of
    the test to modify the behavior of later method calls.
    """
    destination_dir = os.path.dirname(destination)
    if not os.path.exists(destination_dir):
      self.subdir(destination_dir)
    os.rename(source, destination)

  def report_not_up_to_date(self):
    """
    Reports that a build is not up-to-date.

    This provides common reporting for formats that have complicated
    conditions for checking whether a build is up-to-date.  Formats
    that expect exact output from the command (make, scons) can
    just set stdout= when they call the run_build() method.
    """
    print "Build is not up-to-date:"
    print self.banner('STDOUT ')
    print self.stdout()
    stderr = self.stderr()
    if stderr:
      print self.banner('STDERR ')
      print stderr

  def run_gyp(self, gyp_file, *args, **kw):
    """
    Runs gyp against the specified gyp_file with the specified args.
    """
    # TODO:  --depth=. works around Chromium-specific tree climbing.
    args = ('--depth=.', '--format='+self.format, gyp_file) + args
    return self.run(program=self.gyp, arguments=args, **kw)

  def run(self, *args, **kw):
    """
    Executes a program by calling the superclass .run() method.

    This exists to provide a common place to filter out keyword
    arguments implemented in this layer, without having to update
    the tool-specific subclasses or clutter the tests themselves
    with platform-specific code.
    """
    if kw.has_key('SYMROOT'):
      del kw['SYMROOT']
    super(TestGypBase, self).run(*args, **kw)

  def set_configuration(self, configuration):
    """
    Sets the configuration, to be used for invoking the build
    tool and testing potential built output.
    """
    self.configuration = configuration

  def configuration_dirname(self):
    # Directory component of the configuration: the part before any
    # '|' separator (as used in MSVS 'Configuration|Platform' names).
    if self.configuration:
      return self.configuration.split('|')[0]
    else:
      return 'Default'

  def configuration_buildname(self):
    # Full configuration name as handed to the build tool.
    if self.configuration:
      return self.configuration
    else:
      return 'Default'

  #
  # Abstract methods to be defined by format-specific subclasses.
  #

  def build(self, gyp_file, target=None, **kw):
    """
    Runs a build of the specified target against the configuration
    generated from the specified gyp_file.

    A 'target' argument of None or the special value TestGyp.DEFAULT
    specifies the default argument for the underlying build tool.
    A 'target' argument of TestGyp.ALL specifies the 'all' target
    (if any) of the underlying build tool.
    """
    raise NotImplementedError

  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type.
    """
    raise NotImplementedError

  def built_file_basename(self, name, type=None, **kw):
    """
    Returns the base name of the specified file name, of the specified type.

    A bare=True keyword argument specifies that prefixes and suffixes
    shouldn't be applied.
    """
    if not kw.get('bare'):
      if type == self.EXECUTABLE:
        name = name + self._exe
      elif type == self.STATIC_LIB:
        name = self.lib_ + name + self._lib
      elif type == self.SHARED_LIB:
        name = self.dll_ + name + self._dll
    return name

  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable program built from a gyp-generated configuration.

    The specified name should be independent of any particular generator.
    Subclasses should find the output executable in the appropriate
    output build directory, tack on any necessary executable suffix, etc.
    """
    raise NotImplementedError

  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified target is up to date.

    The subclass should implement this by calling build()
    (or a reasonable equivalent), checking whatever conditions
    will tell it the build was an "up to date" null build, and
    failing if it isn't.
    """
    raise NotImplementedError
305
306
class TestGypGypd(TestGypBase):
  """
  Subclass for testing the GYP 'gypd' generator (spit out the
  internal data structure as pretty-printed Python).
  """
  # Generator name passed to gyp via --format.  No build tool is
  # involved, so the abstract build/run methods stay unimplemented.
  format = 'gypd'
313
314
class TestGypMake(TestGypBase):
  """
  Subclass for testing the GYP Make generator.
  """
  format = 'make'
  build_tool_list = ['make']
  # Make uses a literal 'all' target rather than the generic marker.
  ALL = 'all'
  def build(self, gyp_file, target=None, **kw):
    """
    Runs a Make build using the Makefiles generated from the specified
    gyp_file.
    """
    arguments = kw.get('arguments', [])[:]
    if self.configuration:
      arguments.append('BUILDTYPE=' + self.configuration)
    if target not in (None, self.DEFAULT):
      arguments.append(target)
    # Sub-directory builds provide per-gyp Makefiles (i.e.
    # Makefile.gyp_filename), so use that if there is no Makefile.
    chdir = kw.get('chdir', '')
    if not os.path.exists(os.path.join(chdir, 'Makefile')):
      print "NO Makefile in " + os.path.join(chdir, 'Makefile')
      arguments.insert(0, '-f')
      arguments.insert(1, os.path.splitext(gyp_file)[0] + '.Makefile')
    kw['arguments'] = arguments
    return self.run(program=self.build_tool, **kw)
  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified Make target is up to date.
    """
    if target in (None, self.DEFAULT):
      message_target = 'all'
    else:
      message_target = target
    # An up-to-date build prints exactly this and nothing else.
    kw['stdout'] = "make: Nothing to be done for `%s'.\n" % message_target
    return self.build(gyp_file, target, **kw)
  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable built by Make.
    """
    configuration = self.configuration_dirname()
    libdir = os.path.join('out', configuration, 'lib')
    # TODO(piman): when everything is cross-compile safe, remove lib.target
    os.environ['LD_LIBRARY_PATH'] = libdir + '.host:' + libdir + '.target'
    # Enclosing the name in a list avoids prepending the original dir.
    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
    return self.run(program=program, *args, **kw)
  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type,
    as built by Make.

    Built files are in the subdirectory 'out/{configuration}'.
    The default is 'out/Default'.

    A chdir= keyword argument specifies the source directory
    relative to which the output subdirectory can be found.

    "type" values of STATIC_LIB or SHARED_LIB append the necessary
    prefixes and suffixes to a platform-independent library base name.

    A libdir= keyword argument specifies a library subdirectory other
    than the default 'obj.target'.
    """
    result = []
    chdir = kw.get('chdir')
    if chdir:
      result.append(chdir)
    configuration = self.configuration_dirname()
    result.extend(['out', configuration])
    if type == self.STATIC_LIB:
      result.append(kw.get('libdir', 'obj.target'))
    elif type == self.SHARED_LIB:
      result.append(kw.get('libdir', 'lib.target'))
    result.append(self.built_file_basename(name, type, **kw))
    return self.workpath(*result)
391
392
class TestGypMSVS(TestGypBase):
  """
  Subclass for testing the GYP Visual Studio generator.
  """
  format = 'msvs'

  u = r'=== Build: 0 succeeded, 0 failed, (\d+) up-to-date, 0 skipped ==='
  up_to_date_re = re.compile(u, re.M)

  # The leading None tells .initialize_build_tool() below that 'devenv'
  # was not located on %PATH%.
  #
  # Note: we must use devenv.com to be able to capture build output.
  # Directly executing devenv.exe only sends output to BuildLog.htm.
  build_tool_list = [None, 'devenv.com']

  def initialize_build_tool(self):
    """
    Initializes the Visual Studio .build_tool parameter, searching %PATH%
    and %PATHEXT% for a devenv.{exe,bat,...} executable, and falling
    back to a hard-coded default (on the current drive) if necessary.
    """
    super(TestGypMSVS, self).initialize_build_tool()
    if self.build_tool:
      return
    # 'devenv' is not on the path; probe well-known install locations
    # and revisit this if it becomes important.
    candidates = [
      # Note: if you're using this, set GYP_MSVS_VERSION=2008
      # to get the tests to pass.
      ('C:\\Program Files (x86)',
        'Microsoft Visual Studio 9.0', 'Common7', 'IDE', 'devenv.com'),
      ('C:\\Program Files',
        'Microsoft Visual Studio 9.0', 'Common7', 'IDE', 'devenv.com'),
      ('C:\\Program Files (x86)',
        'Microsoft Visual Studio 8', 'Common7', 'IDE', 'devenv.com'),
      ('C:\\Program Files',
        'Microsoft Visual Studio 8', 'Common7', 'IDE', 'devenv.com'),
    ]
    for parts in candidates:
      candidate_path = os.path.join(*parts)
      if os.path.exists(candidate_path):
        self.build_tool = candidate_path
        break
  def build(self, gyp_file, target=None, rebuild=False, **kw):
    """
    Runs a Visual Studio build using the configuration generated
    from the specified gyp_file.
    """
    configuration = self.configuration_buildname()
    build = '/Rebuild' if rebuild else '/Build'
    arguments = kw.get('arguments', [])[:]
    arguments.extend([gyp_file.replace('.gyp', '.sln'),
                      build, configuration])
    # The Visual Studio generator doesn't emit an explicit 'all'
    # target, so 'all' is handled the same as the default.
    if target not in (None, self.ALL, self.DEFAULT):
      arguments.extend(['/Project', target])
    if self.configuration:
      arguments.extend(['/ProjectConfig', self.configuration])
    kw['arguments'] = arguments
    return self.run(program=self.build_tool, **kw)
  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified Visual Studio target is up to date.
    """
    result = self.build(gyp_file, target, **kw)
    if not result:
      match = self.up_to_date_re.search(self.stdout())
      if match is None or match.group(1) == '0':
        self.report_not_up_to_date()
        self.fail_test()
    return result
  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable built by Visual Studio.
    """
    configuration = self.configuration_dirname()
    # Wrapping the path in a list avoids prepending the original dir.
    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
    return self.run(program=program, *args, **kw)
  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type,
    as built by Visual Studio.

    Built files are in a subdirectory that matches the configuration
    name.  The default is 'Default'.

    A chdir= keyword argument specifies the source directory
    relative to which the output subdirectory can be found.

    "type" values of STATIC_LIB or SHARED_LIB append the necessary
    prefixes and suffixes to a platform-independent library base name.
    """
    pieces = []
    chdir = kw.get('chdir')
    if chdir:
      pieces.append(chdir)
    pieces.append(self.configuration_dirname())
    if type == self.STATIC_LIB:
      pieces.append('lib')
    pieces.append(self.built_file_basename(name, type, **kw))
    return self.workpath(*pieces)
500
501
class TestGypSCons(TestGypBase):
  """
  Subclass for testing the GYP SCons generator.
  """
  format = 'scons'
  build_tool_list = ['scons', 'scons.py']
  ALL = 'all'
  def build(self, gyp_file, target=None, **kw):
    """
    Runs a scons build using the SCons configuration generated from the
    specified gyp_file.
    """
    scons_args = kw.get('arguments', [])[:]
    gyp_dir = os.path.dirname(gyp_file)
    if gyp_dir:
      scons_args.extend(['-C', gyp_dir])
    if self.configuration:
      scons_args.append('--mode=' + self.configuration)
    if target not in (None, self.DEFAULT):
      scons_args.append(target)
    kw['arguments'] = scons_args
    return self.run(program=self.build_tool, **kw)
  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified SCons target is up to date.
    """
    targets = 'all' if target in (None, self.DEFAULT) else target
    # One "is up to date" line is expected per whitespace-separated target.
    kw['stdout'] = ''.join("scons: `%s' is up to date.\n" % name
                           for name in targets.split())
    scons_args = kw.get('arguments', [])[:]
    scons_args.append('-Q')
    kw['arguments'] = scons_args
    return self.build(gyp_file, target, **kw)
  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable built by scons.
    """
    configuration = self.configuration_dirname()
    os.environ['LD_LIBRARY_PATH'] = os.path.join(configuration, 'lib')
    # Wrapping the path in a list avoids prepending the original dir.
    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
    return self.run(program=program, *args, **kw)
  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type,
    as built by Scons.

    Built files are in a subdirectory that matches the configuration
    name.  The default is 'Default'.

    A chdir= keyword argument specifies the source directory
    relative to which the output subdirectory can be found.

    "type" values of STATIC_LIB or SHARED_LIB append the necessary
    prefixes and suffixes to a platform-independent library base name.
    """
    pieces = []
    chdir = kw.get('chdir')
    if chdir:
      pieces.append(chdir)
    pieces.append(self.configuration_dirname())
    if type in (self.STATIC_LIB, self.SHARED_LIB):
      pieces.append('lib')
    pieces.append(self.built_file_basename(name, type, **kw))
    return self.workpath(*pieces)
572
573
class TestGypXcode(TestGypBase):
  """
  Subclass for testing the GYP Xcode generator.
  """
  format = 'xcode'
  build_tool_list = ['xcodebuild']

  phase_script_execution = ("\n"
                            "PhaseScriptExecution /\\S+/Script-[0-9A-F]+\\.sh\n"
                            " cd /\\S+\n"
                            " /bin/sh -c /\\S+/Script-[0-9A-F]+\\.sh\n"
                            "(make: Nothing to be done for `all'\\.\n)?")

  strip_up_to_date_expressions = [
    # Actions and rules may still run when the overall build target is
    # up to date; their GYP-generated output must be stripped before
    # comparing against the expected null-build ending.
    re.compile(phase_script_execution, re.S),

    # distcc_pump can append its shutdown message after the
    # "BUILD SUCCEEDED" line, so strip that as well.
    re.compile('__________Shutting down distcc-pump include server\n', re.S),
  ]

  up_to_date_ending = 'Checking Dependencies...\n** BUILD SUCCEEDED **\n'

  def build(self, gyp_file, target=None, **kw):
    """
    Runs an xcodebuild using the .xcodeproj generated from the specified
    gyp_file.
    """
    # Operate on a copy of 'arguments'; the caller may not expect the
    # list it passed in to be modified.
    xcode_args = kw.get('arguments', [])[:]
    xcode_args.extend(['-project', gyp_file.replace('.gyp', '.xcodeproj')])
    if target == self.ALL:
      xcode_args.append('-alltargets',)
    elif target not in (None, self.DEFAULT):
      xcode_args.extend(['-target', target])
    if self.configuration:
      xcode_args.extend(['-configuration', self.configuration])
    symroot = kw.get('SYMROOT', '$SRCROOT/build')
    if symroot:
      xcode_args.append('SYMROOT=' + symroot)
    kw['arguments'] = xcode_args
    return self.run(program=self.build_tool, **kw)
  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified Xcode target is up to date.
    """
    result = self.build(gyp_file, target, **kw)
    if not result:
      output = self.stdout()
      for regex in self.strip_up_to_date_expressions:
        output = regex.sub('', output)
      if not output.endswith(self.up_to_date_ending):
        self.report_not_up_to_date()
        self.fail_test()
    return result
  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable built by xcodebuild.
    """
    configuration = self.configuration_dirname()
    os.environ['DYLD_LIBRARY_PATH'] = os.path.join('build', configuration)
    # Wrapping the path in a list avoids prepending the original dir.
    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
    return self.run(program=program, *args, **kw)
  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type,
    as built by Xcode.

    Built files are in the subdirectory 'build/{configuration}'.
    The default is 'build/Default'.

    A chdir= keyword argument specifies the source directory
    relative to which the output subdirectory can be found.

    "type" values of STATIC_LIB or SHARED_LIB append the necessary
    prefixes and suffixes to a platform-independent library base name.
    """
    pieces = []
    chdir = kw.get('chdir')
    if chdir:
      pieces.append(chdir)
    pieces.extend(['build', self.configuration_dirname()])
    pieces.append(self.built_file_basename(name, type, **kw))
    return self.workpath(*pieces)
663
664
# Registry of all TestGyp* subclasses; the TestGyp() factory below maps a
# generator format name (the class's .format attribute, e.g. 'make') to
# the class that drives builds for that generator.
format_class_list = [
  TestGypGypd,
  TestGypMake,
  TestGypMSVS,
  TestGypSCons,
  TestGypXcode,
]
672
def TestGyp(*args, **kw):
  """
  Returns an appropriate TestGyp* instance for a specified GYP format.

  The format is taken from the 'format' keyword argument when present
  (and removed from kw before construction); otherwise it falls back to
  the TESTGYP_FORMAT environment variable.  All other arguments are
  forwarded unchanged to the matching format class constructor.

  Raises:
    Exception: if no class in format_class_list matches the format.
  """
  format = kw.get('format')
  if format:
    del kw['format']
  else:
    format = os.environ.get('TESTGYP_FORMAT')
  for format_class in format_class_list:
    if format == format_class.format:
      return format_class(*args, **kw)
  # Instantiate the exception explicitly: the original
  # "raise Exception, msg" form is Python-2-only syntax; this form is
  # valid in both Python 2 and Python 3.
  raise Exception("unknown format %r" % format)
+0
-84
mozc_build_tools/gyp/test/library/gyptest-shared.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple build of a "Hello, world!" program with shared libraries,
8 including verifying that libraries are rebuilt correctly when functions
9 move between libraries.
10 """
11
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 test.run_gyp('library.gyp',
17 '-Dlibrary=shared_library',
18 '-Dmoveable_function=lib1',
19 chdir='src')
20
21 test.relocate('src', 'relocate/src')
22
23 test.build('library.gyp', test.ALL, chdir='relocate/src')
24
25 expect = """\
26 Hello from program.c
27 Hello from lib1.c
28 Hello from lib2.c
29 Hello from lib1_moveable.c
30 """
31 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
32
33
34 test.run_gyp('library.gyp',
35 '-Dlibrary=shared_library',
36 '-Dmoveable_function=lib2',
37 chdir='relocate/src')
38
39 # Update program.c to force a rebuild.
40 test.sleep()
41 contents = test.read('relocate/src/program.c')
42 contents = contents.replace('Hello', 'Hello again')
43 test.write('relocate/src/program.c', contents)
44
45 test.build('library.gyp', test.ALL, chdir='relocate/src')
46
47 expect = """\
48 Hello again from program.c
49 Hello from lib1.c
50 Hello from lib2.c
51 Hello from lib2_moveable.c
52 """
53 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
54
55
56 test.run_gyp('library.gyp',
57 '-Dlibrary=shared_library',
58 '-Dmoveable_function=lib1',
59 chdir='relocate/src')
60
61 # Update program.c to force a rebuild.
62 test.sleep()
63 contents = test.read('relocate/src/program.c')
64 contents = contents.replace('again', 'again again')
65 test.write('relocate/src/program.c', contents)
66
67 # TODO(sgk): we have to force a rebuild of lib2 so that it weeds out
68 # the "moved" module. This should be done in gyp by adding a dependency
69 # on the generated .vcproj file itself.
70 test.touch('relocate/src/lib2.c')
71
72 test.build('library.gyp', test.ALL, chdir='relocate/src')
73
74 expect = """\
75 Hello again again from program.c
76 Hello from lib1.c
77 Hello from lib2.c
78 Hello from lib1_moveable.c
79 """
80 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
81
82
83 test.pass_test()
+0
-84
mozc_build_tools/gyp/test/library/gyptest-static.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple build of a "Hello, world!" program with static libraries,
8 including verifying that libraries are rebuilt correctly when functions
9 move between libraries.
10 """
11
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 test.run_gyp('library.gyp',
17 '-Dlibrary=static_library',
18 '-Dmoveable_function=lib1',
19 chdir='src')
20
21 test.relocate('src', 'relocate/src')
22
23 test.build('library.gyp', test.ALL, chdir='relocate/src')
24
25 expect = """\
26 Hello from program.c
27 Hello from lib1.c
28 Hello from lib2.c
29 Hello from lib1_moveable.c
30 """
31 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
32
33
34 test.run_gyp('library.gyp',
35 '-Dlibrary=static_library',
36 '-Dmoveable_function=lib2',
37 chdir='relocate/src')
38
39 # Update program.c to force a rebuild.
40 test.sleep()
41 contents = test.read('relocate/src/program.c')
42 contents = contents.replace('Hello', 'Hello again')
43 test.write('relocate/src/program.c', contents)
44
45 test.build('library.gyp', test.ALL, chdir='relocate/src')
46
47 expect = """\
48 Hello again from program.c
49 Hello from lib1.c
50 Hello from lib2.c
51 Hello from lib2_moveable.c
52 """
53 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
54
55
56 test.run_gyp('library.gyp',
57 '-Dlibrary=static_library',
58 '-Dmoveable_function=lib1',
59 chdir='relocate/src')
60
61 # Update program.c and lib2.c to force a rebuild.
62 test.sleep()
63 contents = test.read('relocate/src/program.c')
64 contents = contents.replace('again', 'again again')
65 test.write('relocate/src/program.c', contents)
66
67 # TODO(sgk): we have to force a rebuild of lib2 so that it weeds out
68 # the "moved" module. This should be done in gyp by adding a dependency
69 # on the generated .vcproj file itself.
70 test.touch('relocate/src/lib2.c')
71
72 test.build('library.gyp', test.ALL, chdir='relocate/src')
73
74 expect = """\
75 Hello again again from program.c
76 Hello from lib1.c
77 Hello from lib2.c
78 Hello from lib1_moveable.c
79 """
80 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
81
82
83 test.pass_test()
+0
-10
mozc_build_tools/gyp/test/library/src/lib1.c less more
0 #include <stdio.h>
1
2 #ifdef _WIN32
3 __declspec(dllexport)
4 #endif
5 void lib1_function(void)
6 {
7 fprintf(stdout, "Hello from lib1.c\n");
8 fflush(stdout);
9 }
+0
-10
mozc_build_tools/gyp/test/library/src/lib1_moveable.c less more
0 #include <stdio.h>
1
2 #ifdef _WIN32
3 __declspec(dllexport)
4 #endif
5 void moveable_function(void)
6 {
7 fprintf(stdout, "Hello from lib1_moveable.c\n");
8 fflush(stdout);
9 }
+0
-10
mozc_build_tools/gyp/test/library/src/lib2.c less more
0 #include <stdio.h>
1
2 #ifdef _WIN32
3 __declspec(dllexport)
4 #endif
5 void lib2_function(void)
6 {
7 fprintf(stdout, "Hello from lib2.c\n");
8 fflush(stdout);
9 }
+0
-10
mozc_build_tools/gyp/test/library/src/lib2_moveable.c less more
0 #include <stdio.h>
1
2 #ifdef _WIN32
3 __declspec(dllexport)
4 #endif
5 void moveable_function(void)
6 {
7 fprintf(stdout, "Hello from lib2_moveable.c\n");
8 fflush(stdout);
9 }
+0
-58
mozc_build_tools/gyp/test/library/src/library.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'variables': {
6 'moveable_function%': 0,
7 },
8 'targets': [
9 {
10 'target_name': 'program',
11 'type': 'executable',
12 'dependencies': [
13 'lib1',
14 'lib2',
15 ],
16 'sources': [
17 'program.c',
18 ],
19 },
20 {
21 'target_name': 'lib1',
22 'type': '<(library)',
23 'sources': [
24 'lib1.c',
25 ],
26 'conditions': [
27 ['moveable_function=="lib1"', {
28 'sources': [
29 'lib1_moveable.c',
30 ],
31 }],
32 ],
33 },
34 {
35 'target_name': 'lib2',
36 'type': '<(library)',
37 'sources': [
38 'lib2.c',
39 ],
40 'conditions': [
41 ['moveable_function=="lib2"', {
42 'sources': [
43 'lib2_moveable.c',
44 ],
45 }],
46 ],
47 },
48 ],
49 'conditions': [
50 ['OS=="linux"', {
51 'target_defaults': {
52 # Support 64-bit shared libs (also works fine for 32-bit).
53 'cflags': ['-fPIC'],
54 },
55 }],
56 ],
57 }
+0
-15
mozc_build_tools/gyp/test/library/src/program.c less more
0 #include <stdio.h>
1
2 extern void lib1_function(void);
3 extern void lib2_function(void);
4 extern void moveable_function(void);
5
6 int main(int argc, char *argv[])
7 {
8 fprintf(stdout, "Hello from program.c\n");
9 fflush(stdout);
10 lib1_function();
11 lib2_function();
12 moveable_function();
13 return 0;
14 }
+0
-28
mozc_build_tools/gyp/test/module/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple build of a "Hello, world!" program with loadable modules. The
8 default for all platforms should be to output the loadable modules to the same
9 path as the executable.
10 """
11
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 test.run_gyp('module.gyp', chdir='src')
17
18 test.build('module.gyp', test.ALL, chdir='src')
19
20 expect = """\
21 Hello from program.c
22 Hello from lib1.c
23 Hello from lib2.c
24 """
25 test.run_built_executable('program', chdir='src', stdout=expect)
26
27 test.pass_test()
+0
-10
mozc_build_tools/gyp/test/module/src/lib1.c less more
0 #include <stdio.h>
1
2 #ifdef _WIN32
3 __declspec(dllexport)
4 #endif
5 void module_main(void)
6 {
7 fprintf(stdout, "Hello from lib1.c\n");
8 fflush(stdout);
9 }
+0
-10
mozc_build_tools/gyp/test/module/src/lib2.c less more
0 #include <stdio.h>
1
2 #ifdef _WIN32
3 __declspec(dllexport)
4 #endif
5 void module_main(void)
6 {
7 fprintf(stdout, "Hello from lib2.c\n");
8 fflush(stdout);
9 }
+0
-55
mozc_build_tools/gyp/test/module/src/module.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'conditions': [
7 ['OS=="win"', {
8 'defines': ['PLATFORM_WIN'],
9 }],
10 ['OS=="mac"', {
11 'defines': ['PLATFORM_MAC'],
12 }],
13 ['OS=="linux"', {
14 'defines': ['PLATFORM_LINUX'],
15 # Support 64-bit shared libs (also works fine for 32-bit).
16 'cflags': ['-fPIC'],
17 'ldflags': ['-ldl'],
18 }],
19 ],
20 },
21 'targets': [
22 {
23 'target_name': 'program',
24 'type': 'executable',
25 'dependencies': [
26 'lib1',
27 'lib2',
28 ],
29 'sources': [
30 'program.c',
31 ],
32 },
33 {
34 'target_name': 'lib1',
35 'type': 'loadable_module',
36 'product_name': 'lib1',
37 'product_prefix': '',
38 'xcode_settings': {'OTHER_LDFLAGS': ['-dynamiclib'], 'MACH_O_TYPE': ''},
39 'sources': [
40 'lib1.c',
41 ],
42 },
43 {
44 'target_name': 'lib2',
45 'product_name': 'lib2',
46 'product_prefix': '',
47 'type': 'loadable_module',
48 'xcode_settings': {'OTHER_LDFLAGS': ['-dynamiclib'], 'MACH_O_TYPE': ''},
49 'sources': [
50 'lib2.c',
51 ],
52 },
53 ],
54 }
+0
-111
mozc_build_tools/gyp/test/module/src/program.c less more
0 #include <stdio.h>
1 #include <stdlib.h>
2
3 #if defined(PLATFORM_WIN)
4 #include <windows.h>
5 #elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
6 #include <dlfcn.h>
7 #include <libgen.h>
8 #include <string.h>
9 #include <sys/param.h>
10 #define MAX_PATH PATH_MAX
11 #endif
12
13 #if defined(PLATFORM_WIN)
14 #define MODULE_SUFFIX ".dll"
15 #elif defined(PLATFORM_MAC)
16 #define MODULE_SUFFIX ".dylib"
17 #elif defined(PLATFORM_LINUX)
18 #define MODULE_SUFFIX ".so"
19 #endif
20
21 typedef void (*module_symbol)(void);
22 char bin_path[MAX_PATH + 1];
23
24
25 void CallModule(const char* module) {
26 char module_path[MAX_PATH + 1];
27 const char* module_function = "module_main";
28 module_symbol funcptr;
29 #if defined(PLATFORM_WIN)
30 HMODULE dl;
31 char drive[_MAX_DRIVE];
32 char dir[_MAX_DIR];
33
34 if (_splitpath_s(bin_path, drive, _MAX_DRIVE, dir, _MAX_DIR,
35 NULL, 0, NULL, 0)) {
36 fprintf(stderr, "Failed to split executable path.\n");
37 return;
38 }
39 if (_makepath_s(module_path, MAX_PATH, drive, dir, module, MODULE_SUFFIX)) {
40 fprintf(stderr, "Failed to calculate module path.\n");
41 return;
42 }
43
44 dl = LoadLibrary(module_path);
45 if (!dl) {
46 fprintf(stderr, "Failed to open module: %s\n", module_path);
47 return;
48 }
49
50 funcptr = (module_symbol) GetProcAddress(dl, module_function);
51 if (!funcptr) {
52 fprintf(stderr, "Failed to find symbol: %s\n", module_function);
53 return;
54 }
55 funcptr();
56
57 FreeLibrary(dl);
58 #elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
59 void* dl;
60 char* path_copy = strdup(bin_path);
61 char* bin_dir = dirname(path_copy);
62 int path_size = snprintf(module_path, MAX_PATH, "%s/%s%s", bin_dir, module,
63 MODULE_SUFFIX);
64 free(path_copy);
65 if (path_size < 0 || path_size > MAX_PATH) {
66 fprintf(stderr, "Failed to calculate module path.\n");
67 return;
68 }
69 module_path[path_size] = 0;
70
71 dl = dlopen(module_path, RTLD_LAZY);
72 if (!dl) {
73 fprintf(stderr, "Failed to open module: %s\n", module_path);
74 return;
75 }
76
77 funcptr = dlsym(dl, module_function);
78 if (!funcptr) {
79 fprintf(stderr, "Failed to find symbol: %s\n", module_function);
80 return;
81 }
82 funcptr();
83
84 dlclose(dl);
85 #endif
86 }
87
88 int main(int argc, char *argv[])
89 {
90 fprintf(stdout, "Hello from program.c\n");
91 fflush(stdout);
92
93 #if defined(PLATFORM_WIN)
94 if (!GetModuleFileName(NULL, bin_path, MAX_PATH)) {
95 fprintf(stderr, "Failed to determine executable path.\n");
96 return;
97 }
98 #elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
99 // Using argv[0] should be OK here since we control how the tests run, and
100 // can avoid exec and such issues that make it unreliable.
101 if (!realpath(argv[0], bin_path)) {
102 fprintf(stderr, "Failed to determine executable path (%s).\n", argv[0]);
103 return;
104 }
105 #endif
106
107 CallModule("lib1");
108 CallModule("lib2");
109 return 0;
110 }
+0
-22
mozc_build_tools/gyp/test/msvs/express/base/base.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'a',
8 'type': 'static_library',
9 'sources': [
10 'a.c',
11 ],
12 },
13 {
14 'target_name': 'b',
15 'type': 'static_library',
16 'sources': [
17 'b.c',
18 ],
19 },
20 ],
21 }
+0
-19
mozc_build_tools/gyp/test/msvs/express/express.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'express',
8 'type': 'executable',
9 'dependencies': [
10 'base/base.gyp:a',
11 'base/base.gyp:b',
12 ],
13 'sources': [
14 'main.c',
15 ],
16 },
17 ],
18 }
+0
-29
mozc_build_tools/gyp/test/msvs/express/gyptest-express.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that flat solutions get generated for Express versions of
8 Visual Studio.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp(formats=['msvs'])
14
15 test.run_gyp('express.gyp', '-G', 'msvs_version=2005')
16 test.must_contain('express.sln', '(base)')
17
18 test.run_gyp('express.gyp', '-G', 'msvs_version=2008')
19 test.must_contain('express.sln', '(base)')
20
21 test.run_gyp('express.gyp', '-G', 'msvs_version=2005e')
22 test.must_not_contain('express.sln', '(base)')
23
24 test.run_gyp('express.gyp', '-G', 'msvs_version=2008e')
25 test.must_not_contain('express.sln', '(base)')
26
27
28 test.pass_test()
+0
-35
mozc_build_tools/gyp/test/multiple-targets/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 """
8
9 import TestGyp
10
11 test = TestGyp.TestGyp()
12
13 test.run_gyp('multiple.gyp', chdir='src')
14
15 test.relocate('src', 'relocate/src')
16
17 # TODO(sgk): remove stderr=None when the --generator-output= support
18 # gets rid of the scons warning
19 test.build('multiple.gyp', test.ALL, chdir='relocate/src', stderr=None)
20
21 expect1 = """\
22 hello from prog1.c
23 hello from common.c
24 """
25
26 expect2 = """\
27 hello from prog2.c
28 hello from common.c
29 """
30
31 test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src')
32 test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src')
33
34 test.pass_test()
+0
-35
mozc_build_tools/gyp/test/multiple-targets/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 """
8
9 import TestGyp
10
11 test = TestGyp.TestGyp()
12
13 test.run_gyp('multiple.gyp', chdir='src')
14
15 test.relocate('src', 'relocate/src')
16
17 # TODO(sgk): remove stderr=None when the --generator-output= support
18 # gets rid of the scons warning
19 test.build('multiple.gyp', chdir='relocate/src', stderr=None)
20
21 expect1 = """\
22 hello from prog1.c
23 hello from common.c
24 """
25
26 expect2 = """\
27 hello from prog2.c
28 hello from common.c
29 """
30
31 test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src')
32 test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src')
33
34 test.pass_test()
+0
-7
mozc_build_tools/gyp/test/multiple-targets/src/common.c less more
0 #include <stdio.h>
1
2 void common(void)
3 {
4 printf("hello from common.c\n");
5 return;
6 }
+0
-24
mozc_build_tools/gyp/test/multiple-targets/src/multiple.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'prog1',
8 'type': 'executable',
9 'sources': [
10 'prog1.c',
11 'common.c',
12 ],
13 },
14 {
15 'target_name': 'prog2',
16 'type': 'executable',
17 'sources': [
18 'prog2.c',
19 'common.c',
20 ],
21 },
22 ],
23 }
+0
-10
mozc_build_tools/gyp/test/multiple-targets/src/prog1.c less more
0 #include <stdio.h>
1
2 extern void common(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("hello from prog1.c\n");
7 common();
8 return 0;
9 }
+0
-10
mozc_build_tools/gyp/test/multiple-targets/src/prog2.c less more
0 #include <stdio.h>
1
2 extern void common(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("hello from prog2.c\n");
7 common();
8 return 0;
9 }
+0
-19
mozc_build_tools/gyp/test/no-output/gyptest-no-output.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verified things don't explode when there are targets without outputs.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('nooutput.gyp', chdir='src')
15 test.relocate('src', 'relocate/src')
16 test.build('nooutput.gyp', chdir='relocate/src')
17
18 test.pass_test()
+0
-17
mozc_build_tools/gyp/test/no-output/src/nooutput.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'no_output',
8 'type': 'none',
9 'direct_dependent_settings': {
10 'defines': [
11 'NADA',
12 ],
13 },
14 },
15 ],
16 }
+0
-43
mozc_build_tools/gyp/test/product/gyptest-product.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simplest-possible build of a "Hello, world!" program
8 using the default build target.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('product.gyp')
16 test.build('product.gyp')
17
18 # executables
19 test.built_file_must_exist('alt1' + test._exe, test.EXECUTABLE, bare=True)
20 test.built_file_must_exist('hello2.stuff', test.EXECUTABLE, bare=True)
21 test.built_file_must_exist('yoalt3.stuff', test.EXECUTABLE, bare=True)
22
23 # shared libraries
24 test.built_file_must_exist(test.dll_ + 'alt4' + test._dll,
25 test.SHARED_LIB, bare=True)
26 test.built_file_must_exist(test.dll_ + 'hello5.stuff',
27 test.SHARED_LIB, bare=True)
28 test.built_file_must_exist('yoalt6.stuff', test.SHARED_LIB, bare=True)
29
30 # static libraries
31 test.built_file_must_exist(test.lib_ + 'alt7' + test._lib,
32 test.STATIC_LIB, bare=True)
33 test.built_file_must_exist(test.lib_ + 'hello8.stuff',
34 test.STATIC_LIB, bare=True)
35 test.built_file_must_exist('yoalt9.stuff', test.STATIC_LIB, bare=True)
36
37 # alternate product_dir
38 test.built_file_must_exist('bob/yoalt10.stuff', test.EXECUTABLE, bare=True)
39 test.built_file_must_exist('bob/yoalt11.stuff', test.EXECUTABLE, bare=True)
40 test.built_file_must_exist('bob/yoalt12.stuff', test.EXECUTABLE, bare=True)
41
42 test.pass_test()
+0
-15
mozc_build_tools/gyp/test/product/hello.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 int func1(void) {
7 return 42;
8 }
9
10 int main(int argc, char *argv[]) {
11 printf("Hello, world!\n");
12 printf("%d\n", func1());
13 return 0;
14 }
+0
-128
mozc_build_tools/gyp/test/product/product.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'hello1',
8 'product_name': 'alt1',
9 'type': 'executable',
10 'sources': [
11 'hello.c',
12 ],
13 },
14 {
15 'target_name': 'hello2',
16 'product_extension': 'stuff',
17 'type': 'executable',
18 'sources': [
19 'hello.c',
20 ],
21 },
22 {
23 'target_name': 'hello3',
24 'product_name': 'alt3',
25 'product_extension': 'stuff',
26 'product_prefix': 'yo',
27 'type': 'executable',
28 'sources': [
29 'hello.c',
30 ],
31 },
32
33 {
34 'target_name': 'hello4',
35 'product_name': 'alt4',
36 'type': 'shared_library',
37 'sources': [
38 'hello.c',
39 ],
40 },
41 {
42 'target_name': 'hello5',
43 'product_extension': 'stuff',
44 'type': 'shared_library',
45 'sources': [
46 'hello.c',
47 ],
48 },
49 {
50 'target_name': 'hello6',
51 'product_name': 'alt6',
52 'product_extension': 'stuff',
53 'product_prefix': 'yo',
54 'type': 'shared_library',
55 'sources': [
56 'hello.c',
57 ],
58 },
59
60 {
61 'target_name': 'hello7',
62 'product_name': 'alt7',
63 'type': 'static_library',
64 'sources': [
65 'hello.c',
66 ],
67 },
68 {
69 'target_name': 'hello8',
70 'product_extension': 'stuff',
71 'type': 'static_library',
72 'sources': [
73 'hello.c',
74 ],
75 },
76 {
77 'target_name': 'hello9',
78 'product_name': 'alt9',
79 'product_extension': 'stuff',
80 'product_prefix': 'yo',
81 'type': 'static_library',
82 'sources': [
83 'hello.c',
84 ],
85 },
86 {
87 'target_name': 'hello10',
88 'product_name': 'alt10',
89 'product_extension': 'stuff',
90 'product_prefix': 'yo',
91 'product_dir': '<(PRODUCT_DIR)/bob',
92 'type': 'executable',
93 'sources': [
94 'hello.c',
95 ],
96 },
97 {
98 'target_name': 'hello11',
99 'product_name': 'alt11',
100 'product_extension': 'stuff',
101 'product_prefix': 'yo',
102 'product_dir': '<(PRODUCT_DIR)/bob',
103 'type': 'shared_library',
104 'sources': [
105 'hello.c',
106 ],
107 },
108 {
109 'target_name': 'hello12',
110 'product_name': 'alt12',
111 'product_extension': 'stuff',
112 'product_prefix': 'yo',
113 'product_dir': '<(PRODUCT_DIR)/bob',
114 'type': 'static_library',
115 'sources': [
116 'hello.c',
117 ],
118 },
119 ],
120 'conditions': [
121 ['OS=="linux"', {
122 'target_defaults': {
123 'cflags': ['-fPIC'],
124 },
125 }],
126 ],
127 }
+0
-47
mozc_build_tools/gyp/test/rules/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple rules when using an explicit build target of 'all'.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('actions.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('actions.gyp', test.ALL, chdir='relocate/src')
19
20 expect = """\
21 Hello from program.c
22 Hello from function1.in
23 Hello from function2.in
24 """
25
26 if test.format == 'xcode':
27 chdir = 'relocate/src/subdir1'
28 else:
29 chdir = 'relocate/src'
30 test.run_built_executable('program', chdir=chdir, stdout=expect)
31
32 expect = """\
33 Hello from program.c
34 Hello from function3.in
35 """
36
37 if test.format == 'xcode':
38 chdir = 'relocate/src/subdir3'
39 else:
40 chdir = 'relocate/src'
41 test.run_built_executable('program2', chdir=chdir, stdout=expect)
42
43 test.must_match('relocate/src/subdir2/file1.out', "Hello from file1.in\n")
44 test.must_match('relocate/src/subdir2/file2.out', "Hello from file2.in\n")
45
46 test.pass_test()
+0
-47
mozc_build_tools/gyp/test/rules/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple rules when using an explicit build target of 'all'.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('actions.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('actions.gyp', chdir='relocate/src')
19
20 expect = """\
21 Hello from program.c
22 Hello from function1.in
23 Hello from function2.in
24 """
25
26 if test.format == 'xcode':
27 chdir = 'relocate/src/subdir1'
28 else:
29 chdir = 'relocate/src'
30 test.run_built_executable('program', chdir=chdir, stdout=expect)
31
32 expect = """\
33 Hello from program.c
34 Hello from function3.in
35 """
36
37 if test.format == 'xcode':
38 chdir = 'relocate/src/subdir3'
39 else:
40 chdir = 'relocate/src'
41 test.run_built_executable('program2', chdir=chdir, stdout=expect)
42
43 test.must_match('relocate/src/subdir2/file1.out', "Hello from file1.in\n")
44 test.must_match('relocate/src/subdir2/file2.out', "Hello from file2.in\n")
45
46 test.pass_test()
+0
-17
mozc_build_tools/gyp/test/rules/src/actions.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'pull_in_all_actions',
8 'type': 'none',
9 'dependencies': [
10 'subdir1/executable.gyp:*',
11 'subdir2/none.gyp:*',
12 'subdir3/executable2.gyp:*',
13 ],
14 },
15 ],
16 }
+0
-11
mozc_build_tools/gyp/test/rules/src/copy-file.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5 import sys
6
7 contents = open(sys.argv[1], 'r').read()
8 open(sys.argv[2], 'wb').write(contents)
9
10 sys.exit(0)
+0
-37
mozc_build_tools/gyp/test/rules/src/subdir1/executable.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program',
8 'type': 'executable',
9 'msvs_cygwin_shell': 0,
10 'sources': [
11 'program.c',
12 'function1.in',
13 'function2.in',
14 ],
15 'rules': [
16 {
17 'rule_name': 'copy_file',
18 'extension': 'in',
19 'inputs': [
20 '../copy-file.py',
21 ],
22 'outputs': [
23 # TODO: fix SCons and Make to support generated files not
24 # in a variable-named path like <(INTERMEDIATE_DIR)
25 #'<(RULE_INPUT_ROOT).c',
26 '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
27 ],
28 'action': [
29 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
30 ],
31 'process_outputs_as_sources': 1,
32 },
33 ],
34 },
35 ],
36 }
+0
-6
mozc_build_tools/gyp/test/rules/src/subdir1/function1.in less more
0 #include <stdio.h>
1
2 void function1(void)
3 {
4 printf("Hello from function1.in\n");
5 }
+0
-6
mozc_build_tools/gyp/test/rules/src/subdir1/function2.in less more
0 #include <stdio.h>
1
2 void function2(void)
3 {
4 printf("Hello from function2.in\n");
5 }
+0
-12
mozc_build_tools/gyp/test/rules/src/subdir1/program.c less more
0 #include <stdio.h>
1
2 extern void function1(void);
3 extern void function2(void);
4
5 int main(int argc, char *argv[])
6 {
7 printf("Hello from program.c\n");
8 function1();
9 function2();
10 return 0;
11 }
+0
-1
mozc_build_tools/gyp/test/rules/src/subdir2/file1.in less more
0 Hello from file1.in
+0
-1
mozc_build_tools/gyp/test/rules/src/subdir2/file2.in less more
0 Hello from file2.in
+0
-33
mozc_build_tools/gyp/test/rules/src/subdir2/none.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'files',
8 'type': 'none',
9 'msvs_cygwin_shell': 0,
10 'sources': [
11 'file1.in',
12 'file2.in',
13 ],
14 'rules': [
15 {
16 'rule_name': 'copy_file',
17 'extension': 'in',
18 'inputs': [
19 '../copy-file.py',
20 ],
21 'outputs': [
22 '<(RULE_INPUT_ROOT).out',
23 ],
24 'action': [
25 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
26 ],
27 'process_outputs_as_sources': 1,
28 },
29 ],
30 },
31 ],
32 }
+0
-37
mozc_build_tools/gyp/test/rules/src/subdir3/executable2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 # This one tests that rules are properly written if extensions are different
5 # between the target's sources (program.c) and the generated files
6 # (function3.cc)
7
8 {
9 'targets': [
10 {
11 'target_name': 'program2',
12 'type': 'executable',
13 'msvs_cygwin_shell': 0,
14 'sources': [
15 'program.c',
16 'function3.in',
17 ],
18 'rules': [
19 {
20 'rule_name': 'copy_file',
21 'extension': 'in',
22 'inputs': [
23 '../copy-file.py',
24 ],
25 'outputs': [
26 '<(SHARED_INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).cc',
27 ],
28 'action': [
29 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
30 ],
31 'process_outputs_as_sources': 1,
32 },
33 ],
34 },
35 ],
36 }
+0
-6
mozc_build_tools/gyp/test/rules/src/subdir3/function3.in less more
0 #include <stdio.h>
1
2 extern "C" void function3(void)
3 {
4 printf("Hello from function3.in\n");
5 }
+0
-10
mozc_build_tools/gyp/test/rules/src/subdir3/program.c less more
0 #include <stdio.h>
1
2 extern void function3(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from program.c\n");
7 function3();
8 return 0;
9 }
+0
-74
mozc_build_tools/gyp/test/rules-rebuild/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that a rule that generates multiple outputs rebuilds
8 correctly when the inputs change.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 if test.format == 'msvs':
16 msg = 'TODO: issue 120: disabled on MSVS due to test execution problems.\n'
17 test.skip_test(msg)
18
19 test.run_gyp('same_target.gyp', chdir='src')
20
21 test.relocate('src', 'relocate/src')
22
23
24 test.build('same_target.gyp', test.ALL, chdir='relocate/src')
25
26 expect = """\
27 Hello from main.c
28 Hello from prog1.in!
29 Hello from prog2.in!
30 """
31
32 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
33
34 test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
35
36
37 test.sleep()
38 contents = test.read(['relocate', 'src', 'prog1.in'])
39 contents = contents.replace('!', ' AGAIN!')
40 test.write(['relocate', 'src', 'prog1.in'], contents)
41
42 test.build('same_target.gyp', test.ALL, chdir='relocate/src')
43
44 expect = """\
45 Hello from main.c
46 Hello from prog1.in AGAIN!
47 Hello from prog2.in!
48 """
49
50 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
51
52 test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
53
54
55 test.sleep()
56 contents = test.read(['relocate', 'src', 'prog2.in'])
57 contents = contents.replace('!', ' AGAIN!')
58 test.write(['relocate', 'src', 'prog2.in'], contents)
59
60 test.build('same_target.gyp', test.ALL, chdir='relocate/src')
61
62 expect = """\
63 Hello from main.c
64 Hello from prog1.in AGAIN!
65 Hello from prog2.in AGAIN!
66 """
67
68 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
69
70 test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
71
72
73 test.pass_test()
+0
-74
mozc_build_tools/gyp/test/rules-rebuild/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that a rule that generates multiple outputs rebuilds
8 correctly when the inputs change.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 if test.format == 'msvs':
16 msg = 'TODO: issue 120: disabled on MSVS due to test execution problems.\n'
17 test.skip_test(msg)
18
19 test.run_gyp('same_target.gyp', chdir='src')
20
21 test.relocate('src', 'relocate/src')
22
23
24 test.build('same_target.gyp', chdir='relocate/src')
25
26 expect = """\
27 Hello from main.c
28 Hello from prog1.in!
29 Hello from prog2.in!
30 """
31
32 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
33
34 test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
35
36
37 test.sleep()
38 contents = test.read(['relocate', 'src', 'prog1.in'])
39 contents = contents.replace('!', ' AGAIN!')
40 test.write(['relocate', 'src', 'prog1.in'], contents)
41
42 test.build('same_target.gyp', chdir='relocate/src')
43
44 expect = """\
45 Hello from main.c
46 Hello from prog1.in AGAIN!
47 Hello from prog2.in!
48 """
49
50 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
51
52 test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
53
54
55 test.sleep()
56 contents = test.read(['relocate', 'src', 'prog2.in'])
57 contents = contents.replace('!', ' AGAIN!')
58 test.write(['relocate', 'src', 'prog2.in'], contents)
59
60 test.build('same_target.gyp', chdir='relocate/src')
61
62 expect = """\
63 Hello from main.c
64 Hello from prog1.in AGAIN!
65 Hello from prog2.in AGAIN!
66 """
67
68 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
69
70 test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
71
72
73 test.pass_test()
+0
-12
mozc_build_tools/gyp/test/rules-rebuild/src/main.c less more
0 #include <stdio.h>
1
2 extern void prog1(void);
3 extern void prog2(void);
4
5 int main(int argc, char *argv[])
6 {
7 printf("Hello from main.c\n");
8 prog1();
9 prog2();
10 return 0;
11 }
+0
-15
mozc_build_tools/gyp/test/rules-rebuild/src/make-sources.py less more
0 #!/usr/bin/env python
1 import sys
2
3 assert len(sys.argv) == 4, sys.argv
4
5 (in_file, c_file, h_file) = sys.argv[1:]
6
7 def write_file(filename, contents):
8 open(filename, 'wb').write(contents)
9
10 write_file(c_file, open(in_file, 'rb').read())
11
12 write_file(h_file, '#define NAME "%s"\n' % in_file)
13
14 sys.exit(0)
+0
-7
mozc_build_tools/gyp/test/rules-rebuild/src/prog1.in less more
0 #include <stdio.h>
1 #include "prog1.h"
2
3 void prog1(void)
4 {
5 printf("Hello from %s!\n", NAME);
6 }
+0
-7
mozc_build_tools/gyp/test/rules-rebuild/src/prog2.in less more
0 #include <stdio.h>
1 #include "prog2.h"
2
3 void prog2(void)
4 {
5 printf("Hello from %s!\n", NAME);
6 }
+0
-32
mozc_build_tools/gyp/test/rules-rebuild/src/same_target.gyp less more
0 {
1 'targets': [
2 {
3 'target_name': 'program',
4 'type': 'executable',
5 'msvs_cygwin_shell': 0,
6 'sources': [
7 'main.c',
8 'prog1.in',
9 'prog2.in',
10 ],
11 'rules': [
12 {
13 'rule_name': 'make_sources',
14 'extension': 'in',
15 'msvs_external_rule': 1,
16 'inputs': [
17 'make-sources.py',
18 ],
19 'outputs': [
20 '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
21 '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h',
22 ],
23 'action': [
24 'python', '<(_inputs)', '<(RULE_INPUT_NAME)', '<@(_outputs)',
25 ],
26 'process_outputs_as_sources': 1,
27 },
28 ],
29 },
30 ],
31 }
+0
-34
mozc_build_tools/gyp/test/same-gyp-name/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Build a .gyp that depends on 2 gyp files with the same name.
8 """
9
10 import TestGyp
11
12 # This causes a problem on XCode (duplicate ID).
13 # See http://code.google.com/p/gyp/issues/detail?id=114
14 test = TestGyp.TestGyp(formats=['msvs', 'scons', 'make'])
15
16 test.run_gyp('all.gyp', chdir='src')
17
18 test.relocate('src', 'relocate/src')
19
20 test.build('all.gyp', test.ALL, chdir='relocate/src')
21
22 expect1 = """\
23 Hello from main1.cc
24 """
25
26 expect2 = """\
27 Hello from main2.cc
28 """
29
30 test.run_built_executable('program1', chdir='relocate/src', stdout=expect1)
31 test.run_built_executable('program2', chdir='relocate/src', stdout=expect2)
32
33 test.pass_test()
+0
-34
mozc_build_tools/gyp/test/same-gyp-name/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Build a .gyp that depends on 2 gyp files with the same name.
8 """
9
10 import TestGyp
11
12 # This causes a problem on XCode (duplicate ID).
13 # See http://code.google.com/p/gyp/issues/detail?id=114
14 test = TestGyp.TestGyp(formats=['msvs', 'scons', 'make'])
15
16 test.run_gyp('all.gyp', chdir='src')
17
18 test.relocate('src', 'relocate/src')
19
20 test.build('all.gyp', chdir='relocate/src')
21
22 expect1 = """\
23 Hello from main1.cc
24 """
25
26 expect2 = """\
27 Hello from main2.cc
28 """
29
30 test.run_built_executable('program1', chdir='relocate/src', stdout=expect1)
31 test.run_built_executable('program2', chdir='relocate/src', stdout=expect2)
32
33 test.pass_test()
+0
-16
mozc_build_tools/gyp/test/same-gyp-name/src/all.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'all_exes',
8 'type': 'none',
9 'dependencies': [
10 'subdir1/executable.gyp:*',
11 'subdir2/executable.gyp:*',
12 ],
13 },
14 ],
15 }
+0
-15
mozc_build_tools/gyp/test/same-gyp-name/src/subdir1/executable.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program1',
8 'type': 'executable',
9 'sources': [
10 'main1.cc',
11 ],
12 },
13 ],
14 }
+0
-6
mozc_build_tools/gyp/test/same-gyp-name/src/subdir1/main1.cc less more
0 #include <stdio.h>
1
2 int main() {
3 printf("Hello from main1.cc\n");
4 return 0;
5 }
+0
-15
mozc_build_tools/gyp/test/same-gyp-name/src/subdir2/executable.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program2',
8 'type': 'executable',
9 'sources': [
10 'main2.cc',
11 ],
12 },
13 ],
14 }
+0
-6
mozc_build_tools/gyp/test/same-gyp-name/src/subdir2/main2.cc less more
0 #include <stdio.h>
1
2 int main() {
3 printf("Hello from main2.cc\n");
4 return 0;
5 }
+0
-34
mozc_build_tools/gyp/test/same-name/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Build a .gyp with two targets that share a common .c source file.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('all.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('all.gyp', test.ALL, chdir='relocate/src')
19
20 expect1 = """\
21 Hello from prog1.c
22 Hello prog1 from func.c
23 """
24
25 expect2 = """\
26 Hello from prog2.c
27 Hello prog2 from func.c
28 """
29
30 test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1)
31 test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2)
32
33 test.pass_test()
+0
-34
mozc_build_tools/gyp/test/same-name/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Build a .gyp with two targets that share a common .c source file.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('all.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('all.gyp', chdir='relocate/src')
19
20 expect1 = """\
21 Hello from prog1.c
22 Hello prog1 from func.c
23 """
24
25 expect2 = """\
26 Hello from prog2.c
27 Hello prog2 from func.c
28 """
29
30 test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1)
31 test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2)
32
33 test.pass_test()
+0
-38
mozc_build_tools/gyp/test/same-name/src/all.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'prog1',
8 'type': 'executable',
9 'defines': [
10 'PROG="prog1"',
11 ],
12 'sources': [
13 'prog1.c',
14 'func.c',
15 # Uncomment to test same-named files in different directories,
16 # which Visual Studio doesn't support.
17 #'subdir1/func.c',
18 #'subdir2/func.c',
19 ],
20 },
21 {
22 'target_name': 'prog2',
23 'type': 'executable',
24 'defines': [
25 'PROG="prog2"',
26 ],
27 'sources': [
28 'prog2.c',
29 'func.c',
30 # Uncomment to test same-named files in different directories,
31 # which Visual Studio doesn't support.
32 #'subdir1/func.c',
33 #'subdir2/func.c',
34 ],
35 },
36 ],
37 }
+0
-6
mozc_build_tools/gyp/test/same-name/src/func.c less more
0 #include <stdio.h>
1
2 void func(void)
3 {
4 printf("Hello %s from func.c\n", PROG);
5 }
+0
-16
mozc_build_tools/gyp/test/same-name/src/prog1.c less more
0 #include <stdio.h>
1
2 extern void func(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from prog1.c\n");
7 func();
8 /*
9 * Uncomment to test same-named files in different directories,
10 * which Visual Studio doesn't support.
11 subdir1_func();
12 subdir2_func();
13 */
14 return 0;
15 }
+0
-16
mozc_build_tools/gyp/test/same-name/src/prog2.c less more
0 #include <stdio.h>
1
2 extern void func(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from prog2.c\n");
7 func();
8 /*
9 * Uncomment to test same-named files in different directories,
10 * which Visual Studio doesn't support.
11 subdir1_func();
12 subdir2_func();
13 */
14 return 0;
15 }
+0
-6
mozc_build_tools/gyp/test/same-name/src/subdir1/func.c less more
0 #include <stdio.h>
1
2 void subdir1_func(void)
3 {
4 printf("Hello %s from subdir1/func.c\n", PROG);
5 }
+0
-6
mozc_build_tools/gyp/test/same-name/src/subdir2/func.c less more
0 #include <stdio.h>
1
2 void subdir2_func(void)
3 {
4 printf("Hello %s from subdir2/func.c\n", PROG);
5 }
+0
-26
mozc_build_tools/gyp/test/scons_tools/gyptest-tools.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that a scons build picks up tools modules specified
8 via 'scons_tools' in the 'scons_settings' dictionary.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('tools.gyp')
16
17 test.build('tools.gyp', test.ALL)
18
19 if test.format == 'scons':
20 expect = "Hello, world!\n"
21 else:
22 expect = ""
23 test.run_built_executable('tools', stdout=expect)
24
25 test.pass_test()
+0
-10
mozc_build_tools/gyp/test/scons_tools/site_scons/site_tools/this_tool.py less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 # SCons "tool" module that simply sets a -D value.
5 def generate(env):
6 env['CPPDEFINES'] = ['THIS_TOOL']
7
8 def exists(env):
9 pass
+0
-13
mozc_build_tools/gyp/test/scons_tools/tools.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 int main(int argc, char *argv[])
7 {
8 #ifdef THIS_TOOL
9 printf("Hello, world!\n");
10 #endif
11 return 0;
12 }
+0
-18
mozc_build_tools/gyp/test/scons_tools/tools.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'tools',
8 'type': 'executable',
9 'sources': [
10 'tools.c',
11 ],
12 },
13 ],
14 'scons_settings': {
15 'tools': ['default', 'this_tool'],
16 },
17 }
+0
-39
mozc_build_tools/gyp/test/sibling/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 """
8
9 import TestGyp
10
11 test = TestGyp.TestGyp()
12
13 test.run_gyp('build/all.gyp', chdir='src')
14
15 test.build('build/all.gyp', test.ALL, chdir='src')
16
17 chdir = 'src/build'
18
19 # The top-level Makefile is in the directory where gyp was run.
20 # TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
21 # file? What about when passing in multiple .gyp files? Would sub-project
22 # Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
23 if test.format == 'make':
24 chdir = 'src'
25
26 if test.format == 'xcode':
27 chdir = 'src/prog1'
28 test.run_built_executable('prog1',
29 chdir=chdir,
30 stdout="Hello from prog1.c\n")
31
32 if test.format == 'xcode':
33 chdir = 'src/prog2'
34 test.run_built_executable('prog2',
35 chdir=chdir,
36 stdout="Hello from prog2.c\n")
37
38 test.pass_test()
+0
-41
mozc_build_tools/gyp/test/sibling/gyptest-relocate.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 """
8
9 import TestGyp
10
11 test = TestGyp.TestGyp()
12
13 test.run_gyp('build/all.gyp', chdir='src')
14
15 test.relocate('src', 'relocate/src')
16
17 test.build('build/all.gyp', test.ALL, chdir='relocate/src')
18
19 chdir = 'relocate/src/build'
20
21 # The top-level Makefile is in the directory where gyp was run.
22 # TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
23 # file? What about when passing in multiple .gyp files? Would sub-project
24 # Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
25 if test.format == 'make':
26 chdir = 'relocate/src'
27
28 if test.format == 'xcode':
29 chdir = 'relocate/src/prog1'
30 test.run_built_executable('prog1',
31 chdir=chdir,
32 stdout="Hello from prog1.c\n")
33
34 if test.format == 'xcode':
35 chdir = 'relocate/src/prog2'
36 test.run_built_executable('prog2',
37 chdir=chdir,
38 stdout="Hello from prog2.c\n")
39
40 test.pass_test()
+0
-17
mozc_build_tools/gyp/test/sibling/src/build/all.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 # TODO(sgk): a target name of 'all' leads to a scons dependency cycle
8 'target_name': 'All',
9 'type': 'none',
10 'dependencies': [
11 '../prog1/prog1.gyp:*',
12 '../prog2/prog2.gyp:*',
13 ],
14 },
15 ],
16 }
+0
-7
mozc_build_tools/gyp/test/sibling/src/prog1/prog1.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from prog1.c\n");
5 return 0;
6 }
+0
-15
mozc_build_tools/gyp/test/sibling/src/prog1/prog1.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'prog1',
8 'type': 'executable',
9 'sources': [
10 'prog1.c',
11 ],
12 },
13 ],
14 }
+0
-7
mozc_build_tools/gyp/test/sibling/src/prog2/prog2.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from prog2.c\n");
5 return 0;
6 }
+0
-15
mozc_build_tools/gyp/test/sibling/src/prog2/prog2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'prog2',
8 'type': 'executable',
9 'sources': [
10 'prog2.c',
11 ],
12 },
13 ],
14 }
+0
-36
mozc_build_tools/gyp/test/subdirectory/gyptest-SYMROOT-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a target and a subsidiary dependent target from a
8 .gyp file in a subdirectory, without specifying an explicit output build
9 directory, and using the generated solution or project file at the top
10 of the tree as the entry point.
11
12 The configuration sets the Xcode SYMROOT variable and uses --depth=
13 to make Xcode behave like the other build tools--that is, put all
14 built targets in a single output build directory at the top of the tree.
15 """
16
17 import TestGyp
18
19 test = TestGyp.TestGyp()
20
21 test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')
22
23 test.relocate('src', 'relocate/src')
24
25 # Suppress the test infrastructure's setting SYMROOT on the command line.
26 test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')
27
28 test.run_built_executable('prog1',
29 stdout="Hello from prog1.c\n",
30 chdir='relocate/src')
31 test.run_built_executable('prog2',
32 stdout="Hello from prog2.c\n",
33 chdir='relocate/src')
34
35 test.pass_test()
+0
-37
mozc_build_tools/gyp/test/subdirectory/gyptest-SYMROOT-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a target and a subsidiary dependent target from a
8 .gyp file in a subdirectory, without specifying an explicit output build
9 directory, and using the generated solution or project file at the top
10 of the tree as the entry point.
11
12 The configuration sets the Xcode SYMROOT variable and uses --depth=
13 to make Xcode behave like the other build tools--that is, put all
14 built targets in a single output build directory at the top of the tree.
15 """
16
17 import TestGyp
18
19 test = TestGyp.TestGyp()
20
21 test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')
22
23 test.relocate('src', 'relocate/src')
24
25 # Suppress the test infrastructure's setting SYMROOT on the command line.
26 test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')
27
28 test.run_built_executable('prog1',
29 stdout="Hello from prog1.c\n",
30 chdir='relocate/src')
31
32 test.run_built_executable('prog2',
33 stdout="Hello from prog2.c\n",
34 chdir='relocate/src')
35
36 test.pass_test()
+0
-33
mozc_build_tools/gyp/test/subdirectory/gyptest-subdir-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a subsidiary dependent target from a .gyp file in a
8 subdirectory, without specifying an explicit output build directory,
9 and using the subdirectory's solution or project file as the entry point.
10 """
11
12 import TestGyp
13 import errno
14
15 test = TestGyp.TestGyp()
16
17 test.run_gyp('prog1.gyp', chdir='src')
18
19 test.relocate('src', 'relocate/src')
20
21 chdir = 'relocate/src/subdir'
22 target = test.ALL
23
24 test.build('prog2.gyp', target, chdir=chdir)
25
26 test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir)
27
28 test.run_built_executable('prog2',
29 chdir=chdir,
30 stdout="Hello from prog2.c\n")
31
32 test.pass_test()
+0
-32
mozc_build_tools/gyp/test/subdirectory/gyptest-subdir-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a subsidiary dependent target from a .gyp file in a
8 subdirectory, without specifying an explicit output build directory,
9 and using the subdirectory's solution or project file as the entry point.
10 """
11
12 import TestGyp
13 import errno
14
15 test = TestGyp.TestGyp()
16
17 test.run_gyp('prog1.gyp', chdir='src')
18
19 test.relocate('src', 'relocate/src')
20
21 chdir = 'relocate/src/subdir'
22
23 test.build('prog2.gyp', chdir=chdir)
24
25 test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir)
26
27 test.run_built_executable('prog2',
28 chdir=chdir,
29 stdout="Hello from prog2.c\n")
30
31 test.pass_test()
+0
-25
mozc_build_tools/gyp/test/subdirectory/gyptest-subdir2-deep.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a project rooted several layers under src_dir works.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('prog3.gyp', chdir='src/subdir/subdir2')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('prog3.gyp', test.ALL, chdir='relocate/src/subdir/subdir2')
19
20 test.run_built_executable('prog3',
21 chdir='relocate/src/subdir/subdir2',
22 stdout="Hello from prog3.c\n")
23
24 test.pass_test()
+0
-43
mozc_build_tools/gyp/test/subdirectory/gyptest-top-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a target and a subsidiary dependent target from a
8 .gyp file in a subdirectory, without specifying an explicit output build
9 directory, and using the generated solution or project file at the top
10 of the tree as the entry point.
11
12 There is a difference here in the default behavior of the underlying
13 build tools. Specifically, when building the entire "solution", Xcode
14 puts the output of each project relative to the .xcodeproj directory,
15 while Visual Studio (and our implementations of SCons and Make) put it
16 in a build directory relative to the "solution"--that is, the entry-point
17 from which you built the entire tree.
18 """
19
20 import TestGyp
21
22 test = TestGyp.TestGyp()
23
24 test.run_gyp('prog1.gyp', chdir='src')
25
26 test.relocate('src', 'relocate/src')
27
28 test.build('prog1.gyp', test.ALL, chdir='relocate/src')
29
30 test.run_built_executable('prog1',
31 stdout="Hello from prog1.c\n",
32 chdir='relocate/src')
33
34 if test.format == 'xcode':
35 chdir = 'relocate/src/subdir'
36 else:
37 chdir = 'relocate/src'
38 test.run_built_executable('prog2',
39 chdir=chdir,
40 stdout="Hello from prog2.c\n")
41
42 test.pass_test()
+0
-43
mozc_build_tools/gyp/test/subdirectory/gyptest-top-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a target and a subsidiary dependent target from a
8 .gyp file in a subdirectory, without specifying an explicit output build
9 directory, and using the generated solution or project file at the top
10 of the tree as the entry point.
11
12 There is a difference here in the default behavior of the underlying
13 build tools. Specifically, when building the entire "solution", Xcode
14 puts the output of each project relative to the .xcodeproj directory,
15 while Visual Studio (and our implementations of SCons and Make) put it
16 in a build directory relative to the "solution"--that is, the entry-point
17 from which you built the entire tree.
18 """
19
20 import TestGyp
21
22 test = TestGyp.TestGyp()
23
24 test.run_gyp('prog1.gyp', chdir='src')
25
26 test.relocate('src', 'relocate/src')
27
28 test.build('prog1.gyp', chdir='relocate/src')
29
30 test.run_built_executable('prog1',
31 stdout="Hello from prog1.c\n",
32 chdir='relocate/src')
33
34 if test.format == 'xcode':
35 chdir = 'relocate/src/subdir'
36 else:
37 chdir = 'relocate/src'
38 test.run_built_executable('prog2',
39 chdir=chdir,
40 stdout="Hello from prog2.c\n")
41
42 test.pass_test()
+0
-7
mozc_build_tools/gyp/test/subdirectory/src/prog1.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from prog1.c\n");
5 return 0;
6 }
+0
-21
mozc_build_tools/gyp/test/subdirectory/src/prog1.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 'symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog1',
11 'type': 'executable',
12 'dependencies': [
13 'subdir/prog2.gyp:prog2',
14 ],
15 'sources': [
16 'prog1.c',
17 ],
18 },
19 ],
20 }
+0
-7
mozc_build_tools/gyp/test/subdirectory/src/subdir/prog2.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from prog2.c\n");
5 return 0;
6 }
+0
-18
mozc_build_tools/gyp/test/subdirectory/src/subdir/prog2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog2',
11 'type': 'executable',
12 'sources': [
13 'prog2.c',
14 ],
15 },
16 ],
17 }
+0
-7
mozc_build_tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from prog3.c\n");
5 return 0;
6 }
+0
-18
mozc_build_tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../../symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog3',
11 'type': 'executable',
12 'sources': [
13 'prog3.c',
14 ],
15 },
16 ],
17 }
+0
-16
mozc_build_tools/gyp/test/subdirectory/src/symroot.gypi less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'variables': {
6 'set_symroot%': 0,
7 },
8 'conditions': [
9 ['set_symroot == 1', {
10 'xcode_settings': {
11 'SYMROOT': '<(DEPTH)/build',
12 },
13 }],
14 ],
15 }
+0
-23
mozc_build_tools/gyp/test/toolsets/gyptest-toolsets.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that toolsets are correctly applied
8 """
9
10 import TestGyp
11
12 # Multiple toolsets are currently only supported by the make generator.
13 test = TestGyp.TestGyp(formats=['make'])
14
15 test.run_gyp('toolsets.gyp')
16
17 test.build('toolsets.gyp', test.ALL)
18
19 test.run_built_executable('host-main', stdout="Host\n")
20 test.run_built_executable('target-main', stdout="Target\n")
21
22 test.pass_test()
+0
-11
mozc_build_tools/gyp/test/toolsets/main.cc less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 const char *GetToolset();
7
8 int main(int argc, char *argv[]) {
9 printf("%s\n", GetToolset());
10 }
+0
-11
mozc_build_tools/gyp/test/toolsets/toolsets.cc less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 const char *GetToolset() {
5 #ifdef TARGET
6 return "Target";
7 #else
8 return "Host";
9 #endif
10 }
+0
-38
mozc_build_tools/gyp/test/toolsets/toolsets.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'target_conditions': [
7 ['_toolset=="target"', {'defines': ['TARGET']}]
8 ]
9 },
10 'targets': [
11 {
12 'target_name': 'toolsets',
13 'type': 'static_library',
14 'toolsets': ['target', 'host'],
15 'sources': [
16 'toolsets.cc',
17 ],
18 },
19 {
20 'target_name': 'host-main',
21 'type': 'executable',
22 'toolsets': ['host'],
23 'dependencies': ['toolsets'],
24 'sources': [
25 'main.cc',
26 ],
27 },
28 {
29 'target_name': 'target-main',
30 'type': 'executable',
31 'dependencies': ['toolsets'],
32 'sources': [
33 'main.cc',
34 ],
35 },
36 ],
37 }
+0
-128
mozc_build_tools/gyp/test/variables/commands/commands-repeated.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 # This is a simple test file to make sure that variable substitution
5 # happens correctly. Run "run_tests.py" using python to generate the
6 # output from this gyp file.
7
8 {
9 'variables': {
10 'pi': 'import math; print math.pi',
11 'third_letters': "<(other_letters)HIJK",
12 'letters_list': 'ABCD',
13 'other_letters': '<(letters_list)EFG',
14 'check_included': '<(included_variable)',
15 'check_lists': [
16 '<(included_variable)',
17 '<(third_letters)',
18 ],
19 'check_int': 5,
20 'check_str_int': '6',
21 'check_list_int': [
22 7,
23 '8',
24 9,
25 ],
26 'not_int_1': ' 10',
27 'not_int_2': '11 ',
28 'not_int_3': '012',
29 'not_int_4': '13.0',
30 'not_int_5': '+14',
31 'negative_int': '-15',
32 'zero_int': '0',
33 },
34 'includes': [
35 'commands.gypi',
36 ],
37 'targets': [
38 {
39 'target_name': 'foo',
40 'type': 'none',
41 'variables': {
42 'var1': '<!(["python", "-c", "<(pi)"])',
43 'var2': '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")',
44 'var3': '<!(python -c "print \'<(letters_list)\'")',
45 'var4': '<(<!(python -c "print \'letters_list\'"))',
46 'var5': 'letters_',
47 'var6': 'list',
48 'var7': '<(check_int)',
49 'var8': '<(check_int)blah',
50 'var9': '<(check_str_int)',
51 'var10': '<(check_list_int)',
52 'var11': ['<@(check_list_int)'],
53 'var12': '<(not_int_1)',
54 'var13': '<(not_int_2)',
55 'var14': '<(not_int_3)',
56 'var15': '<(not_int_4)',
57 'var16': '<(not_int_5)',
58 'var17': '<(negative_int)',
59 'var18': '<(zero_int)',
60 # A second set with different names to make sure they only execute the
61 # commands once.
62 'var1prime': '<!(["python", "-c", "<(pi)"])',
63 'var2prime': '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")',
64 'var3prime': '<!(python -c "print \'<(letters_list)\'")',
65 'var4prime': '<(<!(python -c "print \'letters_list\'"))',
66 },
67 'actions': [
68 {
69 'action_name': 'test_action',
70 'variables': {
71 'var7': '<!(echo <(var5)<(var6))',
72 },
73 'inputs' : [
74 '<(var2)',
75 ],
76 'outputs': [
77 '<(var4)',
78 '<(var7)',
79 ],
80 'action': [
81 'echo',
82 '<(_inputs)',
83 '<(_outputs)',
84 ],
85 },
86 # Again with the same vars to make sure the right things happened.
87 {
88 'action_name': 'test_action_prime',
89 'variables': {
90 'var7': '<!(echo <(var5)<(var6))',
91 },
92 'inputs' : [
93 '<(var2)',
94 ],
95 'outputs': [
96 '<(var4)',
97 '<(var7)',
98 ],
99 'action': [
100 'echo',
101 '<(_inputs)',
102 '<(_outputs)',
103 ],
104 },
105 # And one more time with the other vars...
106 {
107 'action_name': 'test_action_prime_prime',
108 'variables': {
109 'var7': '<!(echo <(var5)<(var6))',
110 },
111 'inputs' : [
112 '<(var2prime)',
113 ],
114 'outputs': [
115 '<(var4prime)',
116 '<(var7)',
117 ],
118 'action': [
119 'echo',
120 '<(_inputs)',
121 '<(_outputs)',
122 ],
123 },
124 ],
125 },
126 ],
127 }
+0
-404
mozc_build_tools/gyp/test/variables/commands/commands-repeated.gyp.stdout less more
0 GENERAL: running with these options:
1 GENERAL: msvs_version: None
2 GENERAL: suffix: ''
3 GENERAL: includes: None
4 GENERAL: use_environment: True
5 GENERAL: depth: '.'
6 GENERAL: generator_flags: []
7 GENERAL: generator_output: None
8 GENERAL: formats: ['gypd']
9 GENERAL: debug: ['variables', 'general']
10 GENERAL: circular_check: True
11 GENERAL: check: None
12 GENERAL: defines: None
13 GENERAL: cmdline_default_variables: {}
14 GENERAL: generator_flags: {}
15 VARIABLES: Expanding '0' to 0
16 VARIABLES: Expanding '11 ' to '11 '
17 VARIABLES: Expanding '+14' to '+14'
18 VARIABLES: Expanding '-15' to -15
19 VARIABLES: Expanding ' 10' to ' 10'
20 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
21 VARIABLES: Expanding 'letters_list' to 'letters_list'
22 VARIABLES: Found output 'ABCDEFG', recursing.
23 VARIABLES: Expanding 'ABCDEFG' to 'ABCDEFG'
24 VARIABLES: Expanding '<(letters_list)EFG' to 'ABCDEFG'
25 VARIABLES: Expanding '012' to '012'
26 VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
27 VARIABLES: Expanding 'other_letters' to 'other_letters'
28 VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
29 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
30 VARIABLES: Expanding 'letters_list' to 'letters_list'
31 VARIABLES: Found output 'ABCDEFGHIJK', recursing.
32 VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
33 VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
34 VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
35 VARIABLES: Expanding 'XYZ' to 'XYZ'
36 VARIABLES: Expanding 'ABCD' to 'ABCD'
37 VARIABLES: Expanding '13.0' to '13.0'
38 VARIABLES: Expanding 'import math; print math.pi' to 'import math; print math.pi'
39 VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
40 VARIABLES: Expanding 'included_variable' to 'included_variable'
41 VARIABLES: Found output 'XYZ', recursing.
42 VARIABLES: Expanding 'XYZ' to 'XYZ'
43 VARIABLES: Expanding '<(included_variable)' to 'XYZ'
44 VARIABLES: Expanding '6' to 6
45 VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
46 VARIABLES: Expanding 'included_variable' to 'included_variable'
47 VARIABLES: Found output 'XYZ', recursing.
48 VARIABLES: Expanding 'XYZ' to 'XYZ'
49 VARIABLES: Expanding '<(included_variable)' to 'XYZ'
50 VARIABLES: Matches: {'content': 'third_letters', 'is_array': '', 'type': '<', 'replace': '<(third_letters)'}
51 VARIABLES: Expanding 'third_letters' to 'third_letters'
52 VARIABLES: Found output '<(other_letters)HIJK', recursing.
53 VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
54 VARIABLES: Expanding 'other_letters' to 'other_letters'
55 VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
56 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
57 VARIABLES: Expanding 'letters_list' to 'letters_list'
58 VARIABLES: Found output 'ABCDEFGHIJK', recursing.
59 VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
60 VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
61 VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
62 VARIABLES: Expanding '<(third_letters)' to 'ABCDEFGHIJK'
63 VARIABLES: Expanding '8' to 8
64 VARIABLES: Expanding '.' to '.'
65 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
66 VARIABLES: Expanding 'letters_list' to 'letters_list'
67 VARIABLES: Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<!(python -c "<(pi)'}
68 VARIABLES: Matches: {'content': 'python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "<(pi)'}
69 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
70 VARIABLES: Expanding 'pi' to 'pi'
71 VARIABLES: Found output 'python -c "import math; print math.pi"', recursing.
72 VARIABLES: Expanding 'python -c "import math; print math.pi"' to 'python -c "import math; print math.pi"'
73 VARIABLES: Expanding 'python -c "<(pi)"' to 'python -c "import math; print math.pi"'
74 VARIABLES: Executing command 'python -c "import math; print math.pi"' in directory 'None'
75 VARIABLES: Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
76 VARIABLES: Expanding 'python -c "print \'3.14159265359 ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
77 VARIABLES: Expanding 'python -c "print \'<!(python -c "<(pi)") ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
78 VARIABLES: Executing command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
79 VARIABLES: Found output '3.14159265359 ABCD', recursing.
80 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
81 VARIABLES: Expanding '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")' to '3.14159265359 ABCD'
82 VARIABLES: Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'type': '<!', 'replace': '<!(["python", "-c", "<(pi)'}
83 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
84 VARIABLES: Expanding 'pi' to 'pi'
85 VARIABLES: Found output '["python", "-c", "import math; print math.pi"]', recursing.
86 VARIABLES: Expanding '["python", "-c", "import math; print math.pi"]' to '["python", "-c", "import math; print math.pi"]'
87 VARIABLES: Expanding '["python", "-c", "<(pi)"]' to '["python", "-c", "import math; print math.pi"]'
88 VARIABLES: Executing command '['python', '-c', 'import math; print math.pi']' in directory 'None'
89 VARIABLES: Found output '3.14159265359', recursing.
90 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
91 VARIABLES: Expanding '<!(["python", "-c", "<(pi)"])' to '3.14159265359'
92 VARIABLES: Expanding 'letters_' to 'letters_'
93 VARIABLES: Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'type': '<', 'replace': '<(<!(python -c "print \'letters_list\'")'}
94 VARIABLES: Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'letters_list\'")'}
95 VARIABLES: Expanding 'python -c "print \'letters_list\'"' to 'python -c "print \'letters_list\'"'
96 VARIABLES: Executing command 'python -c "print 'letters_list'"' in directory 'None'
97 VARIABLES: Found output 'letters_list', recursing.
98 VARIABLES: Expanding 'letters_list' to 'letters_list'
99 VARIABLES: Expanding '<!(python -c "print \'letters_list\'")' to 'letters_list'
100 VARIABLES: Found output 'ABCD', recursing.
101 VARIABLES: Expanding 'ABCD' to 'ABCD'
102 VARIABLES: Expanding '<(<!(python -c "print \'letters_list\'"))' to 'ABCD'
103 VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
104 VARIABLES: Expanding 'check_int' to 'check_int'
105 VARIABLES: Found output '5', recursing.
106 VARIABLES: Expanding '5' to 5
107 VARIABLES: Expanding '<(check_int)' to 5
108 VARIABLES: Expanding 'list' to 'list'
109 VARIABLES: Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'type': '<!', 'replace': '<!(["python", "-c", "<(pi)'}
110 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
111 VARIABLES: Expanding 'pi' to 'pi'
112 VARIABLES: Found output '["python", "-c", "import math; print math.pi"]', recursing.
113 VARIABLES: Expanding '["python", "-c", "import math; print math.pi"]' to '["python", "-c", "import math; print math.pi"]'
114 VARIABLES: Expanding '["python", "-c", "<(pi)"]' to '["python", "-c", "import math; print math.pi"]'
115 VARIABLES: Had cache value for command '['python', '-c', 'import math; print math.pi']' in directory 'None'
116 VARIABLES: Found output '3.14159265359', recursing.
117 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
118 VARIABLES: Expanding '<!(["python", "-c", "<(pi)"])' to '3.14159265359'
119 VARIABLES: Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<(letters_list)'}
120 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
121 VARIABLES: Expanding 'letters_list' to 'letters_list'
122 VARIABLES: Found output 'python -c "print \'ABCD\'"', recursing.
123 VARIABLES: Expanding 'python -c "print \'ABCD\'"' to 'python -c "print \'ABCD\'"'
124 VARIABLES: Expanding 'python -c "print \'<(letters_list)\'"' to 'python -c "print \'ABCD\'"'
125 VARIABLES: Executing command 'python -c "print 'ABCD'"' in directory 'None'
126 VARIABLES: Found output 'ABCD', recursing.
127 VARIABLES: Expanding 'ABCD' to 'ABCD'
128 VARIABLES: Expanding '<!(python -c "print \'<(letters_list)\'")' to 'ABCD'
129 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
130 VARIABLES: Expanding 'letters_list' to 'letters_list'
131 VARIABLES: Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<!(python -c "<(pi)'}
132 VARIABLES: Matches: {'content': 'python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "<(pi)'}
133 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
134 VARIABLES: Expanding 'pi' to 'pi'
135 VARIABLES: Found output 'python -c "import math; print math.pi"', recursing.
136 VARIABLES: Expanding 'python -c "import math; print math.pi"' to 'python -c "import math; print math.pi"'
137 VARIABLES: Expanding 'python -c "<(pi)"' to 'python -c "import math; print math.pi"'
138 VARIABLES: Had cache value for command 'python -c "import math; print math.pi"' in directory 'None'
139 VARIABLES: Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
140 VARIABLES: Expanding 'python -c "print \'3.14159265359 ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
141 VARIABLES: Expanding 'python -c "print \'<!(python -c "<(pi)") ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
142 VARIABLES: Had cache value for command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
143 VARIABLES: Found output '3.14159265359 ABCD', recursing.
144 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
145 VARIABLES: Expanding '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")' to '3.14159265359 ABCD'
146 VARIABLES: Matches: {'content': 'check_str_int', 'is_array': '', 'type': '<', 'replace': '<(check_str_int)'}
147 VARIABLES: Expanding 'check_str_int' to 'check_str_int'
148 VARIABLES: Found output '6', recursing.
149 VARIABLES: Expanding '6' to 6
150 VARIABLES: Expanding '<(check_str_int)' to 6
151 VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
152 VARIABLES: Expanding 'check_int' to 'check_int'
153 VARIABLES: Found output '5blah', recursing.
154 VARIABLES: Expanding '5blah' to '5blah'
155 VARIABLES: Expanding '<(check_int)blah' to '5blah'
156 VARIABLES: Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'type': '<', 'replace': '<(<!(python -c "print \'letters_list\'")'}
157 VARIABLES: Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'letters_list\'")'}
158 VARIABLES: Expanding 'python -c "print \'letters_list\'"' to 'python -c "print \'letters_list\'"'
159 VARIABLES: Had cache value for command 'python -c "print 'letters_list'"' in directory 'None'
160 VARIABLES: Found output 'letters_list', recursing.
161 VARIABLES: Expanding 'letters_list' to 'letters_list'
162 VARIABLES: Expanding '<!(python -c "print \'letters_list\'")' to 'letters_list'
163 VARIABLES: Found output 'ABCD', recursing.
164 VARIABLES: Expanding 'ABCD' to 'ABCD'
165 VARIABLES: Expanding '<(<!(python -c "print \'letters_list\'"))' to 'ABCD'
166 VARIABLES: Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<(letters_list)'}
167 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
168 VARIABLES: Expanding 'letters_list' to 'letters_list'
169 VARIABLES: Found output 'python -c "print \'ABCD\'"', recursing.
170 VARIABLES: Expanding 'python -c "print \'ABCD\'"' to 'python -c "print \'ABCD\'"'
171 VARIABLES: Expanding 'python -c "print \'<(letters_list)\'"' to 'python -c "print \'ABCD\'"'
172 VARIABLES: Had cache value for command 'python -c "print 'ABCD'"' in directory 'None'
173 VARIABLES: Found output 'ABCD', recursing.
174 VARIABLES: Expanding 'ABCD' to 'ABCD'
175 VARIABLES: Expanding '<!(python -c "print \'<(letters_list)\'")' to 'ABCD'
176 VARIABLES: Matches: {'content': 'not_int_4', 'is_array': '', 'type': '<', 'replace': '<(not_int_4)'}
177 VARIABLES: Expanding 'not_int_4' to 'not_int_4'
178 VARIABLES: Found output '13.0', recursing.
179 VARIABLES: Expanding '13.0' to '13.0'
180 VARIABLES: Expanding '<(not_int_4)' to '13.0'
181 VARIABLES: Matches: {'content': 'not_int_3', 'is_array': '', 'type': '<', 'replace': '<(not_int_3)'}
182 VARIABLES: Expanding 'not_int_3' to 'not_int_3'
183 VARIABLES: Found output '012', recursing.
184 VARIABLES: Expanding '012' to '012'
185 VARIABLES: Expanding '<(not_int_3)' to '012'
186 VARIABLES: Matches: {'content': 'negative_int', 'is_array': '', 'type': '<', 'replace': '<(negative_int)'}
187 VARIABLES: Expanding 'negative_int' to 'negative_int'
188 VARIABLES: Found output '-15', recursing.
189 VARIABLES: Expanding '-15' to -15
190 VARIABLES: Expanding '<(negative_int)' to -15
191 VARIABLES: Matches: {'content': 'not_int_5', 'is_array': '', 'type': '<', 'replace': '<(not_int_5)'}
192 VARIABLES: Expanding 'not_int_5' to 'not_int_5'
193 VARIABLES: Found output '+14', recursing.
194 VARIABLES: Expanding '+14' to '+14'
195 VARIABLES: Expanding '<(not_int_5)' to '+14'
196 VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<', 'replace': '<(check_list_int)'}
197 VARIABLES: Expanding 'check_list_int' to 'check_list_int'
198 VARIABLES: Found output '7 8 9', recursing.
199 VARIABLES: Expanding '7 8 9' to '7 8 9'
200 VARIABLES: Expanding '<(check_list_int)' to '7 8 9'
201 VARIABLES: Matches: {'content': 'not_int_2', 'is_array': '', 'type': '<', 'replace': '<(not_int_2)'}
202 VARIABLES: Expanding 'not_int_2' to 'not_int_2'
203 VARIABLES: Found output '11 ', recursing.
204 VARIABLES: Expanding '11 ' to '11 '
205 VARIABLES: Expanding '<(not_int_2)' to '11 '
206 VARIABLES: Matches: {'content': 'not_int_1', 'is_array': '', 'type': '<', 'replace': '<(not_int_1)'}
207 VARIABLES: Expanding 'not_int_1' to 'not_int_1'
208 VARIABLES: Found output ' 10', recursing.
209 VARIABLES: Expanding ' 10' to ' 10'
210 VARIABLES: Expanding '<(not_int_1)' to ' 10'
211 VARIABLES: Matches: {'content': 'zero_int', 'is_array': '', 'type': '<', 'replace': '<(zero_int)'}
212 VARIABLES: Expanding 'zero_int' to 'zero_int'
213 VARIABLES: Found output '0', recursing.
214 VARIABLES: Expanding '0' to 0
215 VARIABLES: Expanding '<(zero_int)' to 0
216 VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<@', 'replace': '<@(check_list_int)'}
217 VARIABLES: Expanding 'check_list_int' to 'check_list_int'
218 VARIABLES: Found output [7, 8, 9], recursing.
219 VARIABLES: Expanding 7 to 7
220 VARIABLES: Expanding 8 to 8
221 VARIABLES: Expanding 9 to 9
222 VARIABLES: Expanding '<@(check_list_int)' to [7, 8, 9]
223 VARIABLES: Expanding 'foo' to 'foo'
224 VARIABLES: Expanding 'target' to 'target'
225 VARIABLES: Expanding 'none' to 'none'
226 VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
227 VARIABLES: Expanding 'var6' to 'var6'
228 VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
229 VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
230 VARIABLES: Expanding 'var5' to 'var5'
231 VARIABLES: Found output 'echo letters_list', recursing.
232 VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
233 VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
234 VARIABLES: Executing command 'echo letters_list' in directory 'None'
235 VARIABLES: Found output 'letters_list', recursing.
236 VARIABLES: Expanding 'letters_list' to 'letters_list'
237 VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
238 VARIABLES: Expanding 'test_action' to 'test_action'
239 VARIABLES: Expanding 'echo' to 'echo'
240 VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
241 VARIABLES: Expanding '_inputs' to '_inputs'
242 VARIABLES: Matches: {'content': 'var2', 'is_array': '', 'type': '<', 'replace': '<(var2)'}
243 VARIABLES: Expanding 'var2' to 'var2'
244 VARIABLES: Found output '3.14159265359 ABCD', recursing.
245 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
246 VARIABLES: Expanding '<(var2)' to '3.14159265359 ABCD'
247 VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
248 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
249 VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
250 VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
251 VARIABLES: Expanding '_outputs' to '_outputs'
252 VARIABLES: Matches: {'content': 'var4', 'is_array': '', 'type': '<', 'replace': '<(var4)'}
253 VARIABLES: Expanding 'var4' to 'var4'
254 VARIABLES: Found output 'ABCD', recursing.
255 VARIABLES: Expanding 'ABCD' to 'ABCD'
256 VARIABLES: Expanding '<(var4)' to 'ABCD'
257 VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
258 VARIABLES: Expanding 'var7' to 'var7'
259 VARIABLES: Found output 'letters_list', recursing.
260 VARIABLES: Expanding 'letters_list' to 'letters_list'
261 VARIABLES: Expanding '<(var7)' to 'letters_list'
262 VARIABLES: Found output 'ABCD letters_list', recursing.
263 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
264 VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
265 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
266 VARIABLES: Expanding 'ABCD' to 'ABCD'
267 VARIABLES: Expanding 'letters_list' to 'letters_list'
268 VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
269 VARIABLES: Expanding 'var6' to 'var6'
270 VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
271 VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
272 VARIABLES: Expanding 'var5' to 'var5'
273 VARIABLES: Found output 'echo letters_list', recursing.
274 VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
275 VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
276 VARIABLES: Had cache value for command 'echo letters_list' in directory 'None'
277 VARIABLES: Found output 'letters_list', recursing.
278 VARIABLES: Expanding 'letters_list' to 'letters_list'
279 VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
280 VARIABLES: Expanding 'test_action_prime' to 'test_action_prime'
281 VARIABLES: Expanding 'echo' to 'echo'
282 VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
283 VARIABLES: Expanding '_inputs' to '_inputs'
284 VARIABLES: Matches: {'content': 'var2', 'is_array': '', 'type': '<', 'replace': '<(var2)'}
285 VARIABLES: Expanding 'var2' to 'var2'
286 VARIABLES: Found output '3.14159265359 ABCD', recursing.
287 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
288 VARIABLES: Expanding '<(var2)' to '3.14159265359 ABCD'
289 VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
290 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
291 VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
292 VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
293 VARIABLES: Expanding '_outputs' to '_outputs'
294 VARIABLES: Matches: {'content': 'var4', 'is_array': '', 'type': '<', 'replace': '<(var4)'}
295 VARIABLES: Expanding 'var4' to 'var4'
296 VARIABLES: Found output 'ABCD', recursing.
297 VARIABLES: Expanding 'ABCD' to 'ABCD'
298 VARIABLES: Expanding '<(var4)' to 'ABCD'
299 VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
300 VARIABLES: Expanding 'var7' to 'var7'
301 VARIABLES: Found output 'letters_list', recursing.
302 VARIABLES: Expanding 'letters_list' to 'letters_list'
303 VARIABLES: Expanding '<(var7)' to 'letters_list'
304 VARIABLES: Found output 'ABCD letters_list', recursing.
305 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
306 VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
307 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
308 VARIABLES: Expanding 'ABCD' to 'ABCD'
309 VARIABLES: Expanding 'letters_list' to 'letters_list'
310 VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
311 VARIABLES: Expanding 'var6' to 'var6'
312 VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
313 VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
314 VARIABLES: Expanding 'var5' to 'var5'
315 VARIABLES: Found output 'echo letters_list', recursing.
316 VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
317 VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
318 VARIABLES: Had cache value for command 'echo letters_list' in directory 'None'
319 VARIABLES: Found output 'letters_list', recursing.
320 VARIABLES: Expanding 'letters_list' to 'letters_list'
321 VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
322 VARIABLES: Expanding 'test_action_prime_prime' to 'test_action_prime_prime'
323 VARIABLES: Expanding 'echo' to 'echo'
324 VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
325 VARIABLES: Expanding '_inputs' to '_inputs'
326 VARIABLES: Matches: {'content': 'var2prime', 'is_array': '', 'type': '<', 'replace': '<(var2prime)'}
327 VARIABLES: Expanding 'var2prime' to 'var2prime'
328 VARIABLES: Found output '3.14159265359 ABCD', recursing.
329 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
330 VARIABLES: Expanding '<(var2prime)' to '3.14159265359 ABCD'
331 VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
332 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
333 VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
334 VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
335 VARIABLES: Expanding '_outputs' to '_outputs'
336 VARIABLES: Matches: {'content': 'var4prime', 'is_array': '', 'type': '<', 'replace': '<(var4prime)'}
337 VARIABLES: Expanding 'var4prime' to 'var4prime'
338 VARIABLES: Found output 'ABCD', recursing.
339 VARIABLES: Expanding 'ABCD' to 'ABCD'
340 VARIABLES: Expanding '<(var4prime)' to 'ABCD'
341 VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
342 VARIABLES: Expanding 'var7' to 'var7'
343 VARIABLES: Found output 'letters_list', recursing.
344 VARIABLES: Expanding 'letters_list' to 'letters_list'
345 VARIABLES: Expanding '<(var7)' to 'letters_list'
346 VARIABLES: Found output 'ABCD letters_list', recursing.
347 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
348 VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
349 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
350 VARIABLES: Expanding 'ABCD' to 'ABCD'
351 VARIABLES: Expanding 'letters_list' to 'letters_list'
352 VARIABLES: Expanding 'dummy' to 'dummy'
353 VARIABLES: Expanding 'target' to 'target'
354 VARIABLES: Expanding 'none' to 'none'
355 VARIABLES: Expanding 'commands-repeated.gyp' to 'commands-repeated.gyp'
356 VARIABLES: Expanding 'commands.gypi' to 'commands.gypi'
357 VARIABLES: Expanding 'dummy' to 'dummy'
358 VARIABLES: Expanding 'target' to 'target'
359 VARIABLES: Expanding 'none' to 'none'
360 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
361 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
362 VARIABLES: Expanding 'letters_' to 'letters_'
363 VARIABLES: Expanding 'ABCD' to 'ABCD'
364 VARIABLES: Expanding 'list' to 'list'
365 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
366 VARIABLES: Expanding 'ABCD' to 'ABCD'
367 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
368 VARIABLES: Expanding '5blah' to '5blah'
369 VARIABLES: Expanding 'ABCD' to 'ABCD'
370 VARIABLES: Expanding 'ABCD' to 'ABCD'
371 VARIABLES: Expanding '13.0' to '13.0'
372 VARIABLES: Expanding '012' to '012'
373 VARIABLES: Expanding '+14' to '+14'
374 VARIABLES: Expanding '7 8 9' to '7 8 9'
375 VARIABLES: Expanding '11 ' to '11 '
376 VARIABLES: Expanding ' 10' to ' 10'
377 VARIABLES: Expanding 'foo' to 'foo'
378 VARIABLES: Expanding 'target' to 'target'
379 VARIABLES: Expanding 'none' to 'none'
380 VARIABLES: Expanding 'letters_list' to 'letters_list'
381 VARIABLES: Expanding 'test_action' to 'test_action'
382 VARIABLES: Expanding 'echo' to 'echo'
383 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
384 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
385 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
386 VARIABLES: Expanding 'ABCD' to 'ABCD'
387 VARIABLES: Expanding 'letters_list' to 'letters_list'
388 VARIABLES: Expanding 'letters_list' to 'letters_list'
389 VARIABLES: Expanding 'test_action_prime' to 'test_action_prime'
390 VARIABLES: Expanding 'echo' to 'echo'
391 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
392 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
393 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
394 VARIABLES: Expanding 'ABCD' to 'ABCD'
395 VARIABLES: Expanding 'letters_list' to 'letters_list'
396 VARIABLES: Expanding 'letters_list' to 'letters_list'
397 VARIABLES: Expanding 'test_action_prime_prime' to 'test_action_prime_prime'
398 VARIABLES: Expanding 'echo' to 'echo'
399 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
400 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
401 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
402 VARIABLES: Expanding 'ABCD' to 'ABCD'
403 VARIABLES: Expanding 'letters_list' to 'letters_list'
+0
-72
mozc_build_tools/gyp/test/variables/commands/commands-repeated.gypd.golden less more
0 {'_DEPTH': '.',
1 'included_files': ['commands-repeated.gyp', 'commands.gypi'],
2 'targets': [{'actions': [{'action': ['echo',
3 '"3.14159265359 ABCD"',
4 'ABCD letters_list'],
5 'action_name': 'test_action',
6 'inputs': ['3.14159265359 ABCD'],
7 'outputs': ['ABCD', 'letters_list'],
8 'variables': {'var7': 'letters_list'}},
9 {'action': ['echo',
10 '"3.14159265359 ABCD"',
11 'ABCD letters_list'],
12 'action_name': 'test_action_prime',
13 'inputs': ['3.14159265359 ABCD'],
14 'outputs': ['ABCD', 'letters_list'],
15 'variables': {'var7': 'letters_list'}},
16 {'action': ['echo',
17 '"3.14159265359 ABCD"',
18 'ABCD letters_list'],
19 'action_name': 'test_action_prime_prime',
20 'inputs': ['3.14159265359 ABCD'],
21 'outputs': ['ABCD', 'letters_list'],
22 'variables': {'var7': 'letters_list'}}],
23 'configurations': {'Default': {}},
24 'default_configuration': 'Default',
25 'target_name': 'foo',
26 'toolset': 'target',
27 'type': 'none',
28 'variables': {'var1': '3.14159265359',
29 'var10': '7 8 9',
30 'var11': ['7', '8', '9'],
31 'var12': ' 10',
32 'var13': '11 ',
33 'var14': '012',
34 'var15': '13.0',
35 'var16': '+14',
36 'var17': '-15',
37 'var18': '0',
38 'var1prime': '3.14159265359',
39 'var2': '3.14159265359 ABCD',
40 'var2prime': '3.14159265359 ABCD',
41 'var3': 'ABCD',
42 'var3prime': 'ABCD',
43 'var4': 'ABCD',
44 'var4prime': 'ABCD',
45 'var5': 'letters_',
46 'var6': 'list',
47 'var7': '5',
48 'var8': '5blah',
49 'var9': '6'}},
50 {'configurations': {'Default': {}},
51 'default_configuration': 'Default',
52 'target_name': 'dummy',
53 'toolset': 'target',
54 'type': 'none'}],
55 'variables': {'check_included': 'XYZ',
56 'check_int': '5',
57 'check_list_int': ['7', '8', '9'],
58 'check_lists': ['XYZ', 'ABCDEFGHIJK'],
59 'check_str_int': '6',
60 'included_variable': 'XYZ',
61 'letters_list': 'ABCD',
62 'negative_int': '-15',
63 'not_int_1': ' 10',
64 'not_int_2': '11 ',
65 'not_int_3': '012',
66 'not_int_4': '13.0',
67 'not_int_5': '+14',
68 'other_letters': 'ABCDEFG',
69 'pi': 'import math; print math.pi',
70 'third_letters': 'ABCDEFGHIJK',
71 'zero_int': '0'}}
+0
-84
mozc_build_tools/gyp/test/variables/commands/commands.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 # This is a simple test file to make sure that variable substitution
5 # happens correctly. Run "run_tests.py" using python to generate the
6 # output from this gyp file.
7
8 {
9 'variables': {
10 'pi': 'import math; print math.pi',
11 'third_letters': "<(other_letters)HIJK",
12 'letters_list': 'ABCD',
13 'other_letters': '<(letters_list)EFG',
14 'check_included': '<(included_variable)',
15 'check_lists': [
16 '<(included_variable)',
17 '<(third_letters)',
18 ],
19 'check_int': 5,
20 'check_str_int': '6',
21 'check_list_int': [
22 7,
23 '8',
24 9,
25 ],
26 'not_int_1': ' 10',
27 'not_int_2': '11 ',
28 'not_int_3': '012',
29 'not_int_4': '13.0',
30 'not_int_5': '+14',
31 'negative_int': '-15',
32 'zero_int': '0',
33 },
34 'includes': [
35 'commands.gypi',
36 ],
37 'targets': [
38 {
39 'target_name': 'foo',
40 'type': 'none',
41 'variables': {
42 'var1': '<!(["python", "-c", "<(pi)"])',
43 'var2': '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")',
44 'var3': '<!(python -c "print \'<(letters_list)\'")',
45 'var4': '<(<!(python -c "print \'letters_list\'"))',
46 'var5': 'letters_',
47 'var6': 'list',
48 'var7': '<(check_int)',
49 'var8': '<(check_int)blah',
50 'var9': '<(check_str_int)',
51 'var10': '<(check_list_int)',
52 'var11': ['<@(check_list_int)'],
53 'var12': '<(not_int_1)',
54 'var13': '<(not_int_2)',
55 'var14': '<(not_int_3)',
56 'var15': '<(not_int_4)',
57 'var16': '<(not_int_5)',
58 'var17': '<(negative_int)',
59 'var18': '<(zero_int)',
60 },
61 'actions': [
62 {
63 'action_name': 'test_action',
64 'variables': {
65 'var7': '<!(echo <(var5)<(var6))',
66 },
67 'inputs' : [
68 '<(var2)',
69 ],
70 'outputs': [
71 '<(var4)',
72 '<(var7)',
73 ],
74 'action': [
75 'echo',
76 '<(_inputs)',
77 '<(_outputs)',
78 ],
79 },
80 ],
81 },
82 ],
83 }
+0
-253
mozc_build_tools/gyp/test/variables/commands/commands.gyp.ignore-env.stdout less more
0 GENERAL: running with these options:
1 GENERAL: msvs_version: None
2 GENERAL: suffix: ''
3 GENERAL: includes: None
4 GENERAL: use_environment: False
5 GENERAL: depth: '.'
6 GENERAL: generator_flags: []
7 GENERAL: generator_output: None
8 GENERAL: formats: ['gypd']
9 GENERAL: debug: ['variables', 'general']
10 GENERAL: circular_check: True
11 GENERAL: check: None
12 GENERAL: defines: None
13 GENERAL: cmdline_default_variables: {}
14 GENERAL: generator_flags: {}
15 VARIABLES: Expanding '0' to 0
16 VARIABLES: Expanding '11 ' to '11 '
17 VARIABLES: Expanding '+14' to '+14'
18 VARIABLES: Expanding '-15' to -15
19 VARIABLES: Expanding ' 10' to ' 10'
20 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
21 VARIABLES: Expanding 'letters_list' to 'letters_list'
22 VARIABLES: Found output 'ABCDEFG', recursing.
23 VARIABLES: Expanding 'ABCDEFG' to 'ABCDEFG'
24 VARIABLES: Expanding '<(letters_list)EFG' to 'ABCDEFG'
25 VARIABLES: Expanding '012' to '012'
26 VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
27 VARIABLES: Expanding 'other_letters' to 'other_letters'
28 VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
29 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
30 VARIABLES: Expanding 'letters_list' to 'letters_list'
31 VARIABLES: Found output 'ABCDEFGHIJK', recursing.
32 VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
33 VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
34 VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
35 VARIABLES: Expanding 'XYZ' to 'XYZ'
36 VARIABLES: Expanding 'ABCD' to 'ABCD'
37 VARIABLES: Expanding '13.0' to '13.0'
38 VARIABLES: Expanding 'import math; print math.pi' to 'import math; print math.pi'
39 VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
40 VARIABLES: Expanding 'included_variable' to 'included_variable'
41 VARIABLES: Found output 'XYZ', recursing.
42 VARIABLES: Expanding 'XYZ' to 'XYZ'
43 VARIABLES: Expanding '<(included_variable)' to 'XYZ'
44 VARIABLES: Expanding '6' to 6
45 VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
46 VARIABLES: Expanding 'included_variable' to 'included_variable'
47 VARIABLES: Found output 'XYZ', recursing.
48 VARIABLES: Expanding 'XYZ' to 'XYZ'
49 VARIABLES: Expanding '<(included_variable)' to 'XYZ'
50 VARIABLES: Matches: {'content': 'third_letters', 'is_array': '', 'type': '<', 'replace': '<(third_letters)'}
51 VARIABLES: Expanding 'third_letters' to 'third_letters'
52 VARIABLES: Found output '<(other_letters)HIJK', recursing.
53 VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
54 VARIABLES: Expanding 'other_letters' to 'other_letters'
55 VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
56 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
57 VARIABLES: Expanding 'letters_list' to 'letters_list'
58 VARIABLES: Found output 'ABCDEFGHIJK', recursing.
59 VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
60 VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
61 VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
62 VARIABLES: Expanding '<(third_letters)' to 'ABCDEFGHIJK'
63 VARIABLES: Expanding '8' to 8
64 VARIABLES: Expanding '.' to '.'
65 VARIABLES: Expanding 'letters_' to 'letters_'
66 VARIABLES: Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'type': '<', 'replace': '<(<!(python -c "print \'letters_list\'")'}
67 VARIABLES: Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'letters_list\'")'}
68 VARIABLES: Expanding 'python -c "print \'letters_list\'"' to 'python -c "print \'letters_list\'"'
69 VARIABLES: Executing command 'python -c "print 'letters_list'"' in directory 'None'
70 VARIABLES: Found output 'letters_list', recursing.
71 VARIABLES: Expanding 'letters_list' to 'letters_list'
72 VARIABLES: Expanding '<!(python -c "print \'letters_list\'")' to 'letters_list'
73 VARIABLES: Found output 'ABCD', recursing.
74 VARIABLES: Expanding 'ABCD' to 'ABCD'
75 VARIABLES: Expanding '<(<!(python -c "print \'letters_list\'"))' to 'ABCD'
76 VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
77 VARIABLES: Expanding 'check_int' to 'check_int'
78 VARIABLES: Found output '5', recursing.
79 VARIABLES: Expanding '5' to 5
80 VARIABLES: Expanding '<(check_int)' to 5
81 VARIABLES: Expanding 'list' to 'list'
82 VARIABLES: Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'type': '<!', 'replace': '<!(["python", "-c", "<(pi)'}
83 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
84 VARIABLES: Expanding 'pi' to 'pi'
85 VARIABLES: Found output '["python", "-c", "import math; print math.pi"]', recursing.
86 VARIABLES: Expanding '["python", "-c", "import math; print math.pi"]' to '["python", "-c", "import math; print math.pi"]'
87 VARIABLES: Expanding '["python", "-c", "<(pi)"]' to '["python", "-c", "import math; print math.pi"]'
88 VARIABLES: Executing command '['python', '-c', 'import math; print math.pi']' in directory 'None'
89 VARIABLES: Found output '3.14159265359', recursing.
90 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
91 VARIABLES: Expanding '<!(["python", "-c", "<(pi)"])' to '3.14159265359'
92 VARIABLES: Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<(letters_list)'}
93 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
94 VARIABLES: Expanding 'letters_list' to 'letters_list'
95 VARIABLES: Found output 'python -c "print \'ABCD\'"', recursing.
96 VARIABLES: Expanding 'python -c "print \'ABCD\'"' to 'python -c "print \'ABCD\'"'
97 VARIABLES: Expanding 'python -c "print \'<(letters_list)\'"' to 'python -c "print \'ABCD\'"'
98 VARIABLES: Executing command 'python -c "print 'ABCD'"' in directory 'None'
99 VARIABLES: Found output 'ABCD', recursing.
100 VARIABLES: Expanding 'ABCD' to 'ABCD'
101 VARIABLES: Expanding '<!(python -c "print \'<(letters_list)\'")' to 'ABCD'
102 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
103 VARIABLES: Expanding 'letters_list' to 'letters_list'
104 VARIABLES: Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<!(python -c "<(pi)'}
105 VARIABLES: Matches: {'content': 'python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "<(pi)'}
106 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
107 VARIABLES: Expanding 'pi' to 'pi'
108 VARIABLES: Found output 'python -c "import math; print math.pi"', recursing.
109 VARIABLES: Expanding 'python -c "import math; print math.pi"' to 'python -c "import math; print math.pi"'
110 VARIABLES: Expanding 'python -c "<(pi)"' to 'python -c "import math; print math.pi"'
111 VARIABLES: Executing command 'python -c "import math; print math.pi"' in directory 'None'
112 VARIABLES: Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
113 VARIABLES: Expanding 'python -c "print \'3.14159265359 ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
114 VARIABLES: Expanding 'python -c "print \'<!(python -c "<(pi)") ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
115 VARIABLES: Executing command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
116 VARIABLES: Found output '3.14159265359 ABCD', recursing.
117 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
118 VARIABLES: Expanding '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")' to '3.14159265359 ABCD'
119 VARIABLES: Matches: {'content': 'check_str_int', 'is_array': '', 'type': '<', 'replace': '<(check_str_int)'}
120 VARIABLES: Expanding 'check_str_int' to 'check_str_int'
121 VARIABLES: Found output '6', recursing.
122 VARIABLES: Expanding '6' to 6
123 VARIABLES: Expanding '<(check_str_int)' to 6
124 VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
125 VARIABLES: Expanding 'check_int' to 'check_int'
126 VARIABLES: Found output '5blah', recursing.
127 VARIABLES: Expanding '5blah' to '5blah'
128 VARIABLES: Expanding '<(check_int)blah' to '5blah'
129 VARIABLES: Matches: {'content': 'not_int_4', 'is_array': '', 'type': '<', 'replace': '<(not_int_4)'}
130 VARIABLES: Expanding 'not_int_4' to 'not_int_4'
131 VARIABLES: Found output '13.0', recursing.
132 VARIABLES: Expanding '13.0' to '13.0'
133 VARIABLES: Expanding '<(not_int_4)' to '13.0'
134 VARIABLES: Matches: {'content': 'not_int_3', 'is_array': '', 'type': '<', 'replace': '<(not_int_3)'}
135 VARIABLES: Expanding 'not_int_3' to 'not_int_3'
136 VARIABLES: Found output '012', recursing.
137 VARIABLES: Expanding '012' to '012'
138 VARIABLES: Expanding '<(not_int_3)' to '012'
139 VARIABLES: Matches: {'content': 'negative_int', 'is_array': '', 'type': '<', 'replace': '<(negative_int)'}
140 VARIABLES: Expanding 'negative_int' to 'negative_int'
141 VARIABLES: Found output '-15', recursing.
142 VARIABLES: Expanding '-15' to -15
143 VARIABLES: Expanding '<(negative_int)' to -15
144 VARIABLES: Matches: {'content': 'not_int_5', 'is_array': '', 'type': '<', 'replace': '<(not_int_5)'}
145 VARIABLES: Expanding 'not_int_5' to 'not_int_5'
146 VARIABLES: Found output '+14', recursing.
147 VARIABLES: Expanding '+14' to '+14'
148 VARIABLES: Expanding '<(not_int_5)' to '+14'
149 VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<', 'replace': '<(check_list_int)'}
150 VARIABLES: Expanding 'check_list_int' to 'check_list_int'
151 VARIABLES: Found output '7 8 9', recursing.
152 VARIABLES: Expanding '7 8 9' to '7 8 9'
153 VARIABLES: Expanding '<(check_list_int)' to '7 8 9'
154 VARIABLES: Matches: {'content': 'not_int_2', 'is_array': '', 'type': '<', 'replace': '<(not_int_2)'}
155 VARIABLES: Expanding 'not_int_2' to 'not_int_2'
156 VARIABLES: Found output '11 ', recursing.
157 VARIABLES: Expanding '11 ' to '11 '
158 VARIABLES: Expanding '<(not_int_2)' to '11 '
159 VARIABLES: Matches: {'content': 'not_int_1', 'is_array': '', 'type': '<', 'replace': '<(not_int_1)'}
160 VARIABLES: Expanding 'not_int_1' to 'not_int_1'
161 VARIABLES: Found output ' 10', recursing.
162 VARIABLES: Expanding ' 10' to ' 10'
163 VARIABLES: Expanding '<(not_int_1)' to ' 10'
164 VARIABLES: Matches: {'content': 'zero_int', 'is_array': '', 'type': '<', 'replace': '<(zero_int)'}
165 VARIABLES: Expanding 'zero_int' to 'zero_int'
166 VARIABLES: Found output '0', recursing.
167 VARIABLES: Expanding '0' to 0
168 VARIABLES: Expanding '<(zero_int)' to 0
169 VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<@', 'replace': '<@(check_list_int)'}
170 VARIABLES: Expanding 'check_list_int' to 'check_list_int'
171 VARIABLES: Found output [7, 8, 9], recursing.
172 VARIABLES: Expanding 7 to 7
173 VARIABLES: Expanding 8 to 8
174 VARIABLES: Expanding 9 to 9
175 VARIABLES: Expanding '<@(check_list_int)' to [7, 8, 9]
176 VARIABLES: Expanding 'foo' to 'foo'
177 VARIABLES: Expanding 'target' to 'target'
178 VARIABLES: Expanding 'none' to 'none'
179 VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
180 VARIABLES: Expanding 'var6' to 'var6'
181 VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
182 VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
183 VARIABLES: Expanding 'var5' to 'var5'
184 VARIABLES: Found output 'echo letters_list', recursing.
185 VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
186 VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
187 VARIABLES: Executing command 'echo letters_list' in directory 'None'
188 VARIABLES: Found output 'letters_list', recursing.
189 VARIABLES: Expanding 'letters_list' to 'letters_list'
190 VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
191 VARIABLES: Expanding 'test_action' to 'test_action'
192 VARIABLES: Expanding 'echo' to 'echo'
193 VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
194 VARIABLES: Expanding '_inputs' to '_inputs'
195 VARIABLES: Matches: {'content': 'var2', 'is_array': '', 'type': '<', 'replace': '<(var2)'}
196 VARIABLES: Expanding 'var2' to 'var2'
197 VARIABLES: Found output '3.14159265359 ABCD', recursing.
198 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
199 VARIABLES: Expanding '<(var2)' to '3.14159265359 ABCD'
200 VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
201 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
202 VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
203 VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
204 VARIABLES: Expanding '_outputs' to '_outputs'
205 VARIABLES: Matches: {'content': 'var4', 'is_array': '', 'type': '<', 'replace': '<(var4)'}
206 VARIABLES: Expanding 'var4' to 'var4'
207 VARIABLES: Found output 'ABCD', recursing.
208 VARIABLES: Expanding 'ABCD' to 'ABCD'
209 VARIABLES: Expanding '<(var4)' to 'ABCD'
210 VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
211 VARIABLES: Expanding 'var7' to 'var7'
212 VARIABLES: Found output 'letters_list', recursing.
213 VARIABLES: Expanding 'letters_list' to 'letters_list'
214 VARIABLES: Expanding '<(var7)' to 'letters_list'
215 VARIABLES: Found output 'ABCD letters_list', recursing.
216 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
217 VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
218 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
219 VARIABLES: Expanding 'ABCD' to 'ABCD'
220 VARIABLES: Expanding 'letters_list' to 'letters_list'
221 VARIABLES: Expanding 'dummy' to 'dummy'
222 VARIABLES: Expanding 'target' to 'target'
223 VARIABLES: Expanding 'none' to 'none'
224 VARIABLES: Expanding 'commands.gyp' to 'commands.gyp'
225 VARIABLES: Expanding 'commands.gypi' to 'commands.gypi'
226 VARIABLES: Expanding 'dummy' to 'dummy'
227 VARIABLES: Expanding 'target' to 'target'
228 VARIABLES: Expanding 'none' to 'none'
229 VARIABLES: Expanding 'letters_' to 'letters_'
230 VARIABLES: Expanding 'ABCD' to 'ABCD'
231 VARIABLES: Expanding 'list' to 'list'
232 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
233 VARIABLES: Expanding 'ABCD' to 'ABCD'
234 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
235 VARIABLES: Expanding '5blah' to '5blah'
236 VARIABLES: Expanding '13.0' to '13.0'
237 VARIABLES: Expanding '012' to '012'
238 VARIABLES: Expanding '+14' to '+14'
239 VARIABLES: Expanding '7 8 9' to '7 8 9'
240 VARIABLES: Expanding '11 ' to '11 '
241 VARIABLES: Expanding ' 10' to ' 10'
242 VARIABLES: Expanding 'foo' to 'foo'
243 VARIABLES: Expanding 'target' to 'target'
244 VARIABLES: Expanding 'none' to 'none'
245 VARIABLES: Expanding 'letters_list' to 'letters_list'
246 VARIABLES: Expanding 'test_action' to 'test_action'
247 VARIABLES: Expanding 'echo' to 'echo'
248 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
249 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
250 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
251 VARIABLES: Expanding 'ABCD' to 'ABCD'
252 VARIABLES: Expanding 'letters_list' to 'letters_list'
+0
-253
mozc_build_tools/gyp/test/variables/commands/commands.gyp.stdout less more
0 GENERAL: running with these options:
1 GENERAL: msvs_version: None
2 GENERAL: suffix: ''
3 GENERAL: includes: None
4 GENERAL: use_environment: True
5 GENERAL: depth: '.'
6 GENERAL: generator_flags: []
7 GENERAL: generator_output: None
8 GENERAL: formats: ['gypd']
9 GENERAL: debug: ['variables', 'general']
10 GENERAL: circular_check: True
11 GENERAL: check: None
12 GENERAL: defines: None
13 GENERAL: cmdline_default_variables: {}
14 GENERAL: generator_flags: {}
15 VARIABLES: Expanding '0' to 0
16 VARIABLES: Expanding '11 ' to '11 '
17 VARIABLES: Expanding '+14' to '+14'
18 VARIABLES: Expanding '-15' to -15
19 VARIABLES: Expanding ' 10' to ' 10'
20 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
21 VARIABLES: Expanding 'letters_list' to 'letters_list'
22 VARIABLES: Found output 'ABCDEFG', recursing.
23 VARIABLES: Expanding 'ABCDEFG' to 'ABCDEFG'
24 VARIABLES: Expanding '<(letters_list)EFG' to 'ABCDEFG'
25 VARIABLES: Expanding '012' to '012'
26 VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
27 VARIABLES: Expanding 'other_letters' to 'other_letters'
28 VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
29 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
30 VARIABLES: Expanding 'letters_list' to 'letters_list'
31 VARIABLES: Found output 'ABCDEFGHIJK', recursing.
32 VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
33 VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
34 VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
35 VARIABLES: Expanding 'XYZ' to 'XYZ'
36 VARIABLES: Expanding 'ABCD' to 'ABCD'
37 VARIABLES: Expanding '13.0' to '13.0'
38 VARIABLES: Expanding 'import math; print math.pi' to 'import math; print math.pi'
39 VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
40 VARIABLES: Expanding 'included_variable' to 'included_variable'
41 VARIABLES: Found output 'XYZ', recursing.
42 VARIABLES: Expanding 'XYZ' to 'XYZ'
43 VARIABLES: Expanding '<(included_variable)' to 'XYZ'
44 VARIABLES: Expanding '6' to 6
45 VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
46 VARIABLES: Expanding 'included_variable' to 'included_variable'
47 VARIABLES: Found output 'XYZ', recursing.
48 VARIABLES: Expanding 'XYZ' to 'XYZ'
49 VARIABLES: Expanding '<(included_variable)' to 'XYZ'
50 VARIABLES: Matches: {'content': 'third_letters', 'is_array': '', 'type': '<', 'replace': '<(third_letters)'}
51 VARIABLES: Expanding 'third_letters' to 'third_letters'
52 VARIABLES: Found output '<(other_letters)HIJK', recursing.
53 VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
54 VARIABLES: Expanding 'other_letters' to 'other_letters'
55 VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
56 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
57 VARIABLES: Expanding 'letters_list' to 'letters_list'
58 VARIABLES: Found output 'ABCDEFGHIJK', recursing.
59 VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
60 VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
61 VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
62 VARIABLES: Expanding '<(third_letters)' to 'ABCDEFGHIJK'
63 VARIABLES: Expanding '8' to 8
64 VARIABLES: Expanding '.' to '.'
65 VARIABLES: Expanding 'letters_' to 'letters_'
66 VARIABLES: Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'type': '<', 'replace': '<(<!(python -c "print \'letters_list\'")'}
67 VARIABLES: Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'letters_list\'")'}
68 VARIABLES: Expanding 'python -c "print \'letters_list\'"' to 'python -c "print \'letters_list\'"'
69 VARIABLES: Executing command 'python -c "print 'letters_list'"' in directory 'None'
70 VARIABLES: Found output 'letters_list', recursing.
71 VARIABLES: Expanding 'letters_list' to 'letters_list'
72 VARIABLES: Expanding '<!(python -c "print \'letters_list\'")' to 'letters_list'
73 VARIABLES: Found output 'ABCD', recursing.
74 VARIABLES: Expanding 'ABCD' to 'ABCD'
75 VARIABLES: Expanding '<(<!(python -c "print \'letters_list\'"))' to 'ABCD'
76 VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
77 VARIABLES: Expanding 'check_int' to 'check_int'
78 VARIABLES: Found output '5', recursing.
79 VARIABLES: Expanding '5' to 5
80 VARIABLES: Expanding '<(check_int)' to 5
81 VARIABLES: Expanding 'list' to 'list'
82 VARIABLES: Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'type': '<!', 'replace': '<!(["python", "-c", "<(pi)'}
83 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
84 VARIABLES: Expanding 'pi' to 'pi'
85 VARIABLES: Found output '["python", "-c", "import math; print math.pi"]', recursing.
86 VARIABLES: Expanding '["python", "-c", "import math; print math.pi"]' to '["python", "-c", "import math; print math.pi"]'
87 VARIABLES: Expanding '["python", "-c", "<(pi)"]' to '["python", "-c", "import math; print math.pi"]'
88 VARIABLES: Executing command '['python', '-c', 'import math; print math.pi']' in directory 'None'
89 VARIABLES: Found output '3.14159265359', recursing.
90 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
91 VARIABLES: Expanding '<!(["python", "-c", "<(pi)"])' to '3.14159265359'
92 VARIABLES: Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<(letters_list)'}
93 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
94 VARIABLES: Expanding 'letters_list' to 'letters_list'
95 VARIABLES: Found output 'python -c "print \'ABCD\'"', recursing.
96 VARIABLES: Expanding 'python -c "print \'ABCD\'"' to 'python -c "print \'ABCD\'"'
97 VARIABLES: Expanding 'python -c "print \'<(letters_list)\'"' to 'python -c "print \'ABCD\'"'
98 VARIABLES: Executing command 'python -c "print 'ABCD'"' in directory 'None'
99 VARIABLES: Found output 'ABCD', recursing.
100 VARIABLES: Expanding 'ABCD' to 'ABCD'
101 VARIABLES: Expanding '<!(python -c "print \'<(letters_list)\'")' to 'ABCD'
102 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
103 VARIABLES: Expanding 'letters_list' to 'letters_list'
104 VARIABLES: Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<!(python -c "<(pi)'}
105 VARIABLES: Matches: {'content': 'python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "<(pi)'}
106 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
107 VARIABLES: Expanding 'pi' to 'pi'
108 VARIABLES: Found output 'python -c "import math; print math.pi"', recursing.
109 VARIABLES: Expanding 'python -c "import math; print math.pi"' to 'python -c "import math; print math.pi"'
110 VARIABLES: Expanding 'python -c "<(pi)"' to 'python -c "import math; print math.pi"'
111 VARIABLES: Executing command 'python -c "import math; print math.pi"' in directory 'None'
112 VARIABLES: Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
113 VARIABLES: Expanding 'python -c "print \'3.14159265359 ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
114 VARIABLES: Expanding 'python -c "print \'<!(python -c "<(pi)") ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
115 VARIABLES: Executing command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
116 VARIABLES: Found output '3.14159265359 ABCD', recursing.
117 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
118 VARIABLES: Expanding '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")' to '3.14159265359 ABCD'
119 VARIABLES: Matches: {'content': 'check_str_int', 'is_array': '', 'type': '<', 'replace': '<(check_str_int)'}
120 VARIABLES: Expanding 'check_str_int' to 'check_str_int'
121 VARIABLES: Found output '6', recursing.
122 VARIABLES: Expanding '6' to 6
123 VARIABLES: Expanding '<(check_str_int)' to 6
124 VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
125 VARIABLES: Expanding 'check_int' to 'check_int'
126 VARIABLES: Found output '5blah', recursing.
127 VARIABLES: Expanding '5blah' to '5blah'
128 VARIABLES: Expanding '<(check_int)blah' to '5blah'
129 VARIABLES: Matches: {'content': 'not_int_4', 'is_array': '', 'type': '<', 'replace': '<(not_int_4)'}
130 VARIABLES: Expanding 'not_int_4' to 'not_int_4'
131 VARIABLES: Found output '13.0', recursing.
132 VARIABLES: Expanding '13.0' to '13.0'
133 VARIABLES: Expanding '<(not_int_4)' to '13.0'
134 VARIABLES: Matches: {'content': 'not_int_3', 'is_array': '', 'type': '<', 'replace': '<(not_int_3)'}
135 VARIABLES: Expanding 'not_int_3' to 'not_int_3'
136 VARIABLES: Found output '012', recursing.
137 VARIABLES: Expanding '012' to '012'
138 VARIABLES: Expanding '<(not_int_3)' to '012'
139 VARIABLES: Matches: {'content': 'negative_int', 'is_array': '', 'type': '<', 'replace': '<(negative_int)'}
140 VARIABLES: Expanding 'negative_int' to 'negative_int'
141 VARIABLES: Found output '-15', recursing.
142 VARIABLES: Expanding '-15' to -15
143 VARIABLES: Expanding '<(negative_int)' to -15
144 VARIABLES: Matches: {'content': 'not_int_5', 'is_array': '', 'type': '<', 'replace': '<(not_int_5)'}
145 VARIABLES: Expanding 'not_int_5' to 'not_int_5'
146 VARIABLES: Found output '+14', recursing.
147 VARIABLES: Expanding '+14' to '+14'
148 VARIABLES: Expanding '<(not_int_5)' to '+14'
149 VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<', 'replace': '<(check_list_int)'}
150 VARIABLES: Expanding 'check_list_int' to 'check_list_int'
151 VARIABLES: Found output '7 8 9', recursing.
152 VARIABLES: Expanding '7 8 9' to '7 8 9'
153 VARIABLES: Expanding '<(check_list_int)' to '7 8 9'
154 VARIABLES: Matches: {'content': 'not_int_2', 'is_array': '', 'type': '<', 'replace': '<(not_int_2)'}
155 VARIABLES: Expanding 'not_int_2' to 'not_int_2'
156 VARIABLES: Found output '11 ', recursing.
157 VARIABLES: Expanding '11 ' to '11 '
158 VARIABLES: Expanding '<(not_int_2)' to '11 '
159 VARIABLES: Matches: {'content': 'not_int_1', 'is_array': '', 'type': '<', 'replace': '<(not_int_1)'}
160 VARIABLES: Expanding 'not_int_1' to 'not_int_1'
161 VARIABLES: Found output ' 10', recursing.
162 VARIABLES: Expanding ' 10' to ' 10'
163 VARIABLES: Expanding '<(not_int_1)' to ' 10'
164 VARIABLES: Matches: {'content': 'zero_int', 'is_array': '', 'type': '<', 'replace': '<(zero_int)'}
165 VARIABLES: Expanding 'zero_int' to 'zero_int'
166 VARIABLES: Found output '0', recursing.
167 VARIABLES: Expanding '0' to 0
168 VARIABLES: Expanding '<(zero_int)' to 0
169 VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<@', 'replace': '<@(check_list_int)'}
170 VARIABLES: Expanding 'check_list_int' to 'check_list_int'
171 VARIABLES: Found output [7, 8, 9], recursing.
172 VARIABLES: Expanding 7 to 7
173 VARIABLES: Expanding 8 to 8
174 VARIABLES: Expanding 9 to 9
175 VARIABLES: Expanding '<@(check_list_int)' to [7, 8, 9]
176 VARIABLES: Expanding 'foo' to 'foo'
177 VARIABLES: Expanding 'target' to 'target'
178 VARIABLES: Expanding 'none' to 'none'
179 VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
180 VARIABLES: Expanding 'var6' to 'var6'
181 VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
182 VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
183 VARIABLES: Expanding 'var5' to 'var5'
184 VARIABLES: Found output 'echo letters_list', recursing.
185 VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
186 VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
187 VARIABLES: Executing command 'echo letters_list' in directory 'None'
188 VARIABLES: Found output 'letters_list', recursing.
189 VARIABLES: Expanding 'letters_list' to 'letters_list'
190 VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
191 VARIABLES: Expanding 'test_action' to 'test_action'
192 VARIABLES: Expanding 'echo' to 'echo'
193 VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
194 VARIABLES: Expanding '_inputs' to '_inputs'
195 VARIABLES: Matches: {'content': 'var2', 'is_array': '', 'type': '<', 'replace': '<(var2)'}
196 VARIABLES: Expanding 'var2' to 'var2'
197 VARIABLES: Found output '3.14159265359 ABCD', recursing.
198 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
199 VARIABLES: Expanding '<(var2)' to '3.14159265359 ABCD'
200 VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
201 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
202 VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
203 VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
204 VARIABLES: Expanding '_outputs' to '_outputs'
205 VARIABLES: Matches: {'content': 'var4', 'is_array': '', 'type': '<', 'replace': '<(var4)'}
206 VARIABLES: Expanding 'var4' to 'var4'
207 VARIABLES: Found output 'ABCD', recursing.
208 VARIABLES: Expanding 'ABCD' to 'ABCD'
209 VARIABLES: Expanding '<(var4)' to 'ABCD'
210 VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
211 VARIABLES: Expanding 'var7' to 'var7'
212 VARIABLES: Found output 'letters_list', recursing.
213 VARIABLES: Expanding 'letters_list' to 'letters_list'
214 VARIABLES: Expanding '<(var7)' to 'letters_list'
215 VARIABLES: Found output 'ABCD letters_list', recursing.
216 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
217 VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
218 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
219 VARIABLES: Expanding 'ABCD' to 'ABCD'
220 VARIABLES: Expanding 'letters_list' to 'letters_list'
221 VARIABLES: Expanding 'dummy' to 'dummy'
222 VARIABLES: Expanding 'target' to 'target'
223 VARIABLES: Expanding 'none' to 'none'
224 VARIABLES: Expanding 'commands.gyp' to 'commands.gyp'
225 VARIABLES: Expanding 'commands.gypi' to 'commands.gypi'
226 VARIABLES: Expanding 'dummy' to 'dummy'
227 VARIABLES: Expanding 'target' to 'target'
228 VARIABLES: Expanding 'none' to 'none'
229 VARIABLES: Expanding 'letters_' to 'letters_'
230 VARIABLES: Expanding 'ABCD' to 'ABCD'
231 VARIABLES: Expanding 'list' to 'list'
232 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
233 VARIABLES: Expanding 'ABCD' to 'ABCD'
234 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
235 VARIABLES: Expanding '5blah' to '5blah'
236 VARIABLES: Expanding '13.0' to '13.0'
237 VARIABLES: Expanding '012' to '012'
238 VARIABLES: Expanding '+14' to '+14'
239 VARIABLES: Expanding '7 8 9' to '7 8 9'
240 VARIABLES: Expanding '11 ' to '11 '
241 VARIABLES: Expanding ' 10' to ' 10'
242 VARIABLES: Expanding 'foo' to 'foo'
243 VARIABLES: Expanding 'target' to 'target'
244 VARIABLES: Expanding 'none' to 'none'
245 VARIABLES: Expanding 'letters_list' to 'letters_list'
246 VARIABLES: Expanding 'test_action' to 'test_action'
247 VARIABLES: Expanding 'echo' to 'echo'
248 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
249 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
250 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
251 VARIABLES: Expanding 'ABCD' to 'ABCD'
252 VARIABLES: Expanding 'letters_list' to 'letters_list'
+0
-54
mozc_build_tools/gyp/test/variables/commands/commands.gypd.golden less more
0 {'_DEPTH': '.',
1 'included_files': ['commands.gyp', 'commands.gypi'],
2 'targets': [{'actions': [{'action': ['echo',
3 '"3.14159265359 ABCD"',
4 'ABCD letters_list'],
5 'action_name': 'test_action',
6 'inputs': ['3.14159265359 ABCD'],
7 'outputs': ['ABCD', 'letters_list'],
8 'variables': {'var7': 'letters_list'}}],
9 'configurations': {'Default': {}},
10 'default_configuration': 'Default',
11 'target_name': 'foo',
12 'toolset': 'target',
13 'type': 'none',
14 'variables': {'var1': '3.14159265359',
15 'var10': '7 8 9',
16 'var11': ['7', '8', '9'],
17 'var12': ' 10',
18 'var13': '11 ',
19 'var14': '012',
20 'var15': '13.0',
21 'var16': '+14',
22 'var17': '-15',
23 'var18': '0',
24 'var2': '3.14159265359 ABCD',
25 'var3': 'ABCD',
26 'var4': 'ABCD',
27 'var5': 'letters_',
28 'var6': 'list',
29 'var7': '5',
30 'var8': '5blah',
31 'var9': '6'}},
32 {'configurations': {'Default': {}},
33 'default_configuration': 'Default',
34 'target_name': 'dummy',
35 'toolset': 'target',
36 'type': 'none'}],
37 'variables': {'check_included': 'XYZ',
38 'check_int': '5',
39 'check_list_int': ['7', '8', '9'],
40 'check_lists': ['XYZ', 'ABCDEFGHIJK'],
41 'check_str_int': '6',
42 'included_variable': 'XYZ',
43 'letters_list': 'ABCD',
44 'negative_int': '-15',
45 'not_int_1': ' 10',
46 'not_int_2': '11 ',
47 'not_int_3': '012',
48 'not_int_4': '13.0',
49 'not_int_5': '+14',
50 'other_letters': 'ABCDEFG',
51 'pi': 'import math; print math.pi',
52 'third_letters': 'ABCDEFGHIJK',
53 'zero_int': '0'}}
+0
-16
mozc_build_tools/gyp/test/variables/commands/commands.gypi less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 # This file is included from commands.gyp to test evaluation order of includes.
5 {
6 'variables': {
7 'included_variable': 'XYZ',
8 },
9 'targets': [
10 {
11 'target_name': 'dummy',
12 'type': 'none',
13 },
14 ],
15 }
+0
-51
mozc_build_tools/gyp/test/variables/commands/gyptest-commands-ignore-env.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Test that environment variables are ignored when --ignore-environment is
8 specified.
9 """
10
11 import os
12
13 import TestGyp
14
15 os.environ['GYP_DEFINES'] = 'FOO=BAR'
16 os.environ['GYP_GENERATORS'] = 'foo'
17 os.environ['GYP_GENERATOR_FLAGS'] = 'genflag=foo'
18 os.environ['GYP_GENERATOR_OUTPUT'] = 'somedir'
19
20 test = TestGyp.TestGyp(format='gypd')
21
22 expect = test.read('commands.gyp.ignore-env.stdout').replace('\r', '')
23
24 # Set $HOME so that gyp doesn't read the user's actual
25 # ~/.gyp/include.gypi file, which may contain variables
26 # and other settings that would change the output.
27 os.environ['HOME'] = test.workpath()
28
29 test.run_gyp('commands.gyp',
30 '--debug', 'variables', '--debug', 'general',
31 '--ignore-environment',
32 stdout=expect)
33
34 # Verify the commands.gypd against the checked-in expected contents.
35 #
36 # Normally, we should canonicalize line endings in the expected
37 # contents file setting the Subversion svn:eol-style to native,
38 # but that would still fail if multiple systems are sharing a single
39 # workspace on a network-mounted file system. Consequently, we
40 # massage the Windows line endings ('\r\n') in the output to the
41 # checked-in UNIX endings ('\n').
42
43 contents = test.read('commands.gypd').replace('\r', '')
44 expect = test.read('commands.gypd.golden').replace('\r', '')
45 if not test.match(contents, expect):
46 print "Unexpected contents of `commands.gypd'"
47 test.diff(expect, contents, 'commands.gypd ')
48 test.fail_test()
49
50 test.pass_test()
+0
-45
mozc_build_tools/gyp/test/variables/commands/gyptest-commands-repeated.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Test variable expansion of '<!()' syntax commands where they are evaluated
8 more then once..
9 """
10
11 import os
12
13 import TestGyp
14
15 test = TestGyp.TestGyp(format='gypd')
16
17 expect = test.read('commands-repeated.gyp.stdout').replace('\r', '')
18
19 # Set $HOME so that gyp doesn't read the user's actual
20 # ~/.gyp/include.gypi file, which may contain variables
21 # and other settings that would change the output.
22 os.environ['HOME'] = test.workpath()
23
24 test.run_gyp('commands-repeated.gyp',
25 '--debug', 'variables', '--debug', 'general',
26 stdout=expect)
27
28 # Verify the commands-repeated.gypd against the checked-in expected contents.
29 #
30 # Normally, we should canonicalize line endings in the expected
31 # contents file setting the Subversion svn:eol-style to native,
32 # but that would still fail if multiple systems are sharing a single
33 # workspace on a network-mounted file system. Consequently, we
34 # massage the Windows line endings ('\r\n') in the output to the
35 # checked-in UNIX endings ('\n').
36
37 contents = test.read('commands-repeated.gypd').replace('\r', '')
38 expect = test.read('commands-repeated.gypd.golden').replace('\r', '')
39 if not test.match(contents, expect):
40 print "Unexpected contents of `commands-repeated.gypd'"
41 test.diff(expect, contents, 'commands-repeated.gypd ')
42 test.fail_test()
43
44 test.pass_test()
+0
-44
mozc_build_tools/gyp/test/variables/commands/gyptest-commands.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Test variable expansion of '<!()' syntax commands.
8 """
9
10 import os
11
12 import TestGyp
13
14 test = TestGyp.TestGyp(format='gypd')
15
16 expect = test.read('commands.gyp.stdout').replace('\r', '')
17
18 # Set $HOME so that gyp doesn't read the user's actual
19 # ~/.gyp/include.gypi file, which may contain variables
20 # and other settings that would change the output.
21 os.environ['HOME'] = test.workpath()
22
23 test.run_gyp('commands.gyp',
24 '--debug', 'variables', '--debug', 'general',
25 stdout=expect)
26
27 # Verify the commands.gypd against the checked-in expected contents.
28 #
29 # Normally, we should canonicalize line endings in the expected
30 # contents file setting the Subversion svn:eol-style to native,
31 # but that would still fail if multiple systems are sharing a single
32 # workspace on a network-mounted file system. Consequently, we
33 # massage the Windows line endings ('\r\n') in the output to the
34 # checked-in UNIX endings ('\n').
35
36 contents = test.read('commands.gypd').replace('\r', '')
37 expect = test.read('commands.gypd.golden').replace('\r', '')
38 if not test.match(contents, expect):
39 print "Unexpected contents of `commands.gypd'"
40 test.diff(expect, contents, 'commands.gypd ')
41 test.fail_test()
42
43 test.pass_test()
+0
-11
mozc_build_tools/gyp/test/variables/commands/update_golden less more
0 #!/bin/bash
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 python ../../../gyp --debug variables --debug general --format gypd --depth . commands.gyp > commands.gyp.stdout
7 python ../../../gyp --ignore-environment --debug variables --debug general --format gypd --depth . commands.gyp > commands.gyp.ignore-env.stdout
8 cp -f commands.gypd commands.gypd.golden
9 python ../../../gyp --debug variables --debug general --format gypd --depth . commands-repeated.gyp > commands-repeated.gyp.stdout
10 cp -f commands-repeated.gypd commands-repeated.gypd.golden
+0
-173
mozc_build_tools/gyp/test/variables/filelist/filelist.gyp.stdout less more
0 GENERAL: running with these options:
1 GENERAL: msvs_version: None
2 GENERAL: suffix: ''
3 GENERAL: includes: None
4 GENERAL: use_environment: True
5 GENERAL: depth: '.'
6 GENERAL: generator_flags: []
7 GENERAL: generator_output: None
8 GENERAL: formats: ['gypd']
9 GENERAL: debug: ['variables', 'general']
10 GENERAL: circular_check: True
11 GENERAL: check: None
12 GENERAL: defines: None
13 GENERAL: cmdline_default_variables: {}
14 GENERAL: generator_flags: {}
15 VARIABLES: Expanding 'exclude' to 'exclude'
16 VARIABLES: Expanding 'Sch.*' to 'Sch.*'
17 VARIABLES: Expanding 'include' to 'include'
18 VARIABLES: Expanding '.*dt' to '.*dt'
19 VARIABLES: Expanding 'exclude' to 'exclude'
20 VARIABLES: Expanding 'Jer.*' to 'Jer.*'
21 VARIABLES: Expanding 'John' to 'John'
22 VARIABLES: Expanding 'Jacob' to 'Jacob'
23 VARIABLES: Expanding 'Astor' to 'Astor'
24 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
25 VARIABLES: Expanding 'Jerome' to 'Jerome'
26 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
27 VARIABLES: Expanding 'Schultz' to 'Schultz'
28 VARIABLES: Expanding 'Astor' to 'Astor'
29 VARIABLES: Expanding '.' to '.'
30 VARIABLES: Matches: {'content': 'names.txt <@(names', 'is_array': '', 'type': '<|', 'replace': '<|(names.txt <@(names)'}
31 VARIABLES: Matches: {'content': 'names', 'is_array': '', 'type': '<@', 'replace': '<@(names)'}
32 VARIABLES: Expanding 'names' to 'names'
33 VARIABLES: Expanding 'John' to 'John'
34 VARIABLES: Expanding 'Jacob' to 'Jacob'
35 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
36 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
37 VARIABLES: Found output 'names.txt John Jacob Jingleheimer Schmidt', recursing.
38 VARIABLES: Expanding 'names.txt John Jacob Jingleheimer Schmidt' to 'names.txt John Jacob Jingleheimer Schmidt'
39 VARIABLES: Expanding 'names.txt <@(names)' to 'names.txt John Jacob Jingleheimer Schmidt'
40 VARIABLES: Found output 'names.txt', recursing.
41 VARIABLES: Expanding 'names.txt' to 'names.txt'
42 VARIABLES: Expanding '<|(names.txt <@(names))' to 'names.txt'
43 VARIABLES: Expanding 'foo' to 'foo'
44 VARIABLES: Expanding 'target' to 'target'
45 VARIABLES: Expanding 'none' to 'none'
46 VARIABLES: Expanding 'test_action' to 'test_action'
47 VARIABLES: Expanding 'python' to 'python'
48 VARIABLES: Expanding 'dummy.py' to 'dummy.py'
49 VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'}
50 VARIABLES: Expanding 'names_listfile' to 'names_listfile'
51 VARIABLES: Found output 'names.txt', recursing.
52 VARIABLES: Expanding 'names.txt' to 'names.txt'
53 VARIABLES: Expanding '<(names_listfile)' to 'names.txt'
54 VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'}
55 VARIABLES: Expanding 'names_listfile' to 'names_listfile'
56 VARIABLES: Found output 'names.txt', recursing.
57 VARIABLES: Expanding 'names.txt' to 'names.txt'
58 VARIABLES: Expanding '<(names_listfile)' to 'names.txt'
59 VARIABLES: Matches: {'content': 'cat <(names_listfile', 'is_array': '', 'type': '<!@', 'replace': '<!@(cat <(names_listfile)'}
60 VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'}
61 VARIABLES: Expanding 'names_listfile' to 'names_listfile'
62 VARIABLES: Found output 'cat names.txt', recursing.
63 VARIABLES: Expanding 'cat names.txt' to 'cat names.txt'
64 VARIABLES: Expanding 'cat <(names_listfile)' to 'cat names.txt'
65 VARIABLES: Executing command 'cat names.txt' in directory 'src'
66 VARIABLES: Found output ['John', 'Jacob', 'Jingleheimer', 'Schmidt'], recursing.
67 VARIABLES: Expanding 'John' to 'John'
68 VARIABLES: Expanding 'Jacob' to 'Jacob'
69 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
70 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
71 VARIABLES: Expanding '<!@(cat <(names_listfile))' to ['John', 'Jacob', 'Jingleheimer', 'Schmidt']
72 VARIABLES: Expanding 'dummy_foo' to 'dummy_foo'
73 VARIABLES: Matches: {'content': 'sources.txt <@(_sources', 'is_array': '', 'type': '<|', 'replace': '<|(sources.txt <@(_sources)'}
74 VARIABLES: Matches: {'content': '_sources', 'is_array': '', 'type': '<@', 'replace': '<@(_sources)'}
75 VARIABLES: Expanding '_sources' to '_sources'
76 VARIABLES: Expanding 'John' to 'John'
77 VARIABLES: Expanding 'Jacob' to 'Jacob'
78 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
79 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
80 VARIABLES: Found output 'sources.txt John Jacob Jingleheimer Schmidt', recursing.
81 VARIABLES: Expanding 'sources.txt John Jacob Jingleheimer Schmidt' to 'sources.txt John Jacob Jingleheimer Schmidt'
82 VARIABLES: Expanding 'sources.txt <@(_sources)' to 'sources.txt John Jacob Jingleheimer Schmidt'
83 VARIABLES: Found output 'sources.txt', recursing.
84 VARIABLES: Expanding 'sources.txt' to 'sources.txt'
85 VARIABLES: Expanding '<|(sources.txt <@(_sources))' to 'sources.txt'
86 VARIABLES: Expanding 'bar' to 'bar'
87 VARIABLES: Expanding 'target' to 'target'
88 VARIABLES: Expanding 'none' to 'none'
89 VARIABLES: Expanding 'exclude' to 'exclude'
90 VARIABLES: Expanding 'Sch.*' to 'Sch.*'
91 VARIABLES: Expanding 'include' to 'include'
92 VARIABLES: Expanding '.*dt' to '.*dt'
93 VARIABLES: Expanding 'exclude' to 'exclude'
94 VARIABLES: Expanding 'Jer.*' to 'Jer.*'
95 VARIABLES: Expanding 'Astor' to 'Astor'
96 VARIABLES: Expanding 'test_action' to 'test_action'
97 VARIABLES: Expanding 'python' to 'python'
98 VARIABLES: Expanding 'dummy.py' to 'dummy.py'
99 VARIABLES: Matches: {'content': 'sources_listfile', 'is_array': '', 'type': '<', 'replace': '<(sources_listfile)'}
100 VARIABLES: Expanding 'sources_listfile' to 'sources_listfile'
101 VARIABLES: Found output 'sources.txt', recursing.
102 VARIABLES: Expanding 'sources.txt' to 'sources.txt'
103 VARIABLES: Expanding '<(sources_listfile)' to 'sources.txt'
104 VARIABLES: Matches: {'content': 'sources_listfile', 'is_array': '', 'type': '<', 'replace': '<(sources_listfile)'}
105 VARIABLES: Expanding 'sources_listfile' to 'sources_listfile'
106 VARIABLES: Found output 'sources.txt', recursing.
107 VARIABLES: Expanding 'sources.txt' to 'sources.txt'
108 VARIABLES: Expanding '<(sources_listfile)' to 'sources.txt'
109 VARIABLES: Matches: {'content': 'cat <(sources_listfile', 'is_array': '', 'type': '<!@', 'replace': '<!@(cat <(sources_listfile)'}
110 VARIABLES: Matches: {'content': 'sources_listfile', 'is_array': '', 'type': '<', 'replace': '<(sources_listfile)'}
111 VARIABLES: Expanding 'sources_listfile' to 'sources_listfile'
112 VARIABLES: Found output 'cat sources.txt', recursing.
113 VARIABLES: Expanding 'cat sources.txt' to 'cat sources.txt'
114 VARIABLES: Expanding 'cat <(sources_listfile)' to 'cat sources.txt'
115 VARIABLES: Executing command 'cat sources.txt' in directory 'src'
116 VARIABLES: Found output ['John', 'Jacob', 'Jingleheimer', 'Schmidt'], recursing.
117 VARIABLES: Expanding 'John' to 'John'
118 VARIABLES: Expanding 'Jacob' to 'Jacob'
119 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
120 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
121 VARIABLES: Expanding '<!@(cat <(sources_listfile))' to ['John', 'Jacob', 'Jingleheimer', 'Schmidt']
122 VARIABLES: Expanding 'dummy_foo' to 'dummy_foo'
123 VARIABLES: Expanding 'John' to 'John'
124 VARIABLES: Expanding 'Jacob' to 'Jacob'
125 VARIABLES: Expanding 'Astor' to 'Astor'
126 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
127 VARIABLES: Expanding 'Jerome' to 'Jerome'
128 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
129 VARIABLES: Expanding 'Schultz' to 'Schultz'
130 VARIABLES: Expanding 'filelist.gyp' to 'filelist.gyp'
131 VARIABLES: Expanding 'names.txt' to 'names.txt'
132 VARIABLES: Expanding 'foo' to 'foo'
133 VARIABLES: Expanding 'target' to 'target'
134 VARIABLES: Expanding 'none' to 'none'
135 VARIABLES: Expanding 'test_action' to 'test_action'
136 VARIABLES: Expanding 'python' to 'python'
137 VARIABLES: Expanding 'dummy.py' to 'dummy.py'
138 VARIABLES: Expanding 'names.txt' to 'names.txt'
139 VARIABLES: Expanding 'names.txt' to 'names.txt'
140 VARIABLES: Expanding 'John' to 'John'
141 VARIABLES: Expanding 'Jacob' to 'Jacob'
142 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
143 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
144 VARIABLES: Expanding 'dummy_foo' to 'dummy_foo'
145 VARIABLES: Expanding 'sources.txt' to 'sources.txt'
146 VARIABLES: Expanding 'bar' to 'bar'
147 VARIABLES: Expanding 'target' to 'target'
148 VARIABLES: Expanding 'none' to 'none'
149 VARIABLES: Expanding 'exclude' to 'exclude'
150 VARIABLES: Expanding 'Sch.*' to 'Sch.*'
151 VARIABLES: Expanding 'include' to 'include'
152 VARIABLES: Expanding '.*dt' to '.*dt'
153 VARIABLES: Expanding 'exclude' to 'exclude'
154 VARIABLES: Expanding 'Jer.*' to 'Jer.*'
155 VARIABLES: Expanding 'Astor' to 'Astor'
156 VARIABLES: Expanding 'test_action' to 'test_action'
157 VARIABLES: Expanding 'python' to 'python'
158 VARIABLES: Expanding 'dummy.py' to 'dummy.py'
159 VARIABLES: Expanding 'sources.txt' to 'sources.txt'
160 VARIABLES: Expanding 'sources.txt' to 'sources.txt'
161 VARIABLES: Expanding 'John' to 'John'
162 VARIABLES: Expanding 'Jacob' to 'Jacob'
163 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
164 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
165 VARIABLES: Expanding 'dummy_foo' to 'dummy_foo'
166 VARIABLES: Expanding 'John' to 'John'
167 VARIABLES: Expanding 'Jacob' to 'Jacob'
168 VARIABLES: Expanding 'Astor' to 'Astor'
169 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
170 VARIABLES: Expanding 'Jerome' to 'Jerome'
171 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
172 VARIABLES: Expanding 'Schultz' to 'Schultz'
+0
-43
mozc_build_tools/gyp/test/variables/filelist/filelist.gypd.golden less more
0 {'_DEPTH': '.',
1 'included_files': ['filelist.gyp'],
2 'targets': [{'actions': [{'action': ['python', 'dummy.py', 'names.txt'],
3 'action_name': 'test_action',
4 'inputs': ['names.txt',
5 'John',
6 'Jacob',
7 'Jingleheimer',
8 'Schmidt'],
9 'outputs': ['dummy_foo']}],
10 'configurations': {'Default': {}},
11 'default_configuration': 'Default',
12 'target_name': 'foo',
13 'toolset': 'target',
14 'type': 'none',
15 'variables': {'names_listfile': 'names.txt'}},
16 {'actions': [{'action': ['python', 'dummy.py', 'sources.txt'],
17 'action_name': 'test_action',
18 'inputs': ['sources.txt',
19 'John',
20 'Jacob',
21 'Jingleheimer',
22 'Schmidt'],
23 'outputs': ['dummy_foo']}],
24 'configurations': {'Default': {}},
25 'default_configuration': 'Default',
26 'sources': ['John', 'Jacob', 'Jingleheimer', 'Schmidt'],
27 'sources_excluded': ['Astor', 'Jerome', 'Schultz'],
28 'target_name': 'bar',
29 'toolset': 'target',
30 'type': 'none',
31 'variables': {'sources_listfile': 'sources.txt'}}],
32 'variables': {'names': ['John',
33 'Jacob',
34 'Astor',
35 'Jingleheimer',
36 'Jerome',
37 'Schmidt',
38 'Schultz'],
39 'names!': ['Astor'],
40 'names/': [['exclude', 'Sch.*'],
41 ['include', '.*dt'],
42 ['exclude', 'Jer.*']]}}
+0
-55
mozc_build_tools/gyp/test/variables/filelist/gyptest-filelist.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Test variable expansion of '<|(list.txt ...)' syntax commands.
8 """
9
10 import os
11 import sys
12
13 import TestGyp
14
15 test = TestGyp.TestGyp(format='gypd')
16
17 expect = test.read('filelist.gyp.stdout')
18 if sys.platform == 'win32':
19 expect = expect.replace('/', r'\\').replace('\r', '')
20
21 # Set $HOME so that gyp doesn't read the user's actual
22 # ~/.gyp/include.gypi file, which may contain variables
23 # and other settings that would change the output.
24 os.environ['HOME'] = test.workpath()
25
26 test.run_gyp('src/filelist.gyp',
27 '--debug', 'variables', '--debug', 'general',
28 stdout=expect)
29
30 # Verify the filelist.gypd against the checked-in expected contents.
31 #
32 # Normally, we should canonicalize line endings in the expected
33 # contents file setting the Subversion svn:eol-style to native,
34 # but that would still fail if multiple systems are sharing a single
35 # workspace on a network-mounted file system. Consequently, we
36 # massage the Windows line endings ('\r\n') in the output to the
37 # checked-in UNIX endings ('\n').
38
39 contents = test.read('src/filelist.gypd').replace(
40 '\r', '').replace('\\\\', '/')
41 expect = test.read('filelist.gypd.golden').replace('\r', '')
42 if not test.match(contents, expect):
43 print "Unexpected contents of `src/filelist.gypd'"
44 test.diff(expect, contents, 'src/filelist.gypd ')
45 test.fail_test()
46
47 contents = test.read('src/names.txt')
48 expect = 'John\nJacob\nJingleheimer\nSchmidt\n'
49 if not test.match(contents, expect):
50 print "Unexpected contents of `src/names.txt'"
51 test.diff(expect, contents, 'src/names.txt ')
52 test.fail_test()
53
54 test.pass_test()
+0
-93
mozc_build_tools/gyp/test/variables/filelist/src/filelist.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 # This is a test to make sure that <|(foo.txt a b c) generates
5 # a pre-calculated file list at gyp time and returns foo.txt.
6 # This feature is useful to work around limits in the number of arguments that
7 # can be passed to rule/action.
8
9 {
10 'variables': {
11 'names': [
12 'John',
13 'Jacob',
14 'Astor',
15 'Jingleheimer',
16 'Jerome',
17 'Schmidt',
18 'Schultz',
19 ],
20 'names!': [
21 'Astor',
22 ],
23 'names/': [
24 ['exclude', 'Sch.*'],
25 ['include', '.*dt'],
26 ['exclude', 'Jer.*'],
27 ],
28 },
29 'targets': [
30 {
31 'target_name': 'foo',
32 'type': 'none',
33 'variables': {
34 'names_listfile': '<|(names.txt <@(names))',
35 },
36 'actions': [
37 {
38 'action_name': 'test_action',
39 'inputs' : [
40 '<(names_listfile)',
41 '<!@(cat <(names_listfile))',
42 ],
43 'outputs': [
44 'dummy_foo',
45 ],
46 'action': [
47 'python', 'dummy.py', '<(names_listfile)',
48 ],
49 },
50 ],
51 },
52 {
53 'target_name': 'bar',
54 'type': 'none',
55 'sources': [
56 'John',
57 'Jacob',
58 'Astor',
59 'Jingleheimer',
60 'Jerome',
61 'Schmidt',
62 'Schultz',
63 ],
64 'sources!': [
65 'Astor',
66 ],
67 'sources/': [
68 ['exclude', 'Sch.*'],
69 ['include', '.*dt'],
70 ['exclude', 'Jer.*'],
71 ],
72 'variables': {
73 'sources_listfile': '<|(sources.txt <@(_sources))',
74 },
75 'actions': [
76 {
77 'action_name': 'test_action',
78 'inputs' : [
79 '<(sources_listfile)',
80 '<!@(cat <(sources_listfile))',
81 ],
82 'outputs': [
83 'dummy_foo',
84 ],
85 'action': [
86 'python', 'dummy.py', '<(sources_listfile)',
87 ],
88 },
89 ],
90 },
91 ],
92 }
+0
-8
mozc_build_tools/gyp/test/variables/filelist/update_golden less more
0 #!/bin/bash
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 python ../../../gyp --debug variables --debug general --format gypd --depth . src/filelist.gyp > filelist.gyp.stdout
7 cp -f src/filelist.gypd filelist.gypd.golden
+0
-45
mozc_build_tools/gyp/test/variants/gyptest-variants.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verify handling of build variants.
8
9 TODO: Right now, only the SCons generator supports this, so the
10 test case is SCons-specific. In particular, it relise on SCons'
11 ability to rebuild in response to changes on the command line. It
12 may be simpler to just drop this feature if the other generators
13 can't be made to behave the same way.
14 """
15
16 import TestGyp
17
18 test = TestGyp.TestGyp(formats=['scons'])
19
20 test.run_gyp('variants.gyp', chdir='src')
21
22 test.relocate('src', 'relocate/src')
23
24 test.build('variants.gyp', chdir='relocate/src')
25
26 test.run_built_executable('variants',
27 chdir='relocate/src',
28 stdout="Hello, world!\n")
29
30 test.sleep()
31 test.build('variants.gyp', 'VARIANT1=1', chdir='relocate/src')
32
33 test.run_built_executable('variants',
34 chdir='relocate/src',
35 stdout="Hello from VARIANT1\n")
36
37 test.sleep()
38 test.build('variants.gyp', 'VARIANT2=1', chdir='relocate/src')
39
40 test.run_built_executable('variants',
41 chdir='relocate/src',
42 stdout="Hello from VARIANT2\n")
43
44 test.pass_test()
+0
-13
mozc_build_tools/gyp/test/variants/src/variants.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 #if defined(VARIANT1)
5 printf("Hello from VARIANT1\n");
6 #elif defined(VARIANT2)
7 printf("Hello from VARIANT2\n");
8 #else
9 printf("Hello, world!\n");
10 #endif
11 return 0;
12 }
+0
-27
mozc_build_tools/gyp/test/variants/src/variants.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'variants',
8 'type': 'executable',
9 'sources': [
10 'variants.c',
11 ],
12 'variants': {
13 'variant1' : {
14 'defines': [
15 'VARIANT1',
16 ],
17 },
18 'variant2' : {
19 'defines': [
20 'VARIANT2',
21 ],
22 },
23 },
24 },
25 ],
26 }
+0
-15
mozc_build_tools/gyp/tools/README less more
0 pretty_vcproj:
1 Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2]
2
3 They key/value pair are used to resolve vsprops name.
4
5 For example, if I want to diff the base.vcproj project:
6
7 pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > orignal.txt
8 pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
9
10 And you can use your favorite diff tool to see the changes.
11
12 Note: In the case of base.vcproj, the original vcproj is one level up the generated one.
13 I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt
14 before you perform the diff.
+0
-142
mozc_build_tools/gyp/tools/pretty_gyp.py less more
0 #!/usr/bin/env python
1 # Copyright (c) 2009 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
4
5 # This file pretty-prints the contents of a GYP file.
6
7 import sys
8 import re
9
10 input = []
11 if len(sys.argv) > 1:
12 input_file = open(sys.argv[1])
13 input = input_file.read().splitlines()
14 input_file.close()
15 else:
16 input = sys.stdin.read().splitlines()
17
18 # This is used to remove comments when we're counting braces.
19 comment_re = re.compile(r'\s*#.*')
20
21 # This is used to remove quoted strings when we're counting braces.
22 # It takes into account quoted quotes, and makes sure that the quotes
23 # match.
24 # NOTE: It does not handle quotes that span more than one line, or
25 # cases where an escaped quote is preceeded by an escaped backslash.
26 quote_re_str = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
27 quote_re = re.compile(quote_re_str)
28
29 def comment_replace(matchobj):
30 return matchobj.group(1) + matchobj.group(2) + '#' * len(matchobj.group(3))
31
32 def mask_comments(input):
33 # This is used to mask the quoted strings so we skip braces inside
34 # quoted strings.
35 search_re = re.compile(r'(.*?)(#)(.*)')
36 return [search_re.sub(comment_replace, line) for line in input]
37
38 def quote_replace(matchobj):
39 return "%s%s%s%s" % (matchobj.group(1),
40 matchobj.group(2),
41 'x'*len(matchobj.group(3)),
42 matchobj.group(2))
43
44 def mask_quotes(input):
45 # This is used to mask the quoted strings so we skip braces inside
46 # quoted strings.
47 search_re = re.compile(r'(.*?)' + quote_re_str)
48 return [search_re.sub(quote_replace, line) for line in input]
49
50 def do_split(input, masked_input, search_re):
51 output = []
52 mask_output = []
53 for (line, masked_line) in zip(input, masked_input):
54 m = search_re.match(masked_line)
55 while m:
56 split = len(m.group(1))
57 line = line[:split] + r'\n' + line[split:]
58 masked_line = masked_line[:split] + r'\n' + masked_line[split:]
59 m = search_re.match(masked_line)
60 output.extend(line.split(r'\n'))
61 mask_output.extend(masked_line.split(r'\n'))
62 return (output, mask_output)
63
64 # This masks out the quotes and comments, and then splits appropriate
65 # lines (lines that matche the double_*_brace re's above) before
66 # indenting them below.
67 def split_double_braces(input):
68 # These are used to split lines which have multiple braces on them, so
69 # that the indentation looks prettier when all laid out (e.g. closing
70 # braces make a nice diagonal line).
71 double_open_brace_re = re.compile(r'(.*?[\[\{\(,])(\s*)([\[\{\(])')
72 double_close_brace_re = re.compile(r'(.*?[\]\}\)],?)(\s*)([\]\}\)])')
73
74 masked_input = mask_quotes(input)
75 masked_input = mask_comments(masked_input)
76
77 (output, mask_output) = do_split(input, masked_input, double_open_brace_re)
78 (output, mask_output) = do_split(output, mask_output, double_close_brace_re)
79
80 return output
81
82 # This keeps track of the number of braces on a given line and returns
83 # the result. It starts at zero and subtracts for closed braces, and
84 # adds for open braces.
85 def count_braces(line):
86 open_braces = ['[', '(', '{']
87 close_braces = [']', ')', '}']
88 closing_prefix_re = re.compile(r'(.*?[^\s\]\}\)]+.*?)([\]\}\)],?)\s*$')
89 cnt = 0
90 stripline = comment_re.sub(r'', line)
91 stripline = quote_re.sub(r"''", stripline)
92 for char in stripline:
93 for brace in open_braces:
94 if char == brace:
95 cnt += 1
96 for brace in close_braces:
97 if char == brace:
98 cnt -= 1
99
100 after = False
101 if cnt > 0:
102 after = True
103
104 # This catches the special case of a closing brace having something
105 # other than just whitespace ahead of it -- we don't want to
106 # unindent that until after this line is printed so it stays with
107 # the previous indentation level.
108 if cnt < 0 and closing_prefix_re.match(stripline):
109 after = True
110 return (cnt, after)
111
112 # This does the main work of indenting the input based on the brace counts.
113 def prettyprint_input(lines):
114 indent = 0
115 basic_offset = 2
116 last_line = ""
117 for line in lines:
118 if comment_re.match(line):
119 print line
120 else:
121 line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
122 if len(line) > 0:
123 (brace_diff, after) = count_braces(line)
124 if brace_diff != 0:
125 if after:
126 print " " * (basic_offset * indent) + line
127 indent += brace_diff
128 else:
129 indent += brace_diff
130 print " " * (basic_offset * indent) + line
131 else:
132 print " " * (basic_offset * indent) + line
133 else:
134 print ""
135 last_line = line
136
137 # Split up the double braces.
138 lines = split_double_braces(input)
139
140 # Indent and print the output.
141 prettyprint_input(lines)
+0
-167
mozc_build_tools/gyp/tools/pretty_sln.py less more
0 #!/usr/bin/python2.5
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Prints the information in a sln file in a diffable way.
7
8 It first outputs each projects in alphabetical order with their
9 dependencies.
10
11 Then it outputs a possible build order.
12 """
13
14 __author__ = 'nsylvain (Nicolas Sylvain)'
15
16 import os
17 import re
18 import sys
19 import pretty_vcproj
20
21 def BuildProject(project, built, projects, deps):
22 # if all dependencies are done, we can build it, otherwise we try to build the
23 # dependency.
24 # This is not infinite-recursion proof.
25 for dep in deps[project]:
26 if dep not in built:
27 BuildProject(dep, built, projects, deps)
28 print project
29 built.append(project)
30
31 def ParseSolution(solution_file):
32 # All projects, their clsid and paths.
33 projects = dict()
34
35 # A list of dependencies associated with a project.
36 dependencies = dict()
37
38 # Regular expressions that matches the SLN format.
39 # The first line of a project definition.
40 begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
41 '}"\) = "(.*)", "(.*)", "(.*)"$'))
42 # The last line of a project definition.
43 end_project = re.compile('^EndProject$')
44 # The first line of a dependency list.
45 begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$')
46 # The last line of a dependency list.
47 end_dep = re.compile('EndProjectSection$')
48 # A line describing a dependency.
49 dep_line = re.compile(' *({.*}) = ({.*})$')
50
51 in_deps = False
52 solution = open(solution_file)
53 for line in solution:
54 results = begin_project.search(line)
55 if results:
56 # Hack to remove icu because the diff is too different.
57 if results.group(1).find('icu') != -1:
58 continue
59 # We remove "_gyp" from the names because it helps to diff them.
60 current_project = results.group(1).replace('_gyp', '')
61 projects[current_project] = [results.group(2).replace('_gyp', ''),
62 results.group(3),
63 results.group(2)]
64 dependencies[current_project] = []
65 continue
66
67 results = end_project.search(line)
68 if results:
69 current_project = None
70 continue
71
72 results = begin_dep.search(line)
73 if results:
74 in_deps = True
75 continue
76
77 results = end_dep.search(line)
78 if results:
79 in_deps = False
80 continue
81
82 results = dep_line.search(line)
83 if results and in_deps and current_project:
84 dependencies[current_project].append(results.group(1))
85 continue
86
87 # Change all dependencies clsid to name instead.
88 for project in dependencies:
89 # For each dependencies in this project
90 new_dep_array = []
91 for dep in dependencies[project]:
92 # Look for the project name matching this cldis
93 for project_info in projects:
94 if projects[project_info][1] == dep:
95 new_dep_array.append(project_info)
96 dependencies[project] = sorted(new_dep_array)
97
98 return (projects, dependencies)
99
100 def PrintDependencies(projects, deps):
101 print "---------------------------------------"
102 print "Dependencies for all projects"
103 print "---------------------------------------"
104 print "-- --"
105
106 for (project, dep_list) in sorted(deps.items()):
107 print "Project : %s" % project
108 print "Path : %s" % projects[project][0]
109 if dep_list:
110 for dep in dep_list:
111 print " - %s" % dep
112 print ""
113
114 print "-- --"
115
116 def PrintBuildOrder(projects, deps):
117 print "---------------------------------------"
118 print "Build order "
119 print "---------------------------------------"
120 print "-- --"
121
122 built = []
123 for (project, dep_list) in sorted(deps.items()):
124 if project not in built:
125 BuildProject(project, built, projects, deps)
126
127 print "-- --"
128
129 def PrintVCProj(projects):
130
131 for project in projects:
132 print "-------------------------------------"
133 print "-------------------------------------"
134 print project
135 print project
136 print project
137 print "-------------------------------------"
138 print "-------------------------------------"
139
140 project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
141 projects[project][2]))
142
143 pretty = pretty_vcproj
144 argv = [ '',
145 project_path,
146 '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
147 ]
148 argv.extend(sys.argv[3:])
149 pretty.main(argv)
150
151 def main():
152 # check if we have exactly 1 parameter.
153 if len(sys.argv) < 2:
154 print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
155 return
156
157 (projects, deps) = ParseSolution(sys.argv[1])
158 PrintDependencies(projects, deps)
159 PrintBuildOrder(projects, deps)
160
161 if '--recursive' in sys.argv:
162 PrintVCProj(projects)
163
164 if __name__ == '__main__':
165 main()
166
+0
-316
mozc_build_tools/gyp/tools/pretty_vcproj.py less more
0 #!/usr/bin/python2.5
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Make the format of a vcproj really pretty.
7
8 This script normalize and sort an xml. It also fetches all the properties
9 inside linked vsprops and include them explicitly in the vcproj.
10
11 It outputs the resulting xml to stdout.
12 """
13
14 __author__ = 'nsylvain (Nicolas Sylvain)'
15
16 import os
17 import sys
18
19 from xml.dom.minidom import parse
20 from xml.dom.minidom import Node
21
22 REPLACEMENTS = dict()
23 ARGUMENTS = None
24
25 class CmpTuple:
26 """Compare function between 2 tuple."""
27 def __call__(self, x, y):
28 (key1, value1) = x
29 (key2, value2) = y
30 return cmp(key1, key2)
31
32 class CmpNode:
33 """Compare function between 2 xml nodes."""
34
35 def get_string(self, node):
36 node_string = "node"
37 node_string += node.nodeName
38 if node.nodeValue:
39 node_string += node.nodeValue
40
41 if node.attributes:
42 # We first sort by name, if present.
43 node_string += node.getAttribute("Name")
44
45 all_nodes = []
46 for (name, value) in node.attributes.items():
47 all_nodes.append((name, value))
48
49 all_nodes.sort(CmpTuple())
50 for (name, value) in all_nodes:
51 node_string += name
52 node_string += value
53
54 return node_string
55
56 def __call__(self, x, y):
57 return cmp(self.get_string(x), self.get_string(y))
58
59 def PrettyPrintNode(node, indent=0):
60 if node.nodeType == Node.TEXT_NODE:
61 if node.data.strip():
62 print '%s%s' % (' '*indent, node.data.strip())
63 return
64
65 if node.childNodes:
66 node.normalize()
67 # Get the number of attributes
68 attr_count = 0
69 if node.attributes:
70 attr_count = node.attributes.length
71
72 # Print the main tag
73 if attr_count == 0:
74 print '%s<%s>' % (' '*indent, node.nodeName)
75 else:
76 print '%s<%s' % (' '*indent, node.nodeName)
77
78 all_attributes = []
79 for (name, value) in node.attributes.items():
80 all_attributes.append((name, value))
81 all_attributes.sort(CmpTuple())
82 for (name, value) in all_attributes:
83 print '%s %s="%s"' % (' '*indent, name, value)
84 print '%s>' % (' '*indent)
85 if node.nodeValue:
86 print '%s %s' % (' '*indent, node.nodeValue)
87
88 for sub_node in node.childNodes:
89 PrettyPrintNode(sub_node, indent=indent+2)
90 print '%s</%s>' % (' '*indent, node.nodeName)
91
92 def FlattenFilter(node):
93 """Returns a list of all the node and sub nodes."""
94 node_list = []
95
96 if (node.attributes and
97 node.getAttribute('Name') == '_excluded_files'):
98 # We don't add the "_excluded_files" filter.
99 return []
100
101 for current in node.childNodes:
102 if current.nodeName == 'Filter':
103 node_list.extend(FlattenFilter(current))
104 else:
105 node_list.append(current)
106
107 return node_list
108
109 def FixFilenames(filenames, current_directory):
110 new_list = []
111 for filename in filenames:
112 if filename:
113 for key in REPLACEMENTS:
114 filename = filename.replace(key, REPLACEMENTS[key])
115 os.chdir(current_directory)
116 filename = filename.strip('"\' ')
117 if filename.startswith('$'):
118 new_list.append(filename)
119 else:
120 new_list.append(os.path.abspath(filename))
121 return new_list
122
123 def AbsoluteNode(node):
124 # Make all the properties we know about in this node absolute.
125 if node.attributes:
126 for (name, value) in node.attributes.items():
127 if name in ['InheritedPropertySheets', 'RelativePath',
128 'AdditionalIncludeDirectories',
129 'IntermediateDirectory', 'OutputDirectory',
130 'AdditionalLibraryDirectories']:
131 # We want to fix up these paths
132 path_list = value.split(';')
133 new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
134 node.setAttribute(name, ';'.join(new_list))
135 if not value:
136 node.removeAttribute(name)
137
138 def CleanupVcproj(node):
139 # For each sub node, we call recursively this function.
140 for sub_node in node.childNodes:
141 AbsoluteNode(sub_node)
142 CleanupVcproj(sub_node)
143
144 # Normalize the node, and remove all extranous whitespaces.
145 for sub_node in node.childNodes:
146 if sub_node.nodeType == Node.TEXT_NODE:
147 sub_node.data = sub_node.data.replace("\r", "")
148 sub_node.data = sub_node.data.replace("\n", "")
149 sub_node.data = sub_node.data.rstrip()
150
151 # Fix all the semicolon separated attributes to be sorted, and we also
152 # remove the dups.
153 if node.attributes:
154 for (name, value) in node.attributes.items():
155 sorted_list = sorted(value.split(';'))
156 unique_list = []
157 [unique_list.append(i) for i in sorted_list if not unique_list.count(i)]
158 node.setAttribute(name, ';'.join(unique_list))
159 if not value:
160 node.removeAttribute(name)
161
162 if node.childNodes:
163 node.normalize()
164
165 # For each node, take a copy, and remove it from the list.
166 node_array = []
167 while node.childNodes and node.childNodes[0]:
168 # Take a copy of the node and remove it from the list.
169 current = node.childNodes[0]
170 node.removeChild(current)
171
172 # If the child is a filter, we want to append all its children
173 # to this same list.
174 if current.nodeName == 'Filter':
175 node_array.extend(FlattenFilter(current))
176 else:
177 node_array.append(current)
178
179
180 # Sort the list.
181 node_array.sort(CmpNode())
182
183 # Insert the nodes in the correct order.
184 for new_node in node_array:
185 # But don't append empty tool node.
186 if new_node.nodeName == 'Tool':
187 if new_node.attributes and new_node.attributes.length == 1:
188 # This one was empty.
189 continue
190 if new_node.nodeName == 'UserMacro':
191 continue
192 node.appendChild(new_node)
193
194 def GetConfiguationNodes(vcproj):
195 #TODO(nsylvain): Find a better way to navigate the xml.
196 nodes = []
197 for node in vcproj.childNodes:
198 if node.nodeName == "Configurations":
199 for sub_node in node.childNodes:
200 if sub_node.nodeName == "Configuration":
201 nodes.append(sub_node)
202
203 return nodes
204
205 def GetChildrenVsprops(filename):
206 dom = parse(filename)
207 if dom.documentElement.attributes:
208 vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
209 return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
210 return []
211
212 def SeekToNode(node1, child2):
213 # A text node does not have properties.
214 if child2.nodeType == Node.TEXT_NODE:
215 return None
216
217 # Get the name of the current node.
218 current_name = child2.getAttribute("Name")
219 if not current_name:
220 # There is no name. We don't know how to merge.
221 return None
222
223 # Look through all the nodes to find a match.
224 for sub_node in node1.childNodes:
225 if sub_node.nodeName == child2.nodeName:
226 name = sub_node.getAttribute("Name")
227 if name == current_name:
228 return sub_node
229
230 # No match. We give up.
231 return None
232
233 def MergeAttributes(node1, node2):
234 # No attributes to merge?
235 if not node2.attributes:
236 return
237
238 for (name, value2) in node2.attributes.items():
239 # Don't merge the 'Name' attribute.
240 if name == 'Name':
241 continue
242 value1 = node1.getAttribute(name)
243 if value1:
244 # The attribute exist in the main node. If it's equal, we leave it
245 # untouched, otherwise we concatenate it.
246 if value1 != value2:
247 node1.setAttribute(name, ';'.join([value1, value2]))
248 else:
249 # The attribute does nto exist in the main node. We append this one.
250 node1.setAttribute(name, value2)
251
252 # If the attribute was a property sheet attributes, we remove it, since
253 # they are useless.
254 if name == 'InheritedPropertySheets':
255 node1.removeAttribute(name)
256
257 def MergeProperties(node1, node2):
258 MergeAttributes(node1, node2)
259 for child2 in node2.childNodes:
260 child1 = SeekToNode(node1, child2)
261 if child1:
262 MergeProperties(child1, child2)
263 else:
264 node1.appendChild(child2.cloneNode(True))
265
266 def main(argv):
267 global REPLACEMENTS
268 global ARGUMENTS
269 ARGUMENTS = argv
270 """Main function of this vcproj prettifier."""
271
272 # check if we have exactly 1 parameter.
273 if len(argv) < 2:
274 print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
275 '[key2=value2]' % argv[0])
276 return
277
278 # Parse the keys
279 for i in range(2, len(argv)):
280 (key, value) = argv[i].split('=')
281 REPLACEMENTS[key] = value
282
283 # Open the vcproj and parse the xml.
284 dom = parse(argv[1])
285
286 # First thing we need to do is find the Configuration Node and merge them
287 # with the vsprops they include.
288 for configuration_node in GetConfiguationNodes(dom.documentElement):
289 # Get the property sheets associated with this configuration.
290 vsprops = configuration_node.getAttribute('InheritedPropertySheets')
291
292 # Fix the filenames to be absolute.
293 vsprops_list = FixFilenames(vsprops.strip().split(';'),
294 os.path.dirname(argv[1]))
295
296 # Extend the list of vsprops with all vsprops contained in the current
297 # vsprops.
298 for current_vsprops in vsprops_list:
299 vsprops_list.extend(GetChildrenVsprops(current_vsprops))
300
301 # Now that we have all the vsprops, we need to merge them.
302 for current_vsprops in vsprops_list:
303 MergeProperties(configuration_node,
304 parse(current_vsprops).documentElement)
305
306 # Now that everything is merged, we need to cleanup the xml.
307 CleanupVcproj(dom.documentElement)
308
309 # Finally, we use the prett xml function to print the vcproj back to the
310 # user.
311 #print dom.toprettyxml(newl="\n")
312 PrettyPrintNode(dom.documentElement)
313
314 if __name__ == '__main__':
315 main(sys.argv)
00 MAJOR=0
11 MINOR=12
2 BUILD=410
2 BUILD=422
33 REVISION=102
+0
-5
third_party/gyp/AUTHORS less more
0 # Names should be added to this file like so:
1 # Name or Organization <email address>
2
3 Google Inc.
4 Steven Knight <knight@baldmt.com>
+0
-8
third_party/gyp/DEPS less more
0 # DEPS file for gclient use in buildbot execution of gyp tests.
1 #
2 # (You don't need to use gclient for normal GYP development work.)
3
4 deps = {
5 "scons":
6 "svn://chrome-svn.corp.google.com/chrome/trunk/src/third_party/scons",
7 }
+0
-27
third_party/gyp/LICENSE less more
0 Copyright (c) 2009 Google Inc. All rights reserved.
1
2 Redistribution and use in source and binary forms, with or without
3 modification, are permitted provided that the following conditions are
4 met:
5
6 * Redistributions of source code must retain the above copyright
7 notice, this list of conditions and the following disclaimer.
8 * Redistributions in binary form must reproduce the above
9 copyright notice, this list of conditions and the following disclaimer
10 in the documentation and/or other materials provided with the
11 distribution.
12 * Neither the name of Google Inc. nor the names of its
13 contributors may be used to endorse or promote products derived from
14 this software without specific prior written permission.
15
16 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+0
-21
third_party/gyp/MANIFEST less more
0 setup.py
1 gyp
2 LICENSE
3 AUTHORS
4 pylib/gyp/MSVSNew.py
5 pylib/gyp/MSVSProject.py
6 pylib/gyp/MSVSToolFile.py
7 pylib/gyp/MSVSUserFile.py
8 pylib/gyp/MSVSVersion.py
9 pylib/gyp/SCons.py
10 pylib/gyp/__init__.py
11 pylib/gyp/common.py
12 pylib/gyp/input.py
13 pylib/gyp/xcodeproj_file.py
14 pylib/gyp/generator/__init__.py
15 pylib/gyp/generator/gypd.py
16 pylib/gyp/generator/gypsh.py
17 pylib/gyp/generator/make.py
18 pylib/gyp/generator/msvs.py
19 pylib/gyp/generator/scons.py
20 pylib/gyp/generator/xcode.py
+0
-53
third_party/gyp/PRESUBMIT.py less more
0 # Copyright 2010, Google Inc.
1 # All rights reserved.
2 #
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions are
5 # met:
6 #
7 # * Redistributions of source code must retain the above copyright
8 # notice, this list of conditions and the following disclaimer.
9 # * Redistributions in binary form must reproduce the above
10 # copyright notice, this list of conditions and the following disclaimer
11 # in the documentation and/or other materials provided with the
12 # distribution.
13 # * Neither the name of Google Inc. nor the names of its
14 # contributors may be used to endorse or promote products derived from
15 # this software without specific prior written permission.
16 #
17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
29
30 EXCLUDED_PATHS = ()
31
32
33 def CheckChangeOnUpload(input_api, output_api):
34 report = []
35 black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS
36 sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
37 report.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
38 input_api, output_api, sources))
39 return report
40
41
42 def CheckChangeOnCommit(input_api, output_api):
43 report = []
44 black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS
45 sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
46 report.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
47 input_api, output_api, sources))
48 report.extend(input_api.canned_checks.CheckTreeIsOpen(
49 input_api, output_api,
50 'http://gyp-status.appspot.com/status',
51 'http://gyp-status.appspot.com/current'))
52 return report
+0
-10
third_party/gyp/codereview.settings less more
0 # This file is used by gcl to get repository specific information.
1 CODE_REVIEW_SERVER: codereview.chromium.org
2 CC_LIST: gyp-developer@googlegroups.com
3 VIEW_VC: http://code.google.com/p/gyp/source/detail?r=
4 TRY_ON_UPLOAD: True
5 TRYSERVER_PROJECT: gyp
6 TRYSERVER_PATCHLEVEL: 0
7 TRYSERVER_ROOT: trunk
8 TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl
9
+0
-18
third_party/gyp/gyp less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 # TODO(mark): sys.path manipulation is some temporary testing stuff.
9 try:
10 import gyp
11 except ImportError, e:
12 import os.path
13 sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
14 import gyp
15
16 if __name__ == '__main__':
17 sys.exit(gyp.main(sys.argv[1:]))
+0
-5
third_party/gyp/gyp.bat less more
0 @rem Copyright (c) 2009 Google Inc. All rights reserved.
1 @rem Use of this source code is governed by a BSD-style license that can be
2 @rem found in the LICENSE file.
3
4 @python "%~dp0/gyp" %*
+0
-7
third_party/gyp/gyp_dummy.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 int main() {
5 return 0;
6 }
+0
-255
third_party/gyp/gyptest.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 __doc__ = """
7 gyptest.py -- test runner for GYP tests.
8 """
9
10 import os
11 import optparse
12 import subprocess
13 import sys
14
15 class CommandRunner:
16 """
17 Executor class for commands, including "commands" implemented by
18 Python functions.
19 """
20 verbose = True
21 active = True
22
23 def __init__(self, dictionary={}):
24 self.subst_dictionary(dictionary)
25
26 def subst_dictionary(self, dictionary):
27 self._subst_dictionary = dictionary
28
29 def subst(self, string, dictionary=None):
30 """
31 Substitutes (via the format operator) the values in the specified
32 dictionary into the specified command.
33
34 The command can be an (action, string) tuple. In all cases, we
35 perform substitution on strings and don't worry if something isn't
36 a string. (It's probably a Python function to be executed.)
37 """
38 if dictionary is None:
39 dictionary = self._subst_dictionary
40 if dictionary:
41 try:
42 string = string % dictionary
43 except TypeError:
44 pass
45 return string
46
47 def display(self, command, stdout=None, stderr=None):
48 if not self.verbose:
49 return
50 if type(command) == type(()):
51 func = command[0]
52 args = command[1:]
53 s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args)))
54 if type(command) == type([]):
55 # TODO: quote arguments containing spaces
56 # TODO: handle meta characters?
57 s = ' '.join(command)
58 else:
59 s = self.subst(command)
60 if not s.endswith('\n'):
61 s += '\n'
62 sys.stdout.write(s)
63 sys.stdout.flush()
64
65 def execute(self, command, stdout=None, stderr=None):
66 """
67 Executes a single command.
68 """
69 if not self.active:
70 return 0
71 if type(command) == type(''):
72 command = self.subst(command)
73 cmdargs = shlex.split(command)
74 if cmdargs[0] == 'cd':
75 command = (os.chdir,) + tuple(cmdargs[1:])
76 if type(command) == type(()):
77 func = command[0]
78 args = command[1:]
79 return func(*args)
80 else:
81 if stdout is sys.stdout:
82 # Same as passing sys.stdout, except python2.4 doesn't fail on it.
83 subout = None
84 else:
85 # Open pipe for anything else so Popen works on python2.4.
86 subout = subprocess.PIPE
87 if stderr is sys.stderr:
88 # Same as passing sys.stderr, except python2.4 doesn't fail on it.
89 suberr = None
90 elif stderr is None:
91 # Merge with stdout if stderr isn't specified.
92 suberr = subprocess.STDOUT
93 else:
94 # Open pipe for anything else so Popen works on python2.4.
95 suberr = subprocess.PIPE
96 p = subprocess.Popen(command,
97 shell=(sys.platform == 'win32'),
98 stdout=subout,
99 stderr=suberr)
100 p.wait()
101 if stdout is None:
102 self.stdout = p.stdout.read()
103 elif stdout is not sys.stdout:
104 stdout.write(p.stdout.read())
105 if stderr not in (None, sys.stderr):
106 stderr.write(p.stderr.read())
107 return p.returncode
108
109 def run(self, command, display=None, stdout=None, stderr=None):
110 """
111 Runs a single command, displaying it first.
112 """
113 if display is None:
114 display = command
115 self.display(display)
116 return self.execute(command, stdout, stderr)
117
118
119 class Unbuffered:
120 def __init__(self, fp):
121 self.fp = fp
122 def write(self, arg):
123 self.fp.write(arg)
124 self.fp.flush()
125 def __getattr__(self, attr):
126 return getattr(self.fp, attr)
127
128 sys.stdout = Unbuffered(sys.stdout)
129 sys.stderr = Unbuffered(sys.stderr)
130
131
132 def find_all_gyptest_files(directory):
133 result = []
134 for root, dirs, files in os.walk(directory):
135 if '.svn' in dirs:
136 dirs.remove('.svn')
137 result.extend([ os.path.join(root, f) for f in files
138 if f.startswith('gyptest') and f.endswith('.py') ])
139 result.sort()
140 return result
141
142
143 def main(argv=None):
144 if argv is None:
145 argv = sys.argv
146
147 usage = "gyptest.py [-ahlnq] [-f formats] [test ...]"
148 parser = optparse.OptionParser(usage=usage)
149 parser.add_option("-a", "--all", action="store_true",
150 help="run all tests")
151 parser.add_option("-C", "--chdir", action="store", default=None,
152 help="chdir to the specified directory")
153 parser.add_option("-f", "--format", action="store", default='',
154 help="run tests with the specified formats")
155 parser.add_option("-l", "--list", action="store_true",
156 help="list available tests and exit")
157 parser.add_option("-n", "--no-exec", action="store_true",
158 help="no execute, just print the command line")
159 parser.add_option("--passed", action="store_true",
160 help="report passed tests")
161 parser.add_option("--path", action="append", default=[],
162 help="additional $PATH directory")
163 parser.add_option("-q", "--quiet", action="store_true",
164 help="quiet, don't print test command lines")
165 opts, args = parser.parse_args(argv[1:])
166
167 if opts.chdir:
168 os.chdir(opts.chdir)
169
170 if opts.path:
171 os.environ['PATH'] += ':' + ':'.join(opts.path)
172
173 if not args:
174 if not opts.all:
175 sys.stderr.write('Specify -a to get all tests.\n')
176 return 1
177 args = ['test']
178
179 tests = []
180 for arg in args:
181 if os.path.isdir(arg):
182 tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
183 else:
184 tests.append(arg)
185
186 if opts.list:
187 for test in tests:
188 print test
189 sys.exit(0)
190
191 CommandRunner.verbose = not opts.quiet
192 CommandRunner.active = not opts.no_exec
193 cr = CommandRunner()
194
195 os.environ['PYTHONPATH'] = os.path.abspath('test/lib')
196 if not opts.quiet:
197 sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH'])
198
199 passed = []
200 failed = []
201 no_result = []
202
203 if opts.format:
204 format_list = opts.format.split(',')
205 else:
206 # TODO: not duplicate this mapping from pylib/gyp/__init__.py
207 format_list = [ {
208 'freebsd7': 'make',
209 'freebsd8': 'make',
210 'cygwin': 'msvs',
211 'win32': 'msvs',
212 'linux2': 'make',
213 'darwin': 'xcode',
214 }[sys.platform] ]
215
216 for format in format_list:
217 os.environ['TESTGYP_FORMAT'] = format
218 if not opts.quiet:
219 sys.stdout.write('TESTGYP_FORMAT=%s\n' % format)
220
221 for test in tests:
222 status = cr.run([sys.executable, test],
223 stdout=sys.stdout,
224 stderr=sys.stderr)
225 if status == 2:
226 no_result.append(test)
227 elif status:
228 failed.append(test)
229 else:
230 passed.append(test)
231
232 if not opts.quiet:
233 def report(description, tests):
234 if tests:
235 if len(tests) == 1:
236 sys.stdout.write("\n%s the following test:\n" % description)
237 else:
238 fmt = "\n%s the following %d tests:\n"
239 sys.stdout.write(fmt % (description, len(tests)))
240 sys.stdout.write("\t" + "\n\t".join(tests) + "\n")
241
242 if opts.passed:
243 report("Passed", passed)
244 report("Failed", failed)
245 report("No result from", no_result)
246
247 if failed:
248 return 1
249 else:
250 return 0
251
252
253 if __name__ == "__main__":
254 sys.exit(main())
+0
-331
third_party/gyp/pylib/gyp/MSVSNew.py less more
0 #!/usr/bin/python2.4
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """New implementation of Visual Studio project generation for SCons."""
7
8 import common
9 import os
10 import random
11
12 # hashlib is supplied as of Python 2.5 as the replacement interface for md5
13 # and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
14 # available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
15 # preserving 2.4 compatibility.
16 try:
17 import hashlib
18 _new_md5 = hashlib.md5
19 except ImportError:
20 import md5
21 _new_md5 = md5.new
22
23
24 # Initialize random number generator
25 random.seed()
26
27 # GUIDs for project types
28 ENTRY_TYPE_GUIDS = {
29 'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
30 'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
31 }
32
33 #------------------------------------------------------------------------------
34 # Helper functions
35
36
37 def MakeGuid(name, seed='msvs_new'):
38 """Returns a GUID for the specified target name.
39
40 Args:
41 name: Target name.
42 seed: Seed for MD5 hash.
43 Returns:
44 A GUID-line string calculated from the name and seed.
45
46 This generates something which looks like a GUID, but depends only on the
47 name and seed. This means the same name/seed will always generate the same
48 GUID, so that projects and solutions which refer to each other can explicitly
49 determine the GUID to refer to explicitly. It also means that the GUID will
50 not change when the project for a target is rebuilt.
51 """
52 # Calculate a MD5 signature for the seed and name.
53 d = _new_md5(str(seed) + str(name)).hexdigest().upper()
54 # Convert most of the signature to GUID form (discard the rest)
55 guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
56 + '-' + d[20:32] + '}')
57 return guid
58
59 #------------------------------------------------------------------------------
60
61
62 class MSVSFolder:
63 """Folder in a Visual Studio project or solution."""
64
65 def __init__(self, path, name = None, entries = None,
66 guid = None, items = None):
67 """Initializes the folder.
68
69 Args:
70 path: Full path to the folder.
71 name: Name of the folder.
72 entries: List of folder entries to nest inside this folder. May contain
73 Folder or Project objects. May be None, if the folder is empty.
74 guid: GUID to use for folder, if not None.
75 items: List of solution items to include in the folder project. May be
76 None, if the folder does not directly contain items.
77 """
78 if name:
79 self.name = name
80 else:
81 # Use last layer.
82 self.name = os.path.basename(path)
83
84 self.path = path
85 self.guid = guid
86
87 # Copy passed lists (or set to empty lists)
88 self.entries = list(entries or [])
89 self.items = list(items or [])
90
91 self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']
92
93 def get_guid(self):
94 if self.guid is None:
95 # Use consistent guids for folders (so things don't regenerate).
96 self.guid = MakeGuid(self.path, seed='msvs_folder')
97 return self.guid
98
99
100 #------------------------------------------------------------------------------
101
102
103 class MSVSProject:
104 """Visual Studio project."""
105
106 def __init__(self, path, name = None, dependencies = None, guid = None,
107 config_platform_overrides = None):
108 """Initializes the project.
109
110 Args:
111 path: Relative path to project file.
112 name: Name of project. If None, the name will be the same as the base
113 name of the project file.
114 dependencies: List of other Project objects this project is dependent
115 upon, if not None.
116 guid: GUID to use for project, if not None.
117 config_platform_overrides: optional dict of configuration platforms to
118 used in place of the default for this target.
119 """
120 self.path = path
121 self.guid = guid
122
123 if name:
124 self.name = name
125 else:
126 # Use project filename
127 self.name = os.path.splitext(os.path.basename(path))[0]
128
129 # Copy passed lists (or set to empty lists)
130 self.dependencies = list(dependencies or [])
131
132 self.entry_type_guid = ENTRY_TYPE_GUIDS['project']
133
134 if config_platform_overrides:
135 self.config_platform_overrides = config_platform_overrides
136 else:
137 self.config_platform_overrides = {}
138
139 def get_guid(self):
140 if self.guid is None:
141 # Set GUID from path
142 # TODO(rspangler): This is fragile.
143 # 1. We can't just use the project filename sans path, since there could
144 # be multiple projects with the same base name (for example,
145 # foo/unittest.vcproj and bar/unittest.vcproj).
146 # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
147 # GUID is the same whether it's included from base/base.sln or
148 # foo/bar/baz/baz.sln.
149 # 3. The GUID needs to be the same each time this builder is invoked, so
150 # that we don't need to rebuild the solution when the project changes.
151 # 4. We should be able to handle pre-built project files by reading the
152 # GUID from the files.
153 self.guid = MakeGuid(self.name)
154 return self.guid
155
156 #------------------------------------------------------------------------------
157
158
159 class MSVSSolution:
160 """Visual Studio solution."""
161
162 def __init__(self, path, version, entries=None, variants=None,
163 websiteProperties=True):
164 """Initializes the solution.
165
166 Args:
167 path: Path to solution file.
168 version: Format version to emit.
169 entries: List of entries in solution. May contain Folder or Project
170 objects. May be None, if the folder is empty.
171 variants: List of build variant strings. If none, a default list will
172 be used.
173 websiteProperties: Flag to decide if the website properties section
174 is generated.
175 """
176 self.path = path
177 self.websiteProperties = websiteProperties
178 self.version = version
179
180 # Copy passed lists (or set to empty lists)
181 self.entries = list(entries or [])
182
183 if variants:
184 # Copy passed list
185 self.variants = variants[:]
186 else:
187 # Use default
188 self.variants = ['Debug|Win32', 'Release|Win32']
189 # TODO(rspangler): Need to be able to handle a mapping of solution config
190 # to project config. Should we be able to handle variants being a dict,
191 # or add a separate variant_map variable? If it's a dict, we can't
192 # guarantee the order of variants since dict keys aren't ordered.
193
194
195 # TODO(rspangler): Automatically write to disk for now; should delay until
196 # node-evaluation time.
197 self.Write()
198
199
200 def Write(self, writer=common.WriteOnDiff):
201 """Writes the solution file to disk.
202
203 Raises:
204 IndexError: An entry appears multiple times.
205 """
206 # Walk the entry tree and collect all the folders and projects.
207 all_entries = []
208 entries_to_check = self.entries[:]
209 while entries_to_check:
210 # Pop from the beginning of the list to preserve the user's order.
211 e = entries_to_check.pop(0)
212
213 # A project or folder can only appear once in the solution's folder tree.
214 # This also protects from cycles.
215 if e in all_entries:
216 #raise IndexError('Entry "%s" appears more than once in solution' %
217 # e.name)
218 continue
219
220 all_entries.append(e)
221
222 # If this is a folder, check its entries too.
223 if isinstance(e, MSVSFolder):
224 entries_to_check += e.entries
225
226 # Sort by name then guid (so things are in order on vs2008).
227 def NameThenGuid(a, b):
228 if a.name < b.name: return -1
229 if a.name > b.name: return 1
230 if a.get_guid() < b.get_guid(): return -1
231 if a.get_guid() > b.get_guid(): return 1
232 return 0
233
234 all_entries = sorted(all_entries, NameThenGuid)
235
236 # Open file and print header
237 f = writer(self.path)
238 f.write('Microsoft Visual Studio Solution File, '
239 'Format Version %s\r\n' % self.version.SolutionVersion())
240 f.write('# %s\r\n' % self.version.Description())
241
242 # Project entries
243 for e in all_entries:
244 f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
245 e.entry_type_guid, # Entry type GUID
246 e.name, # Folder name
247 e.path.replace('/', '\\'), # Folder name (again)
248 e.get_guid(), # Entry GUID
249 ))
250
251 # TODO(rspangler): Need a way to configure this stuff
252 if self.websiteProperties:
253 f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
254 '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
255 '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
256 '\tEndProjectSection\r\n')
257
258 if isinstance(e, MSVSFolder):
259 if e.items:
260 f.write('\tProjectSection(SolutionItems) = preProject\r\n')
261 for i in e.items:
262 f.write('\t\t%s = %s\r\n' % (i, i))
263 f.write('\tEndProjectSection\r\n')
264
265 if isinstance(e, MSVSProject):
266 if e.dependencies:
267 f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
268 for d in e.dependencies:
269 f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
270 f.write('\tEndProjectSection\r\n')
271
272 f.write('EndProject\r\n')
273
274 # Global section
275 f.write('Global\r\n')
276
277 # Configurations (variants)
278 f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
279 for v in self.variants:
280 f.write('\t\t%s = %s\r\n' % (v, v))
281 f.write('\tEndGlobalSection\r\n')
282
283 # Sort config guids for easier diffing of solution changes.
284 config_guids = []
285 config_guids_overrides = {}
286 for e in all_entries:
287 if isinstance(e, MSVSProject):
288 config_guids.append(e.get_guid())
289 config_guids_overrides[e.get_guid()] = e.config_platform_overrides
290 config_guids.sort()
291
292 f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
293 for g in config_guids:
294 for v in self.variants:
295 nv = config_guids_overrides[g].get(v, v)
296 # Pick which project configuration to build for this solution
297 # configuration.
298 f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
299 g, # Project GUID
300 v, # Solution build configuration
301 nv, # Project build config for that solution config
302 ))
303
304 # Enable project in this solution configuration.
305 f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
306 g, # Project GUID
307 v, # Solution build configuration
308 nv, # Project build config for that solution config
309 ))
310 f.write('\tEndGlobalSection\r\n')
311
312 # TODO(rspangler): Should be able to configure this stuff too (though I've
313 # never seen this be any different)
314 f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
315 f.write('\t\tHideSolutionNode = FALSE\r\n')
316 f.write('\tEndGlobalSection\r\n')
317
318 # Folder mappings
319 # TODO(rspangler): Should omit this section if there are no folders
320 f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
321 for e in all_entries:
322 if not isinstance(e, MSVSFolder):
323 continue # Does not apply to projects, only folders
324 for subentry in e.entries:
325 f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
326 f.write('\tEndGlobalSection\r\n')
327
328 f.write('EndGlobal\r\n')
329
330 f.close()
+0
-244
third_party/gyp/pylib/gyp/MSVSProject.py less more
0 #!/usr/bin/python2.4
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Visual Studio project reader/writer."""
7
8 import common
9 import xml.dom
10 import xml.dom.minidom
11 import MSVSNew
12
13 #------------------------------------------------------------------------------
14
15
16 class Tool(object):
17 """Visual Studio tool."""
18
19 def __init__(self, name, attrs=None):
20 """Initializes the tool.
21
22 Args:
23 name: Tool name.
24 attrs: Dict of tool attributes; may be None.
25 """
26 self.name = name
27 self.attrs = attrs or {}
28
29 def CreateElement(self, doc):
30 """Creates an element for the tool.
31
32 Args:
33 doc: xml.dom.Document object to use for node creation.
34
35 Returns:
36 A new xml.dom.Element for the tool.
37 """
38 node = doc.createElement('Tool')
39 node.setAttribute('Name', self.name)
40 for k, v in self.attrs.items():
41 node.setAttribute(k, v)
42 return node
43
44
45 class Filter(object):
46 """Visual Studio filter - that is, a virtual folder."""
47
48 def __init__(self, name, contents=None):
49 """Initializes the folder.
50
51 Args:
52 name: Filter (folder) name.
53 contents: List of filenames and/or Filter objects contained.
54 """
55 self.name = name
56 self.contents = list(contents or [])
57
58
59 #------------------------------------------------------------------------------
60
61
62 class Writer(object):
63 """Visual Studio XML project writer."""
64
65 def __init__(self, project_path, version):
66 """Initializes the project.
67
68 Args:
69 project_path: Path to the project file.
70 version: Format version to emit.
71 """
72 self.project_path = project_path
73 self.doc = None
74 self.version = version
75
76 def Create(self, name, guid=None, platforms=None):
77 """Creates the project document.
78
79 Args:
80 name: Name of the project.
81 guid: GUID to use for project, if not None.
82 """
83 self.name = name
84 self.guid = guid or MSVSNew.MakeGuid(self.project_path)
85
86 # Default to Win32 for platforms.
87 if not platforms:
88 platforms = ['Win32']
89
90 # Create XML doc
91 xml_impl = xml.dom.getDOMImplementation()
92 self.doc = xml_impl.createDocument(None, 'VisualStudioProject', None)
93
94 # Add attributes to root element
95 self.n_root = self.doc.documentElement
96 self.n_root.setAttribute('ProjectType', 'Visual C++')
97 self.n_root.setAttribute('Version', self.version.ProjectVersion())
98 self.n_root.setAttribute('Name', self.name)
99 self.n_root.setAttribute('ProjectGUID', self.guid)
100 self.n_root.setAttribute('RootNamespace', self.name)
101 self.n_root.setAttribute('Keyword', 'Win32Proj')
102
103 # Add platform list
104 n_platform = self.doc.createElement('Platforms')
105 self.n_root.appendChild(n_platform)
106 for platform in platforms:
107 n = self.doc.createElement('Platform')
108 n.setAttribute('Name', platform)
109 n_platform.appendChild(n)
110
111 # Add tool files section
112 self.n_tool_files = self.doc.createElement('ToolFiles')
113 self.n_root.appendChild(self.n_tool_files)
114
115 # Add configurations section
116 self.n_configs = self.doc.createElement('Configurations')
117 self.n_root.appendChild(self.n_configs)
118
119 # Add empty References section
120 self.n_root.appendChild(self.doc.createElement('References'))
121
122 # Add files section
123 self.n_files = self.doc.createElement('Files')
124 self.n_root.appendChild(self.n_files)
125 # Keep a dict keyed on filename to speed up access.
126 self.n_files_dict = dict()
127
128 # Add empty Globals section
129 self.n_root.appendChild(self.doc.createElement('Globals'))
130
131 def AddToolFile(self, path):
132 """Adds a tool file to the project.
133
134 Args:
135 path: Relative path from project to tool file.
136 """
137 n_tool = self.doc.createElement('ToolFile')
138 n_tool.setAttribute('RelativePath', path)
139 self.n_tool_files.appendChild(n_tool)
140
141 def _AddConfigToNode(self, parent, config_type, config_name, attrs=None,
142 tools=None):
143 """Adds a configuration to the parent node.
144
145 Args:
146 parent: Destination node.
147 config_type: Type of configuration node.
148 config_name: Configuration name.
149 attrs: Dict of configuration attributes; may be None.
150 tools: List of tools (strings or Tool objects); may be None.
151 """
152 # Handle defaults
153 if not attrs:
154 attrs = {}
155 if not tools:
156 tools = []
157
158 # Add configuration node and its attributes
159 n_config = self.doc.createElement(config_type)
160 n_config.setAttribute('Name', config_name)
161 for k, v in attrs.items():
162 n_config.setAttribute(k, v)
163 parent.appendChild(n_config)
164
165 # Add tool nodes and their attributes
166 if tools:
167 for t in tools:
168 if isinstance(t, Tool):
169 n_config.appendChild(t.CreateElement(self.doc))
170 else:
171 n_config.appendChild(Tool(t).CreateElement(self.doc))
172
173 def AddConfig(self, name, attrs=None, tools=None):
174 """Adds a configuration to the project.
175
176 Args:
177 name: Configuration name.
178 attrs: Dict of configuration attributes; may be None.
179 tools: List of tools (strings or Tool objects); may be None.
180 """
181 self._AddConfigToNode(self.n_configs, 'Configuration', name, attrs, tools)
182
183 def _AddFilesToNode(self, parent, files):
184 """Adds files and/or filters to the parent node.
185
186 Args:
187 parent: Destination node
188 files: A list of Filter objects and/or relative paths to files.
189
190 Will call itself recursively, if the files list contains Filter objects.
191 """
192 for f in files:
193 if isinstance(f, Filter):
194 node = self.doc.createElement('Filter')
195 node.setAttribute('Name', f.name)
196 self._AddFilesToNode(node, f.contents)
197 else:
198 node = self.doc.createElement('File')
199 node.setAttribute('RelativePath', f)
200 self.n_files_dict[f] = node
201 parent.appendChild(node)
202
203 def AddFiles(self, files):
204 """Adds files to the project.
205
206 Args:
207 files: A list of Filter objects and/or relative paths to files.
208
209 This makes a copy of the file/filter tree at the time of this call. If you
210 later add files to a Filter object which was passed into a previous call
211 to AddFiles(), it will not be reflected in this project.
212 """
213 self._AddFilesToNode(self.n_files, files)
214 # TODO(rspangler) This also doesn't handle adding files to an existing
215 # filter. That is, it doesn't merge the trees.
216
217 def AddFileConfig(self, path, config, attrs=None, tools=None):
218 """Adds a configuration to a file.
219
220 Args:
221 path: Relative path to the file.
222 config: Name of configuration to add.
223 attrs: Dict of configuration attributes; may be None.
224 tools: List of tools (strings or Tool objects); may be None.
225
226 Raises:
227 ValueError: Relative path does not match any file added via AddFiles().
228 """
229 # Find the file node with the right relative path
230 parent = self.n_files_dict.get(path)
231 if not parent:
232 raise ValueError('AddFileConfig: file "%s" not in project.' % path)
233
234 # Add the config to the file node
235 self._AddConfigToNode(parent, 'FileConfiguration', config, attrs, tools)
236
237 def Write(self, writer=common.WriteOnDiff):
238 """Writes the project file."""
239 f = writer(self.project_path)
240 self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
241 f.close()
242
243 #------------------------------------------------------------------------------
+0
-79
third_party/gyp/pylib/gyp/MSVSToolFile.py less more
0 #!/usr/bin/python2.4
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Visual Studio project reader/writer."""
7
8 import common
9 import xml.dom
10 import xml.dom.minidom
11
12
13 #------------------------------------------------------------------------------
14
15
16 class Writer(object):
17 """Visual Studio XML tool file writer."""
18
19 def __init__(self, tool_file_path):
20 """Initializes the tool file.
21
22 Args:
23 tool_file_path: Path to the tool file.
24 """
25 self.tool_file_path = tool_file_path
26 self.doc = None
27
28 def Create(self, name):
29 """Creates the tool file document.
30
31 Args:
32 name: Name of the tool file.
33 """
34 self.name = name
35
36 # Create XML doc
37 xml_impl = xml.dom.getDOMImplementation()
38 self.doc = xml_impl.createDocument(None, 'VisualStudioToolFile', None)
39
40 # Add attributes to root element
41 self.n_root = self.doc.documentElement
42 self.n_root.setAttribute('Version', '8.00')
43 self.n_root.setAttribute('Name', self.name)
44
45 # Add rules section
46 self.n_rules = self.doc.createElement('Rules')
47 self.n_root.appendChild(self.n_rules)
48
49 def AddCustomBuildRule(self, name, cmd, description,
50 additional_dependencies,
51 outputs, extensions):
52 """Adds a rule to the tool file.
53
54 Args:
55 name: Name of the rule.
56 description: Description of the rule.
57 cmd: Command line of the rule.
58 additional_dependencies: other files which may trigger the rule.
59 outputs: outputs of the rule.
60 extensions: extensions handled by the rule.
61 """
62 n_rule = self.doc.createElement('CustomBuildRule')
63 n_rule.setAttribute('Name', name)
64 n_rule.setAttribute('ExecutionDescription', description)
65 n_rule.setAttribute('CommandLine', cmd)
66 n_rule.setAttribute('Outputs', ';'.join(outputs))
67 n_rule.setAttribute('FileExtensions', ';'.join(extensions))
68 n_rule.setAttribute('AdditionalDependencies',
69 ';'.join(additional_dependencies))
70 self.n_rules.appendChild(n_rule)
71
72 def Write(self, writer=common.WriteOnDiff):
73 """Writes the tool file."""
74 f = writer(self.tool_file_path)
75 self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
76 f.close()
77
78 #------------------------------------------------------------------------------
+0
-182
third_party/gyp/pylib/gyp/MSVSUserFile.py less more
0 #!/usr/bin/python2.4
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Visual Studio user preferences file writer."""
7
8 import common
9 import os
10 import re
11 import socket # for gethostname
12 import xml.dom
13 import xml.dom.minidom
14
15
16 #------------------------------------------------------------------------------
17
18 def _FindCommandInPath(command):
19 """If there are no slashes in the command given, this function
20 searches the PATH env to find the given command, and converts it
21 to an absolute path. We have to do this because MSVS is looking
22 for an actual file to launch a debugger on, not just a command
23 line. Note that this happens at GYP time, so anything needing to
24 be built needs to have a full path."""
25 if '/' in command or '\\' in command:
26 # If the command already has path elements (either relative or
27 # absolute), then assume it is constructed properly.
28 return command
29 else:
30 # Search through the path list and find an existing file that
31 # we can access.
32 paths = os.environ.get('PATH','').split(os.pathsep)
33 for path in paths:
34 item = os.path.join(path, command)
35 if os.path.isfile(item) and os.access(item, os.X_OK):
36 return item
37 return command
38
39 def _QuoteWin32CommandLineArgs(args):
40 new_args = []
41 for arg in args:
42 # Replace all double-quotes with double-double-quotes to escape
43 # them for cmd shell, and then quote the whole thing if there
44 # are any.
45 if arg.find('"') != -1:
46 arg = '""'.join(arg.split('"'))
47 arg = '"%s"' % arg
48
49 # Otherwise, if there are any spaces, quote the whole arg.
50 elif re.search(r'[ \t\n]', arg):
51 arg = '"%s"' % arg
52 new_args.append(arg)
53 return new_args
54
55 class Writer(object):
56 """Visual Studio XML user user file writer."""
57
58 def __init__(self, user_file_path, version):
59 """Initializes the user file.
60
61 Args:
62 user_file_path: Path to the user file.
63 """
64 self.user_file_path = user_file_path
65 self.version = version
66 self.doc = None
67
68 def Create(self, name):
69 """Creates the user file document.
70
71 Args:
72 name: Name of the user file.
73 """
74 self.name = name
75
76 # Create XML doc
77 xml_impl = xml.dom.getDOMImplementation()
78 self.doc = xml_impl.createDocument(None, 'VisualStudioUserFile', None)
79
80 # Add attributes to root element
81 self.n_root = self.doc.documentElement
82 self.n_root.setAttribute('Version', self.version.ProjectVersion())
83 self.n_root.setAttribute('Name', self.name)
84
85 # Add configurations section
86 self.n_configs = self.doc.createElement('Configurations')
87 self.n_root.appendChild(self.n_configs)
88
89 def _AddConfigToNode(self, parent, config_type, config_name):
90 """Adds a configuration to the parent node.
91
92 Args:
93 parent: Destination node.
94 config_type: Type of configuration node.
95 config_name: Configuration name.
96 """
97 # Add configuration node and its attributes
98 n_config = self.doc.createElement(config_type)
99 n_config.setAttribute('Name', config_name)
100 parent.appendChild(n_config)
101
102 def AddConfig(self, name):
103 """Adds a configuration to the project.
104
105 Args:
106 name: Configuration name.
107 """
108 self._AddConfigToNode(self.n_configs, 'Configuration', name)
109
110
111 def AddDebugSettings(self, config_name, command, environment = {},
112 working_directory=""):
113 """Adds a DebugSettings node to the user file for a particular config.
114
115 Args:
116 command: command line to run. First element in the list is the
117 executable. All elements of the command will be quoted if
118 necessary.
119 working_directory: other files which may trigger the rule. (optional)
120 """
121 command = _QuoteWin32CommandLineArgs(command)
122
123 n_cmd = self.doc.createElement('DebugSettings')
124 abs_command = _FindCommandInPath(command[0])
125 n_cmd.setAttribute('Command', abs_command)
126 n_cmd.setAttribute('WorkingDirectory', working_directory)
127 n_cmd.setAttribute('CommandArguments', " ".join(command[1:]))
128 n_cmd.setAttribute('RemoteMachine', socket.gethostname())
129
130 if environment and isinstance(environment, dict):
131 n_cmd.setAttribute('Environment',
132 " ".join(['%s="%s"' % (key, val)
133 for (key,val) in environment.iteritems()]))
134 else:
135 n_cmd.setAttribute('Environment', '')
136
137 n_cmd.setAttribute('EnvironmentMerge', 'true')
138
139 # Currently these are all "dummy" values that we're just setting
140 # in the default manner that MSVS does it. We could use some of
141 # these to add additional capabilities, I suppose, but they might
142 # not have parity with other platforms then.
143 n_cmd.setAttribute('Attach', 'false')
144 n_cmd.setAttribute('DebuggerType', '3') # 'auto' debugger
145 n_cmd.setAttribute('Remote', '1')
146 n_cmd.setAttribute('RemoteCommand', '')
147 n_cmd.setAttribute('HttpUrl', '')
148 n_cmd.setAttribute('PDBPath', '')
149 n_cmd.setAttribute('SQLDebugging', '')
150 n_cmd.setAttribute('DebuggerFlavor', '0')
151 n_cmd.setAttribute('MPIRunCommand', '')
152 n_cmd.setAttribute('MPIRunArguments', '')
153 n_cmd.setAttribute('MPIRunWorkingDirectory', '')
154 n_cmd.setAttribute('ApplicationCommand', '')
155 n_cmd.setAttribute('ApplicationArguments', '')
156 n_cmd.setAttribute('ShimCommand', '')
157 n_cmd.setAttribute('MPIAcceptMode', '')
158 n_cmd.setAttribute('MPIAcceptFilter', '')
159
160 # Find the config, and add it if it doesn't exist.
161 found = False
162 for config in self.n_configs.childNodes:
163 if config.getAttribute("Name") == config_name:
164 found = True
165
166 if not found:
167 self.AddConfig(config_name)
168
169 # Add the DebugSettings onto the appropriate config.
170 for config in self.n_configs.childNodes:
171 if config.getAttribute("Name") == config_name:
172 config.appendChild(n_cmd)
173 break
174
175 def Write(self, writer=common.WriteOnDiff):
176 """Writes the user file."""
177 f = writer(self.user_file_path)
178 self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
179 f.close()
180
181 #------------------------------------------------------------------------------
+0
-151
third_party/gyp/pylib/gyp/MSVSVersion.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Handle version information related to Visual Studio."""
7
8 import os
9 import re
10 import subprocess
11 import sys
12
13
class VisualStudioVersion:
  """Information regarding a version of Visual Studio."""

  def __init__(self, short_name, description,
               solution_version, project_version, flat_sln):
    self.short_name = short_name
    self.description = description
    self.solution_version = solution_version
    self.project_version = project_version
    self.flat_sln = flat_sln

  def ShortName(self):
    """Short identifier, e.g. '2008' or '2005e'."""
    return self.short_name

  def Description(self):
    """Full human-readable description of the version."""
    return self.description

  def SolutionVersion(self):
    """Version number written into .sln files."""
    return self.solution_version

  def ProjectVersion(self):
    """Version number written into .vcproj files."""
    return self.project_version

  def FlatSolution(self):
    """Whether solutions must be flat (no nested folders)."""
    return self.flat_sln
42
43
44 def _RegistryGetValue(key, value):
45 """Use reg.exe to read a paricular key.
46
47 While ideally we might use the win32 module, we would like gyp to be
48 python neutral, so for instance cygwin python lacks this module.
49
50 Arguments:
51 key: The registry key to read from.
52 value: The particular value to read.
53 Return:
54 The contents there, or None for failure.
55 """
56 # Skip if not on Windows.
57 if sys.platform not in ('win32', 'cygwin'):
58 return None
59 # Run reg.exe.
60 cmd = [os.path.join(os.environ.get('WINDIR', ''), 'System32', 'reg.exe'),
61 'query', key, '/v', value]
62 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
63 text = p.communicate()[0]
64 # Require a successful return value.
65 if p.returncode:
66 return None
67 # Extract value.
68 match = re.search(r'REG_\w+[ ]+([^\r]+)\r\n', text)
69 if not match:
70 return None
71 return match.group(1)
72
73
def _CreateVersion(name):
  """Returns the VisualStudioVersion object for a version name.

  Known names: '2005', '2005e', '2008', '2008e' ('e' = express edition).
  """
  catalog = {
      '2008': VisualStudioVersion('2008',
                                  'Visual Studio 2008',
                                  solution_version='10.00',
                                  project_version='9.00',
                                  flat_sln=False),
      '2008e': VisualStudioVersion('2008e',
                                   'Visual Studio 2008',
                                   solution_version='10.00',
                                   project_version='9.00',
                                   flat_sln=True),
      '2005': VisualStudioVersion('2005',
                                  'Visual Studio 2005',
                                  solution_version='9.00',
                                  project_version='8.00',
                                  flat_sln=False),
      '2005e': VisualStudioVersion('2005e',
                                   'Visual Studio 2005',
                                   solution_version='9.00',
                                   project_version='8.00',
                                   flat_sln=True),
  }
  return catalog[str(name)]
98
99
def _DetectVisualStudioVersions():
  """Collect the list of installed visual studio versions.

  Returns:
    A list of visual studio versions installed in descending order of
    usage preference.
  Base this on the registry and a quick check if devenv.exe exists.
  Only versions 8-9 are considered.
  Possibilities are:
    2005 - Visual Studio 2005 (8)
    2008 - Visual Studio 2008 (9)
  """
  version_to_year = {'8.0': '2005', '9.0': '2008'}
  detected = []
  for version in ('9.0', '8.0'):
    # Look up this version's install dir in the registry.
    key = r'HKLM\Software\Microsoft\VisualStudio\%s' % version
    path = _RegistryGetValue(key, 'InstallDir')
    if not path:
      continue
    year = version_to_year[version]
    if os.path.exists(os.path.join(path, 'devenv.exe')):
      # Full edition is present.
      detected.append(_CreateVersion(year))
    elif os.path.exists(os.path.join(path, 'vcexpress.exe')):
      # Express edition is present.
      detected.append(_CreateVersion(year + 'e'))
  return detected
129
130
def SelectVisualStudioVersion(version='auto'):
  """Select which version of Visual Studio projects to generate.

  Arguments:
    version: Hook to allow caller to force a particular version (vs auto).
  Returns:
    An object representing a visual studio project format version.
  """
  # In auto mode, the GYP_MSVS_VERSION environment variable may override.
  if version == 'auto':
    version = os.environ.get('GYP_MSVS_VERSION', 'auto')
  # An explicit version wins outright.
  if version != 'auto':
    return _CreateVersion(version)
  # Otherwise pick the most preferred installed version, or fall back
  # to 2005 when nothing is detected.
  detected = _DetectVisualStudioVersions()
  if detected:
    return detected[0]
  return _CreateVersion('2005')
+0
-200
third_party/gyp/pylib/gyp/SCons.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 SCons generator.
8
9 This contains class definitions and supporting functions for generating
10 pieces of SCons files for the different types of GYP targets.
11 """
12
13 import os
14
15
def WriteList(fp, list, prefix='',
              separator=',\n    ',
              preamble=None,
              postamble=None):
  """Writes each item of 'list' to fp, prefixed and joined by 'separator',
  bracketed by optional preamble/postamble text."""
  decorated = [prefix + item for item in list]
  fp.write(preamble or '')
  fp.write((separator or ' ').join(decorated))
  fp.write(postamble or '')
23
24
class TargetBase(object):
  """
  Base class for a SCons representation of a GYP target.
  """
  is_ignored = False
  target_prefix = ''
  target_suffix = ''

  def __init__(self, spec):
    self.spec = spec

  def full_product_name(self):
    """
    Returns the full name of the product being built:

      * Uses 'product_name' if it's set, else prefix + 'target_name'.
      * Prepends 'product_dir' if set.
      * Appends SCons suffix variables for the target type (or
        product_extension).
    """
    suffix = self.target_suffix
    extension = self.spec.get('product_extension')
    if extension:
      # An explicit extension overrides the per-type SCons suffix.
      suffix = '.' + extension
    prefix = self.spec.get('product_prefix', self.target_prefix)
    base = self.spec.get('product_name', self.spec['target_name'])
    full = prefix + base + suffix
    product_dir = self.spec.get('product_dir')
    if product_dir:
      return os.path.join(product_dir, full)
    return os.path.join(self.out_dir, full)

  def write_input_files(self, fp):
    """
    Writes the definition of the input files (sources).
    """
    sources = self.spec.get('sources')
    if not sources:
      fp.write('\ninput_files = []\n')
      return
    WriteList(fp, map(repr, sources),
              preamble='\ninput_files = [\n    ',
              postamble=',\n]\n')

  def builder_call(self):
    """
    Returns the actual SCons builder call to build this target.
    """
    name = self.full_product_name()
    return 'env.%s(env.File(%r), input_files)' % (self.builder_name, name)

  def write_target(self, fp, src_dir='', pre=''):
    """
    Writes the lines necessary to build this target.
    """
    fp.write('\n' + pre)
    fp.write('_outputs = %s\n' % self.builder_call())
    fp.write('target_files.extend(_outputs)\n')
82
83
class NoneTarget(TargetBase):
  """
  A GYP target type of 'none', implicitly or explicitly.
  """
  def write_target(self, fp, pre=''):
    # Nothing is built; the inputs pass straight through as the outputs.
    fp.write('\ntarget_files.extend(input_files)\n')
90
91
class SettingsTarget(TargetBase):
  """
  A GYP target type of 'settings'.
  """
  # Settings targets emit no build steps at all.
  is_ignored = True
97
98
# SCons code template, instantiated via %-substitution with 'src_dir'
# (source directory prefix) and 'name' (intermediate builder, e.g.
# StaticObject), that turns compilable inputs into object files placed
# under the build directory.
compilable_sources_template = """
_result = []
for infile in input_files:
  if env.compilable(infile):
    if (type(infile) == type('')
        and (infile.startswith(%(src_dir)r)
             or not os.path.isabs(env.subst(infile)))):
      # Force files below the build directory by replacing all '..'
      # elements in the path with '__':
      base, ext = os.path.splitext(os.path.normpath(infile))
      base = [d == '..' and '__' or d for d in base.split('/')]
      base = os.path.join(*base)
      object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base
      if not infile.startswith(%(src_dir)r):
        infile = %(src_dir)r + infile
      infile = env.%(name)s(object, infile)[0]
    else:
      infile = env.%(name)s(infile)[0]
    _result.append(infile)
input_files = _result
"""
120
class CompilableSourcesTargetBase(TargetBase):
  """
  An abstract base class for targets that compile their source files.

  We explicitly transform compilable files into object files,
  even though SCons could infer that for us, because we want
  to control where the object file ends up.  (The implicit rules
  in SCons always put the object file next to the source file.)
  """
  intermediate_builder_name = None

  def write_target(self, fp, src_dir='', pre=''):
    # Subclasses must name the builder that produces intermediates.
    if self.intermediate_builder_name is None:
      raise NotImplementedError
    # Normalize so plain string prefixing in the template works.
    if src_dir and not src_dir.endswith('/'):
      src_dir += '/'
    fp.write(compilable_sources_template % {
        'src_dir': src_dir,
        'name': self.intermediate_builder_name,
    })
    super(CompilableSourcesTargetBase, self).write_target(fp)
142
143
class ProgramTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'executable'.
  """
  # Executables are linked from StaticObject intermediates and land in
  # the top of the build tree.
  builder_name = 'GypProgram'
  intermediate_builder_name = 'StaticObject'
  target_prefix = '${PROGPREFIX}'
  target_suffix = '${PROGSUFFIX}'
  out_dir = '${TOP_BUILDDIR}'
153
154
class StaticLibraryTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'static_library'.
  """
  # Archives are built from StaticObject intermediates into ${LIB_DIR}.
  builder_name = 'GypStaticLibrary'
  intermediate_builder_name = 'StaticObject'
  target_prefix = '${LIBPREFIX}'
  target_suffix = '${LIBSUFFIX}'
  out_dir = '${LIB_DIR}'
164
165
class SharedLibraryTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'shared_library'.
  """
  # Shared libraries need position-independent SharedObject intermediates.
  builder_name = 'GypSharedLibrary'
  intermediate_builder_name = 'SharedObject'
  target_prefix = '${SHLIBPREFIX}'
  target_suffix = '${SHLIBSUFFIX}'
  out_dir = '${LIB_DIR}'
175
176
class LoadableModuleTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'loadable_module'.
  """
  # Like a shared library, but placed at the top of the build tree.
  builder_name = 'GypLoadableModule'
  intermediate_builder_name = 'SharedObject'
  target_prefix = '${SHLIBPREFIX}'
  target_suffix = '${SHLIBSUFFIX}'
  out_dir = '${TOP_BUILDDIR}'
186
187
# Maps a GYP 'type' value to the class that emits SCons code for it.
TargetMap = {
    None: NoneTarget,
    'none': NoneTarget,
    'settings': SettingsTarget,
    'executable': ProgramTarget,
    'static_library': StaticLibraryTarget,
    'shared_library': SharedLibraryTarget,
    'loadable_module': LoadableModuleTarget,
}
197
def Target(spec):
  """Instantiates the target class matching spec['type'] for this spec."""
  target_class = TargetMap[spec.get('type')]
  return target_class(spec)
+0
-461
third_party/gyp/pylib/gyp/__init__.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import copy
7 import gyp.input
8 import optparse
9 import os.path
10 import re
11 import shlex
12 import sys
13
# Active debug modes, keyed by mode name; populated from -d/--debug in main().
debug = {}

# The "official" debug mode names, though any string is accepted.
DEBUG_GENERAL = 'general'
DEBUG_VARIABLES = 'variables'
DEBUG_INCLUDES = 'includes'
21
22 def DebugOutput(mode, message):
23 if mode in gyp.debug.keys():
24 print "%s: %s" % (mode.upper(), message)
25
def FindBuildFiles():
  """Returns the names of the .gyp files in the current working directory."""
  extension = '.gyp'
  return [entry for entry in os.listdir(os.getcwd())
          if entry.endswith(extension)]
34
35
def Load(build_files, format, default_variables=None,
         includes=None, depth='.', params=None, check=False,
         circular_check=True):
  """
  Loads one or more specified build files.
  default_variables and includes will be copied before use.
  Returns the generator for the specified format and the
  data returned by loading the specified build files.

  Args:
    build_files: list of .gyp files to load.
    format: generator name (e.g. 'msvs', 'make').
    default_variables: optional dict of variable defaults (copied, not
        mutated).
    includes: optional list of files to force-include into every build file.
    depth: relative path to the top of the source tree.
    params: optional dict of parameters handed to the generator.
    check: whether to check the format of the .gyp files.
    circular_check: whether to reject circular .gyp relationships.
  """
  # The old {} / [] defaults were shared mutable objects across calls;
  # None sentinels give every call a fresh instance.
  if default_variables is None:
    default_variables = {}
  if includes is None:
    includes = []
  if params is None:
    params = {}
  default_variables = copy.copy(default_variables)

  # Default variables provided by this program and its modules should be
  # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
  # avoiding collisions with user and automatic variables.
  default_variables['GENERATOR'] = format

  generator_name = 'gyp.generator.' + format
  # These parameters are passed in order (as opposed to by key)
  # because ActivePython cannot handle key parameters to __import__.
  generator = __import__(generator_name, globals(), locals(), generator_name)
  for (key, val) in generator.generator_default_variables.items():
    default_variables.setdefault(key, val)

  # Give the generator the opportunity to set additional variables based on
  # the params it will receive in the output phase.
  if getattr(generator, 'CalculateVariables', None):
    generator.CalculateVariables(default_variables, params)

  # Fetch the generator specific info that gets fed to input, we use getattr
  # so we can default things and the generators only have to provide what
  # they need.
  generator_input_info = {
    'generator_wants_absolute_build_file_paths':
        getattr(generator, 'generator_wants_absolute_build_file_paths', False),
    'generator_handles_variants':
        getattr(generator, 'generator_handles_variants', False),
    'non_configuration_keys':
        getattr(generator, 'generator_additional_non_configuration_keys', []),
    'path_sections':
        getattr(generator, 'generator_additional_path_sections', []),
    'extra_sources_for_rules':
        getattr(generator, 'generator_extra_sources_for_rules', []),
    'generator_supports_multiple_toolsets':
        getattr(generator, 'generator_supports_multiple_toolsets', False),
  }

  # Process the input specific to this generator.
  result = gyp.input.Load(build_files, default_variables, includes[:],
                          depth, generator_input_info, check, circular_check)
  return [generator] + result
85
def NameValueListToDict(name_value_list):
  """
  Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
  of the pairs.  If a string is simply NAME, then the value in the dictionary
  is set to True.  If VALUE can be converted to an integer, it is.
  """
  result = {}
  for item in name_value_list:
    name, has_value, value = item.partition('=')
    if not has_value:
      # Bare NAME: treat as a boolean flag.
      result[name] = True
      continue
    try:
      # Prefer an integer value when the text allows it.
      result[name] = int(value)
    except ValueError:
      result[name] = value
  return result
107
def ShlexEnv(env_name):
  """Shell-splits the named environment variable; falsy result if unset."""
  flags = os.environ.get(env_name, [])
  if not flags:
    # Preserve the historical falsy return ([] when unset, '' when empty).
    return flags
  return shlex.split(flags)
113
def FormatOpt(opt, value):
  """Renders a flag/value pair: '--opt=value' for long options,
  '-Ovalue' (no separator) for short ones."""
  return '%s=%s' % (opt, value) if opt.startswith('--') else opt + value
118
def RegenerateAppendFlag(flag, values, predicate, env_name, options):
  """Regenerate a list of command line flags, for an option of action='append'.

  The |env_name|, if given, is checked in the environment and used to generate
  an initial list of options, then the options that were specified on the
  command line (given in |values|) are appended.  This matches the handling of
  environment variables and command line flags where command line flags
  override the environment, while not requiring the environment to be set when
  the flags are used again.
  """
  regenerated = []
  if options.use_environment and env_name:
    regenerated.extend(FormatOpt(flag, predicate(v))
                       for v in ShlexEnv(env_name))
  if values:
    regenerated.extend(FormatOpt(flag, predicate(v)) for v in values)
  return regenerated
137
def RegenerateFlags(options):
  """Given a parsed options object, and taking the environment variables into
  account, returns a list of flags that should regenerate an equivalent options
  object (even in the absence of the environment variables.)

  Any path options will be normalized relative to depth.

  The format flag is not included, as it is assumed the calling generator will
  set that as appropriate.
  """
  def FixPath(path):
    # Rebase a path against the depth directory, defaulting to '.'.
    path = gyp.common.FixIfRelativePath(path, options.depth)
    return path or os.path.curdir

  def Noop(value):
    return value

  # We always want to ignore the environment when regenerating, to avoid
  # duplicate or changed flags in the environment at the time of regeneration.
  flags = ['--ignore-environment']
  for name, metadata in options._regeneration_metadata.iteritems():
    opt = metadata['opt']
    value = getattr(options, name)
    value_predicate = metadata['type'] == 'path' and FixPath or Noop
    action = metadata['action']
    env_name = metadata['env_name']
    if action == 'append':
      flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
                                        env_name, options))
    elif action in ('store', None):  # None is a synonym for 'store'.
      if value:
        flags.append(FormatOpt(opt, value_predicate(value)))
      elif options.use_environment and env_name and os.environ.get(env_name):
        flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
    elif action in ('store_true', 'store_false'):
      if ((action == 'store_true' and value) or
          (action == 'store_false' and not value)):
        flags.append(opt)
      elif options.use_environment and env_name:
        print >>sys.stderr, ('Warning: environment regeneration unimplemented '
                             'for %s flag %r env_name %r' % (action, opt,
                                                             env_name))
    else:
      print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
                           'flag %r' % (action, opt))

  return flags
187
class RegeneratableOptionParser(optparse.OptionParser):
  """OptionParser that records per-option metadata so RegenerateFlags can
  later reconstruct an equivalent command line."""

  def __init__(self):
    self.__regeneratable_options = {}
    optparse.OptionParser.__init__(self)

  def add_option(self, *args, **kw):
    """Add an option to the parser.

    This accepts the same arguments as OptionParser.add_option, plus the
    following:
    regenerate: can be set to False to prevent this option from being included
                in regeneration.
    env_name: name of environment variable that additional values for this
              option come from.
    type: adds type='path', to tell the regenerator that the values of
          this option need to be made relative to options.depth
    """
    env_name = kw.pop('env_name', None)
    if 'dest' in kw and kw.pop('regenerate', True):
      dest = kw['dest']

      # 'path' is meaningful only to the regenerator; optparse itself just
      # sees a plain string.
      opt_type = kw.get('type')
      if opt_type == 'path':
        kw['type'] = 'string'

      self.__regeneratable_options[dest] = {
          'action': kw.get('action'),
          'type': opt_type,
          'env_name': env_name,
          'opt': args[0],
      }

    optparse.OptionParser.add_option(self, *args, **kw)

  def parse_args(self, *args):
    values, args = optparse.OptionParser.parse_args(self, *args)
    # Expose the collected metadata on the values object for RegenerateFlags.
    values._regeneration_metadata = self.__regeneratable_options
    return values, args
228
229 def main(args):
230 my_name = os.path.basename(sys.argv[0])
231
232 parser = RegeneratableOptionParser()
233 usage = 'usage: %s [options ...] [build_file ...]'
234 parser.set_usage(usage.replace('%s', '%prog'))
235 parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
236 env_name='GYP_DEFINES',
237 help='sets variable VAR to value VAL')
238 parser.add_option('-f', '--format', dest='formats', action='append',
239 env_name='GYP_GENERATORS', regenerate=False,
240 help='output formats to generate')
241 parser.add_option('--msvs-version', dest='msvs_version',
242 regenerate=False,
243 help='Deprecated; use -G msvs_version=MSVS_VERSION instead')
244 parser.add_option('-I', '--include', dest='includes', action='append',
245 metavar='INCLUDE', type='path',
246 help='files to include in all loaded .gyp files')
247 parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
248 help='set DEPTH gyp variable to a relative path to PATH')
249 parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
250 action='append', default=[], help='turn on a debugging '
251 'mode for debugging GYP. Supported modes are "variables" '
252 'and "general"')
253 parser.add_option('-S', '--suffix', dest='suffix', default='',
254 help='suffix to add to generated files')
255 parser.add_option('-G', dest='generator_flags', action='append', default=[],
256 metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
257 help='sets generator flag FLAG to VAL')
258 parser.add_option('--generator-output', dest='generator_output',
259 action='store', default=None, metavar='DIR', type='path',
260 env_name='GYP_GENERATOR_OUTPUT',
261 help='puts generated build files under DIR')
262 parser.add_option('--ignore-environment', dest='use_environment',
263 action='store_false', default=True, regenerate=False,
264 help='do not read options from environment variables')
265 parser.add_option('--check', dest='check', action='store_true',
266 help='check format of gyp files')
267 parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
268 default=None, metavar='DIR', type='path',
269 help='directory to use as the root of the source tree')
270 # --no-circular-check disables the check for circular relationships between
271 # .gyp files. These relationships should not exist, but they've only been
272 # observed to be harmful with the Xcode generator. Chromium's .gyp files
273 # currently have some circular relationships on non-Mac platforms, so this
274 # option allows the strict behavior to be used on Macs and the lenient
275 # behavior to be used elsewhere.
276 # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
277 parser.add_option('--no-circular-check', dest='circular_check',
278 action='store_false', default=True, regenerate=False,
279 help="don't check for circular relationships between files")
280
281 # We read a few things from ~/.gyp, so set up a var for that.
282 home_vars = ['HOME']
283 if sys.platform in ('cygwin', 'win32'):
284 home_vars.append('USERPROFILE')
285 home = None
286 home_dot_gyp = None
287 for home_var in home_vars:
288 home = os.getenv(home_var)
289 if home != None:
290 home_dot_gyp = os.path.join(home, '.gyp')
291 if not os.path.exists(home_dot_gyp):
292 home_dot_gyp = None
293 else:
294 break
295
296 # TODO(thomasvl): add support for ~/.gyp/defaults
297
298 options, build_files_arg = parser.parse_args(args)
299 build_files = build_files_arg
300
301 if not options.formats:
302 # If no format was given on the command line, then check the env variable.
303 generate_formats = []
304 if options.use_environment:
305 generate_formats = os.environ.get('GYP_GENERATORS', [])
306 if generate_formats:
307 generate_formats = re.split('[\s,]', generate_formats)
308 if generate_formats:
309 options.formats = generate_formats
310 else:
311 # Nothing in the variable, default based on platform.
312 options.formats = [ {'darwin': 'xcode',
313 'win32': 'msvs',
314 'cygwin': 'msvs',
315 'freebsd7': 'make',
316 'freebsd8': 'make',
317 'linux2': 'make',
318 'openbsd4': 'make',
319 'sunos5': 'make',}[sys.platform] ]
320
321 if not options.generator_output and options.use_environment:
322 g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
323 if g_o:
324 options.generator_output = g_o
325
326 for mode in options.debug:
327 gyp.debug[mode] = 1
328
329 # Do an extra check to avoid work when we're not debugging.
330 if DEBUG_GENERAL in gyp.debug.keys():
331 DebugOutput(DEBUG_GENERAL, 'running with these options:')
332 for option, value in sorted(options.__dict__.items()):
333 if option[0] == '_':
334 continue
335 if isinstance(value, basestring):
336 DebugOutput(DEBUG_GENERAL, " %s: '%s'" % (option, value))
337 else:
338 DebugOutput(DEBUG_GENERAL, " %s: %s" % (option, str(value)))
339
340 if not build_files:
341 build_files = FindBuildFiles()
342 if not build_files:
343 print >>sys.stderr, (usage + '\n\n%s: error: no build_file') % \
344 (my_name, my_name)
345 return 1
346
347 # TODO(mark): Chromium-specific hack!
348 # For Chromium, the gyp "depth" variable should always be a relative path
349 # to Chromium's top-level "src" directory. If no depth variable was set
350 # on the command line, try to find a "src" directory by looking at the
351 # absolute path to each build file's directory. The first "src" component
352 # found will be treated as though it were the path used for --depth.
353 if not options.depth:
354 for build_file in build_files:
355 build_file_dir = os.path.abspath(os.path.dirname(build_file))
356 build_file_dir_components = build_file_dir.split(os.path.sep)
357 components_len = len(build_file_dir_components)
358 for index in xrange(components_len - 1, -1, -1):
359 if build_file_dir_components[index] == 'src':
360 options.depth = os.path.sep.join(build_file_dir_components)
361 break
362 del build_file_dir_components[index]
363
364 # If the inner loop found something, break without advancing to another
365 # build file.
366 if options.depth:
367 break
368
369 if not options.depth:
370 raise Exception, \
371 'Could not automatically locate src directory. This is a ' + \
372 'temporary Chromium feature that will be removed. Use ' + \
373 '--depth as a workaround.'
374
375 # If toplevel-dir is not set, we assume that depth is the root of our source
376 # tree.
377 if not options.toplevel_dir:
378 options.toplevel_dir = options.depth
379
380 # -D on the command line sets variable defaults - D isn't just for define,
381 # it's for default. Perhaps there should be a way to force (-F?) a
382 # variable's value so that it can't be overridden by anything else.
383 cmdline_default_variables = {}
384 defines = []
385 if options.use_environment:
386 defines += ShlexEnv('GYP_DEFINES')
387 if options.defines:
388 defines += options.defines
389 cmdline_default_variables = NameValueListToDict(defines)
390 if DEBUG_GENERAL in gyp.debug.keys():
391 DebugOutput(DEBUG_GENERAL,
392 "cmdline_default_variables: %s" % cmdline_default_variables)
393
394 # Set up includes.
395 includes = []
396
397 # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
398 # .gyp file that's loaded, before anything else is included.
399 if home_dot_gyp != None:
400 default_include = os.path.join(home_dot_gyp, 'include.gypi')
401 if os.path.exists(default_include):
402 includes.append(default_include)
403
404 # Command-line --include files come after the default include.
405 if options.includes:
406 includes.extend(options.includes)
407
408 # Generator flags should be prefixed with the target generator since they
409 # are global across all generator runs.
410 gen_flags = []
411 if options.use_environment:
412 gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
413 if options.generator_flags:
414 gen_flags += options.generator_flags
415 generator_flags = NameValueListToDict(gen_flags)
416 if DEBUG_GENERAL in gyp.debug.keys():
417 DebugOutput(DEBUG_GENERAL, "generator_flags: %s" % generator_flags)
418
419 # TODO: Remove this and the option after we've gotten folks to move to the
420 # generator flag.
421 if options.msvs_version:
422 print >>sys.stderr, \
423 'DEPRECATED: Use generator flag (-G msvs_version=' + \
424 options.msvs_version + ') instead of --msvs-version=' + \
425 options.msvs_version
426 generator_flags['msvs_version'] = options.msvs_version
427
428 # Generate all requested formats (use a set in case we got one format request
429 # twice)
430 for format in set(options.formats):
431 params = {'options': options,
432 'build_files': build_files,
433 'generator_flags': generator_flags,
434 'cwd': os.getcwd(),
435 'build_files_arg': build_files_arg,
436 'gyp_binary': sys.argv[0],
437 'home_dot_gyp': home_dot_gyp}
438
439 # Start with the default variables from the command line.
440 [generator, flat_list, targets, data] = Load(build_files, format,
441 cmdline_default_variables,
442 includes, options.depth,
443 params, options.check,
444 options.circular_check)
445
446 # TODO(mark): Pass |data| for now because the generator needs a list of
447 # build files that came in. In the future, maybe it should just accept
448 # a list, and not the whole data dict.
449 # NOTE: flat_list is the flattened dependency graph specifying the order
450 # that targets may be built. Build systems that operate serially or that
451 # need to have dependencies defined before dependents reference them should
452 # generate targets in the order specified in flat_list.
453 generator.GenerateOutput(flat_list, targets, data, params)
454
455 # Done
456 return 0
457
458
459 if __name__ == '__main__':
460 sys.exit(main(sys.argv[1:]))
+0
-343
third_party/gyp/pylib/gyp/common.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import errno
7 import filecmp
8 import os.path
9 import re
10 import tempfile
11 import sys
12
def ExceptionAppend(e, msg):
  """Append a message to the given exception's message."""
  if not e.args:
    # No existing message: the new text becomes the whole message.
    e.args = (msg,)
  else:
    # Append to the first arg, preserving any remaining args untouched.
    merged = '%s %s' % (e.args[0], msg)
    e.args = (merged,) + e.args[1:]
21
22
def ParseQualifiedTarget(target):
  """Splits a qualified target into [build_file, target, toolset].

  Either the build file ('file.gyp:name') or the toolset ('name#toolset')
  part may be absent, in which case None is returned for it.
  """
  # rpartition on ':' disambiguates the Windows drive-letter separator.
  head, sep, tail = target.rpartition(':')
  if sep:
    build_file, target = head, tail
  else:
    build_file = None

  head, sep, tail = target.rpartition('#')
  if sep:
    target, toolset = head, tail
  else:
    toolset = None

  return [build_file, target, toolset]
40
41
def ResolveTarget(build_file, target, toolset):
  """Resolves a target into a canonical form, returning
  [build_file, target, toolset]:
  - a fully defined build file, either absolute or relative to the current
    directory
  - a target name
  - a toolset

  build_file is the file relative to which 'target' is defined.
  target is the qualified target.
  toolset is the default toolset for that target.
  """
  parsed_build_file, target, parsed_toolset = ParseQualifiedTarget(target)

  if parsed_build_file:
    if build_file:
      # If a relative path, parsed_build_file is relative to the directory
      # containing build_file.  If build_file is not in the current directory,
      # parsed_build_file is not a usable path as-is.  Resolve it by
      # interpreting it as relative to build_file.  If parsed_build_file is
      # absolute, it is usable as a path regardless of the current directory,
      # and os.path.join will return it as-is.
      build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
                                                 parsed_build_file))
    else:
      build_file = parsed_build_file

  if parsed_toolset:
    toolset = parsed_toolset

  return [build_file, target, toolset]
71
72
def BuildFile(fully_qualified_target):
  """Return just the build-file component of a fully qualified target."""
  build_file, _, _ = ParseQualifiedTarget(fully_qualified_target)
  return build_file
76
77
def QualifiedTarget(build_file, target, toolset):
  """Build a fully qualified target string.

  "Qualified" means the file the target was defined in plus the target name,
  separated by a colon, optionally suffixed by '#' and the toolset name:
    /path/to/file.gyp:target_name#toolset
  """
  result = '%s:%s' % (build_file, target)
  if toolset:
    result += '#' + toolset
  return result
86
87
def RelativePath(path, relative_to):
  """Return a path identifying |path| relative to |relative_to|.

  Both arguments are interpreted relative to the current directory.
  Returns '' when the two resolve to the same location.
  """
  # Normalize both to absolute form so component comparison is meaningful.
  abs_path = os.path.abspath(path)
  abs_base = os.path.abspath(relative_to)

  path_parts = abs_path.split(os.path.sep)
  base_parts = abs_base.split(os.path.sep)

  # Length of the shared leading component run.
  shared = len(os.path.commonprefix([path_parts, base_parts]))

  # Climb out of |relative_to| to the common ancestor, then descend into
  # the remainder of |path|.
  climb = [os.path.pardir] * (len(base_parts) - shared)
  relative_parts = climb + path_parts[shared:]

  if not relative_parts:
    # Identical paths.
    return ''

  return os.path.join(*relative_parts)
115
116
def FixIfRelativePath(path, relative_to):
  """Like RelativePath, but absolute |path| values pass through untouched."""
  return path if os.path.isabs(path) else RelativePath(path, relative_to)
122
123
def UnrelativePath(path, relative_to):
  """Re-anchor |path| to the current directory.

  |relative_to| is relative to the current directory and |path| is relative
  to the dirname of |relative_to|; the result identifies |path| relative to
  the current directory.
  """
  anchor_dir = os.path.dirname(relative_to)
  return os.path.normpath(os.path.join(anchor_dir, path))
130
131
# re objects used by EncodePOSIXShellArgument.  See IEEE 1003.1 XCU.2.2 at
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
# and the documentation for various shells.

# _quote is a pattern that should match any argument that needs to be quoted
# with double-quotes by EncodePOSIXShellArgument.  It matches the following
# characters appearing anywhere in an argument:
#   \t, \n, space  parameter separators
#   #              comments
#   $              expansions (quoted to always expand within one argument)
#   %              called out by IEEE 1003.1 XCU.2.2
#   &              job control
#   '              quoting
#   (, )           subshell execution
#   *, ?, [        pathname expansion
#   ;              command delimiter
#   <, >, |        redirection
#   =              assignment
#   {, }           brace expansion (bash)
#   ~              tilde expansion
# It also matches the empty string, because "" (or '') is the only way to
# represent an empty string literal argument to a POSIX shell.
#
# This does not match the characters in _escape, because those need to be
# backslash-escaped regardless of whether they appear in a double-quoted
# string.
_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')

# _escape is a pattern that should match any character that needs to be
# escaped with a backslash, whether or not the argument matched the _quote
# pattern.  _escape is used with re.sub to backslash anything in _escape's
# first match group, hence the (parentheses) in the regular expression.
#
# _escape matches the following characters appearing anywhere in an argument:
#   "  to prevent POSIX shells from interpreting this character for quoting
#   \  to prevent POSIX shells from interpreting this character for escaping
#   `  to prevent POSIX shells from interpreting this character for command
#      substitution
# Missing from this list is $, because the desired behavior of
# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
#
# Also missing from this list is !, which bash will interpret as the history
# expansion character when history is enabled.  bash does not enable history
# by default in non-interactive shells, so this is not thought to be a problem.
# ! was omitted from this list because bash interprets "\!" as a literal string
# including the backslash character (avoiding history expansion but retaining
# the backslash), which would not be correct for argument encoding.  Handling
# this case properly would also be problematic because bash allows the history
# character to be changed with the histchars shell variable.  Fortunately,
# as history is not enabled in non-interactive shells and
# EncodePOSIXShellArgument is only expected to encode for non-interactive
# shells, there is no room for error here by ignoring !.
_escape = re.compile(r'(["\\`])')
185
def EncodePOSIXShellArgument(argument):
  """Encodes |argument| suitably for consumption by POSIX shells.

  The argument may be double-quoted and backslash-escaped as necessary so a
  POSIX shell treats the returned value as a literal representing the
  original argument.  Parameter (variable) expansions beginning with $ are
  deliberately left intact so the shell can still expand them.
  """
  # Non-string inputs (ints, etc.) are stringified first.
  if not isinstance(argument, str):
    argument = str(argument)

  # Double-quote only when the argument contains a character the shell
  # would otherwise interpret (or is empty); see the module-level _quote.
  quote = '"' if _quote.search(argument) else ''

  # Characters in _escape need a backslash regardless of quoting.
  escaped = re.sub(_escape, r'\\\1', argument)

  return quote + escaped + quote
207
208
def EncodePOSIXShellList(list):
  """Encodes |list| suitably for consumption by POSIX shells.

  Each item is run through EncodePOSIXShellArgument and the results are
  joined with single spaces, the shell's argument separator.
  """
  return ' '.join(EncodePOSIXShellArgument(argument) for argument in list)
220
221
def DeepDependencyTargets(target_dicts, roots):
  """Return the transitive dependency closure of |roots|, excluding |roots|.

  target_dicts maps qualified target names to their spec dicts; both the
  'dependencies' and 'dependencies_original' lists are followed.
  """
  found = set()
  for root in roots:
    spec = target_dicts[root]
    # De-duplicate the direct dependencies before recursing.
    direct = list(set(spec.get('dependencies', []) +
                      spec.get('dependencies_original', [])))
    found.update(dep for dep in direct if dep not in roots)
    # Recurse one level per root; exclusion is always against |roots|.
    found.update(dep for dep in DeepDependencyTargets(target_dicts, direct)
                 if dep not in roots)
  return list(found)
237
238
def BuildFileTargets(target_list, build_file):
  """From a target_list, return the subset defined in |build_file|."""
  matching = []
  for qualified in target_list:
    if BuildFile(qualified) == build_file:
      matching.append(qualified)
  return matching
243
244
def AllTargets(target_list, target_dicts, build_file):
  """Return every target (direct plus transitive dependencies) for
  |build_file|."""
  direct = BuildFileTargets(target_list, build_file)
  return direct + DeepDependencyTargets(target_dicts, direct)
251
252
def WriteOnDiff(filename):
  """Write to a file only if the new contents differ.

  Arguments:
    filename: name of the file to potentially write to.
  Returns:
    A file like object which will write to temporary file and only overwrite
    the target if it differs (on close).
  """

  class Writer:
    """Wrapper around file which only covers the target if it differs."""
    def __init__(self):
      # Pick temporary file.
      # The temp file lives in the target's own directory so the final
      # os.rename() below is a same-filesystem move.
      tmp_fd, self.tmp_path = tempfile.mkstemp(
          suffix='.tmp',
          prefix=os.path.split(filename)[1] + '.gyp.',
          dir=os.path.split(filename)[0])
      try:
        self.tmp_file = os.fdopen(tmp_fd, 'wb')
      except Exception:
        # Don't leave turds behind.
        os.unlink(self.tmp_path)
        raise

    def __getattr__(self, attrname):
      # Delegate everything else (write, flush, ...) to self.tmp_file.
      return getattr(self.tmp_file, attrname)

    def close(self):
      try:
        # Close tmp file.
        self.tmp_file.close()
        # Determine if different.  The third argument (shallow=False) forces
        # a content comparison rather than a stat()-based one.
        same = False
        try:
          same = filecmp.cmp(self.tmp_path, filename, False)
        except OSError, e:
          # Python 2 except syntax.  ENOENT simply means there is no old
          # file yet; anything else is a real error.
          if e.errno != errno.ENOENT:
            raise

        if same:
          # The new file is identical to the old one, just get rid of the new
          # one.
          os.unlink(self.tmp_path)
        else:
          # The new file is different from the old one, or there is no old one.
          # Rename the new file to the permanent name.
          #
          # tempfile.mkstemp uses an overly restrictive mode, resulting in a
          # file that can only be read by the owner, regardless of the umask.
          # There's no reason to not respect the umask here, which means that
          # an extra hoop is required to fetch it and reset the new file's mode.
          #
          # No way to get the umask without setting a new one?  Set a safe one
          # and then set it back to the old value.
          umask = os.umask(077)
          os.umask(umask)
          os.chmod(self.tmp_path, 0666 & ~umask)
          if sys.platform == 'win32' and os.path.exists(filename):
            # NOTE: on windows (but not cygwin) rename will not replace an
            # existing file, so it must be preceded with a remove. Sadly there
            # is no way to make the switch atomic.
            os.remove(filename)
          os.rename(self.tmp_path, filename)
      except Exception:
        # Don't leave turds behind.
        os.unlink(self.tmp_path)
        raise

  return Writer()
324
325
326 # From Alex Martelli,
327 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
328 # ASPN: Python Cookbook: Remove duplicates from a sequence
329 # First comment, dated 2001/10/13.
330 # (Also in the printed Python Cookbook.)
331
def uniquer(seq, idfun=None):
  """Return |seq| with duplicates removed, keeping the first occurrence of
  each item (ordering preserved).  |idfun|, if given, maps an item to the
  key used for duplicate detection."""
  if idfun is None:
    idfun = lambda x: x
  seen = {}
  result = []
  for item in seq:
    key = idfun(item)
    if key not in seen:
      seen[key] = 1
      result.append(item)
  return result
+0
-0
third_party/gyp/pylib/gyp/generator/__init__.py less more
(Empty file)
+0
-88
third_party/gyp/pylib/gyp/generator/gypd.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """gypd output module
7
8 This module produces gyp input as its output. Output files are given the
9 .gypd extension to avoid overwriting the .gyp files that they are generated
10 from. Internal references to .gyp files (such as those found in
11 "dependencies" sections) are not adjusted to point to .gypd files instead;
12 unlike other paths, which are relative to the .gyp or .gypd file, such paths
13 are relative to the directory from which gyp was run to create the .gypd file.
14
15 This generator module is intended to be a sample and a debugging aid, hence
16 the "d" for "debug" in .gypd. It is useful to inspect the results of the
17 various merges, expansions, and conditional evaluations performed by gyp
18 and to see a representation of what would be fed to a generator module.
19
20 It's not advisable to rename .gypd files produced by this module to .gyp,
21 because they will have all merges, expansions, and evaluations already
22 performed and the relevant constructs not present in the output; paths to
23 dependencies may be wrong; and various sections that do not belong in .gyp
24 files such as such as "included_files" and "*_excluded" will be present.
25 Output will also be stripped of comments. This is not intended to be a
26 general-purpose gyp pretty-printer; for that, you probably just want to
27 run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
28 comments but won't do all of the other things done to this module's output.
29
30 The specific formatting of the output generated by this module is subject
31 to change.
32 """
33
34
35 import gyp.common
36 import errno
37 import os
38 import pprint
39
40
# These variables should just be spit back out as variable references.
_generator_identity_variables = [
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
]

# gypd doesn't define a default value for OS like many other generator
# modules.  Specify "-D OS=whatever" on the command line to provide a value.
generator_default_variables = {
}

# gypd supports multiple toolsets
generator_supports_multiple_toolsets = True

# TODO(mark): This always uses <, which isn't right.  The input module should
# notify the generator to tell it which phase it is operating in, and this
# module should use < for the early phase and then switch to > for the late
# phase.  Bonus points for carrying @ back into the output too.
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v
68
69
def GenerateOutput(target_list, target_dicts, data, params):
  """Write one .gypd file per input .gyp file, pprint-ing that file's fully
  processed gyp data.

  NOTE(review): Python 2 code (dict.iteritems); left as-is in this tree.
  """
  # Map from output (.gypd) path to the input (.gyp) path that produced it.
  output_files = {}
  for qualified_target in target_list:
    [input_file, target] = \
        gyp.common.ParseQualifiedTarget(qualified_target)[0:2]

    # Only targets defined in .gyp files get output.
    if input_file[-4:] != '.gyp':
      continue
    input_file_stem = input_file[:-4]
    output_file = input_file_stem + params['options'].suffix + '.gypd'

    # Many targets share an input file; record each output file once.
    if not output_file in output_files:
      output_files[output_file] = input_file

  for output_file, input_file in output_files.iteritems():
    output = open(output_file, 'w')
    pprint.pprint(data[input_file], output)
    output.close()
+0
-57
third_party/gyp/pylib/gyp/generator/gypsh.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """gypsh output module
7
8 gypsh is a GYP shell. It's not really a generator per se. All it does is
9 fire up an interactive Python session with a few local variables set to the
10 variables passed to the generator. Like gypd, it's intended as a debugging
11 aid, to facilitate the exploration of .gyp structures after being processed
12 by the input module.
13
14 The expected usage is "gyp -f gypsh -D OS=desired_os".
15 """
16
17
18 import code
19 import sys
20
21
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
]

# gypsh defines no default variables of its own; identity references are
# injected below.
generator_default_variables = {
}

# Turn each identity variable back into an early-phase variable reference.
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v
41
42
def GenerateOutput(target_list, target_dicts, data, params):
  """Drop into an interactive Python shell with the gyp data in scope."""
  shell_locals = {
      'target_list': target_list,
      'target_dicts': target_dicts,
      'data': data,
  }

  # Use a banner that looks like the stock Python one and like what
  # code.interact uses by default, but tack on something to indicate what
  # locals are available, and identify gypsh.
  banner = 'Python %s on %s\nlocals.keys() = %s\ngypsh' % (
      sys.version, sys.platform, repr(sorted(shell_locals.keys())))

  code.interact(banner, local=shell_locals)
+0
-1286
third_party/gyp/pylib/gyp/generator/make.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 # Notes:
7 #
8 # This is all roughly based on the Makefile system used by the Linux
9 # kernel, but is a non-recursive make -- we put the entire dependency
10 # graph in front of make and let it figure it out.
11 #
12 # The code below generates a separate .mk file for each target, but
13 # all are sourced by the top-level Makefile. This means that all
14 # variables in .mk-files clobber one another. Be careful to use :=
15 # where appropriate for immediate evaluation, and similarly to watch
16 # that you're not relying on a variable value to last beween different
17 # .mk files.
18 #
19 # TODOs:
20 #
21 # Global settings and utility functions are currently stuffed in the
22 # toplevel Makefile. It may make sense to generate some .mk files on
23 # the side to keep the the files readable.
24
25 import gyp
26 import gyp.common
27 import os.path
28
29 # Debugging-related imports -- remove me once we're solid.
30 import code
31 import pprint
32
# Default gyp variable expansions for the make generator.  The $(...) forms
# are make-syntax and are expanded by make at build time, not by Python.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'OS': 'linux',
  'STATIC_LIB_PREFIX': 'lib',
  'SHARED_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_SUFFIX': '.so',
  'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/geni',
  'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
  'PRODUCT_DIR': '$(builddir)',
  'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
  'LIB_DIR': '$(obj).$(TOOLSET)',
  'RULE_INPUT_ROOT': '%(INPUT_ROOT)s',  # This gets expanded by Python.
  'RULE_INPUT_PATH': '$(abspath $<)',
  'RULE_INPUT_EXT': '$(suffix $<)',
  'RULE_INPUT_NAME': '$(notdir $<)',

  # This appears unused --- ?
  'CONFIGURATION_NAME': '$(BUILDTYPE)',
}

# Make supports multiple toolsets
generator_supports_multiple_toolsets = True
57
def ensure_directory_exists(path):
  """Create the directory containing |path| if it does not already exist.

  A no-op when |path| has no directory component.  The original
  exists()-then-makedirs() sequence had a time-of-check/time-of-use race:
  a concurrent process creating the same directory between the two calls
  made makedirs raise.  Attempt the creation unconditionally and only
  re-raise if the directory genuinely is not there afterwards.
  """
  directory = os.path.dirname(path)
  if not directory:
    return
  try:
    os.makedirs(directory)
  except OSError:
    # Either it already existed or another process just created it; only a
    # real failure (still not a directory) is propagated.
    if not os.path.isdir(directory):
      raise
62
63 # Header of toplevel Makefile.
64 # This should go into the build tree, but it's easier to keep it here for now.
65 SHARED_HEADER = ("""\
66 # We borrow heavily from the kernel build setup, though we are simpler since
67 # we don't have Kconfig tweaking settings on us.
68
69 # The implicit make rules have it looking for RCS files, among other things.
70 # We instead explicitly write all the rules we care about.
71 # It's even quicker (saves ~200ms) to pass -r on the command line.
72 MAKEFLAGS=-r
73
74 # The V=1 flag on command line makes us verbosely print command lines.
75 ifdef V
76 quiet=
77 else
78 quiet=quiet_
79 endif
80
81 # Specify BUILDTYPE=Release on the command line for a release build.
82 BUILDTYPE ?= __default_configuration__
83
84 # Directory all our build output goes into.
85 # Note that this must be two directories beneath src/ for unit tests to pass,
86 # as they reach into the src/ directory for data with relative paths.
87 builddir ?= $(builddir_name)/$(BUILDTYPE)
88 abs_builddir := $(abspath $(builddir))
89 depsdir := $(builddir)/.deps
90
91 # Object output directory.
92 obj := $(builddir)/obj
93 abs_obj := $(abspath $(obj))
94
95 # We build up a list of every single one of the targets so we can slurp in the
96 # generated dependency rule Makefiles in one pass.
97 all_deps :=
98
99 # C++ apps need to be linked with g++. Not sure what's appropriate.
100 LINK ?= $(CXX)
101
102 CC.target ?= $(CC)
103 CFLAGS.target ?= $(CFLAGS)
104 CXX.target ?= $(CXX)
105 CXXFLAGS.target ?= $(CXXFLAGS)
106 LINK.target ?= $(LINK)
107 LDFLAGS.target ?= $(LDFLAGS)
108 AR.target ?= $(AR)
109 RANLIB.target ?= ranlib
110
111 CC.host ?= gcc
112 CFLAGS.host ?=
113 CXX.host ?= g++
114 CXXFLAGS.host ?=
115 LINK.host ?= g++
116 LDFLAGS.host ?=
117 AR.host ?= ar
118 RANLIB.host ?= ranlib
119
120 # Flags to make gcc output dependency info. Note that you need to be
121 # careful here to use the flags that ccache and distcc can understand.
122 # We write to a dep file on the side first and then rename at the end
123 # so we can't end up with a broken dep file.
124 depfile = $(depsdir)/$@.d
125 DEPFLAGS = -MMD -MF $(depfile).raw
126
127 # We have to fixup the deps output in a few ways.
128 # (1) the file output should mention the proper .o file.
129 # ccache or distcc lose the path to the target, so we convert a rule of
130 # the form:
131 # foobar.o: DEP1 DEP2
132 # into
133 # path/to/foobar.o: DEP1 DEP2
134 # (2) we want missing files not to cause us to fail to build.
135 # We want to rewrite
136 # foobar.o: DEP1 DEP2 \\
137 # DEP3
138 # to
139 # DEP1:
140 # DEP2:
141 # DEP3:
142 # so if the files are missing, they're just considered phony rules.
143 # We have to do some pretty insane escaping to get those backslashes
144 # and dollar signs past make, the shell, and sed at the same time."""
145 r"""
146 define fixup_dep
147 # Fixup path as in (1).
148 sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
149 # Add extra rules as in (2).
150 # We remove slashes and replace spaces with new lines;
151 # remove blank lines;
152 # delete the first line and append a colon to the remaining lines.
153 sed -e 's|\\||' -e 's| |\n|g' $(depfile).raw |\
154 grep -v '^$$' |\
155 sed -e 1d -e 's|$$|:|' \
156 >> $(depfile)
157 rm $(depfile).raw
158 endef
159 """
160 """
161 # Command definitions:
162 # - cmd_foo is the actual command to run;
163 # - quiet_cmd_foo is the brief-output summary of the command.
164
165 quiet_cmd_cc = CC($(TOOLSET)) $@
166 cmd_cc = $(CC.$(TOOLSET)) $(CFLAGS.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) -c -o $@ $<
167
168 quiet_cmd_cxx = CXX($(TOOLSET)) $@
169 cmd_cxx = $(CXX.$(TOOLSET)) $(CXXFLAGS.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) -c -o $@ $<
170
171 quiet_cmd_alink = AR+RANLIB($(TOOLSET)) $@
172 cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) rc $@ $(filter %.o,$^) && $(RANLIB.$(TOOLSET)) $@
173
174 quiet_cmd_touch = TOUCH $@
175 cmd_touch = touch $@
176
177 quiet_cmd_copy = COPY $@
178 # send stderr to /dev/null to ignore messages when linking directories.
179 cmd_copy = ln -f $< $@ 2>/dev/null || cp -af $< $@
180
181 # Due to circular dependencies between libraries :(, we wrap the
182 # special "figure out circular dependencies" flags around the entire
183 # input list during linking.
184 quiet_cmd_link = LINK($(TOOLSET)) $@
185 cmd_link = $(LINK.$(TOOLSET)) $(LDFLAGS.$(TOOLSET)) $(GYP_LDFLAGS) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
186
187 # Shared-object link (for generating .so).
188 # Set SONAME to the library filename so our binaries don't reference the local,
189 # absolute paths used on the link command-line.
190 # TODO: perhaps this can share with the LINK command above?
191 quiet_cmd_solink = SOLINK($(TOOLSET)) $@
192 cmd_solink = $(LINK.$(TOOLSET)) -shared $(LDFLAGS.$(TOOLSET)) $(GYP_LDFLAGS) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
193 """
194 r"""
195 # Define an escape_quotes function to escape single quotes.
196 # This allows us to handle quotes properly as long as we always use
197 # use single quotes and escape_quotes.
198 escape_quotes = $(subst ','\'',$(1))
199 # This comment is here just to include a ' to unconfuse syntax highlighting.
200 # Define an escape_vars function to escape '$' variable syntax.
201 # This allows us to read/write command lines with shell variables (e.g.
202 # $LD_LIBRARY_PATH), without triggering make substitution.
203 escape_vars = $(subst $$,$$$$,$(1))
204 # Helper that expands to a shell command to echo a string exactly as it is in
205 # make. This uses printf instead of echo because printf's behaviour with respect
206 # to escape sequences is more portable than echo's across different shells
207 # (e.g., dash, bash).
208 exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
209 """
210 """
211 # Helper to compare the command we're about to run against the command
212 # we logged the last time we ran the command. Produces an empty
213 # string (false) when the commands match.
214 # Tricky point: Make has no string-equality test function.
215 # The kernel uses the following, but it seems like it would have false
216 # positives, where one string reordered its arguments.
217 # arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
218 # $(filter-out $(cmd_$@), $(cmd_$(1))))
219 # We instead substitute each for the empty string into the other, and
220 # say they're equal if both substitutions produce the empty string.
221 command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$@)),\\
222 $(subst $(cmd_$@),,$(cmd_$(1))))
223
224 # Helper that is non-empty when a prerequisite changes.
225 # Normally make does this implicitly, but we force rules to always run
226 # so we can check their command lines.
227 # $? -- new prerequisites
228 # $| -- order-only dependencies
229 prereq_changed = $(filter-out $|,$?)
230
231 # do_cmd: run a command via the above cmd_foo names, if necessary.
232 # Should always run for a given target to handle command-line changes.
233 # Second argument, if non-zero, makes it do C/C++ dependency munging.
234 define do_cmd
235 $(if $(or $(command_changed),$(prereq_changed)),
236 @$(call exact_echo, $($(quiet)cmd_$(1)))
237 @mkdir -p $(dir $@) $(dir $(depfile))
238 @$(cmd_$(1))
239 @$(call exact_echo,$(call escape_vars,cmd_$@ := $(cmd_$(1)))) > $(depfile)
240 @$(if $(2),$(fixup_dep))
241 )
242 endef
243
244 # Declare "all" target first so it is the default, even though we don't have the
245 # deps yet.
246 .PHONY: all
247 all:
248
249 # make looks for ways to re-generate included makefiles, but in our case, we
250 # don't have a direct way. Explicitly telling make that it has nothing to do
251 # for them makes it go faster.
252 %.d: ;
253
254 # Use FORCE_DO_CMD to force a target to run. Should be coupled with
255 # do_cmd.
256 .PHONY: FORCE_DO_CMD
257 FORCE_DO_CMD:
258
259 """)
260
261 ROOT_HEADER_SUFFIX_RULES = ("""\
262 # Suffix rules, putting all outputs into $(obj).
263 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
264 @$(call do_cmd,cc,1)
265 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
266 @$(call do_cmd,cc)
267 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
268 @$(call do_cmd,cc)
269 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
270 @$(call do_cmd,cxx,1)
271 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
272 @$(call do_cmd,cxx,1)
273 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
274 @$(call do_cmd,cxx,1)
275
276 # Try building from generated source, too.
277 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
278 @$(call do_cmd,cc,1)
279 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
280 @$(call do_cmd,cc)
281 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
282 @$(call do_cmd,cc)
283 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
284 @$(call do_cmd,cxx,1)
285 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
286 @$(call do_cmd,cxx,1)
287 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
288 @$(call do_cmd,cxx,1)
289
290 $(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
291 @$(call do_cmd,cc,1)
292 $(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
293 @$(call do_cmd,cc)
294 $(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
295 @$(call do_cmd,cc)
296 $(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
297 @$(call do_cmd,cxx,1)
298 $(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
299 @$(call do_cmd,cxx,1)
300 $(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
301 @$(call do_cmd,cxx,1)
302 """)
303
304 SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
305 # Suffix rules, putting all outputs into $(obj).
306 """)
307
308 SHARED_HEADER_SUFFIX_RULES_SRCDIR = {
309 '.c': ("""\
310 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
311 @$(call do_cmd,cc,1)
312 """),
313 '.s': ("""\
314 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
315 @$(call do_cmd,cc)
316 """),
317 '.S': ("""\
318 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
319 @$(call do_cmd,cc)
320 """),
321 '.cpp': ("""\
322 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
323 @$(call do_cmd,cxx,1)
324 """),
325 '.cc': ("""\
326 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
327 @$(call do_cmd,cxx,1)
328 """),
329 '.cxx': ("""\
330 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
331 @$(call do_cmd,cxx,1)
332 """),
333 }
334
335 SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
336 # Try building from generated source, too.
337 """)
338
339 SHARED_HEADER_SUFFIX_RULES_OBJDIR1 = {
340 '.c': ("""\
341 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
342 @$(call do_cmd,cc,1)
343 """),
344 '.cc': ("""\
345 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
346 @$(call do_cmd,cxx,1)
347 """),
348 '.cpp': ("""\
349 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
350 @$(call do_cmd,cxx,1)
351 """),
352 }
353
354 SHARED_HEADER_SUFFIX_RULES_OBJDIR2 = {
355 '.c': ("""\
356 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
357 @$(call do_cmd,cc,1)
358 """),
359 '.cc': ("""\
360 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
361 @$(call do_cmd,cxx,1)
362 """),
363 '.cpp': ("""\
364 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
365 @$(call do_cmd,cxx,1)
366 """),
367 }
368
369 SHARED_HEADER_SUFFIX_RULES = (
370 SHARED_HEADER_SUFFIX_RULES_COMMENT1 +
371 ''.join(SHARED_HEADER_SUFFIX_RULES_SRCDIR.values()) +
372 SHARED_HEADER_SUFFIX_RULES_COMMENT2 +
373 ''.join(SHARED_HEADER_SUFFIX_RULES_OBJDIR1.values()) +
374 ''.join(SHARED_HEADER_SUFFIX_RULES_OBJDIR2.values())
375 )
376
377 # This gets added to the very beginning of the Makefile.
378 SHARED_HEADER_SRCDIR = ("""\
379 # The source directory tree.
380 srcdir := %s
381
382 """)
383
384 SHARED_HEADER_BUILDDIR_NAME = ("""\
385 # The name of the builddir.
386 builddir_name ?= %s
387
388 """)
389
390 SHARED_FOOTER = """\
391 # "all" is a concatenation of the "all" targets from all the included
392 # sub-makefiles. This is just here to clarify.
393 all:
394
395 # Add in dependency-tracking rules. $(all_deps) is the list of every single
396 # target in our tree. First, only consider targets that already have been
397 # built, as unbuilt targets will be built regardless of dependency info:
398 all_deps := $(wildcard $(sort $(all_deps)))
399 # Of those, only consider the ones with .d (dependency) info:
400 d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
401 ifneq ($(d_files),)
402 include $(d_files)
403 endif
404 """
405
406 header = """\
407 # This file is generated by gyp; do not edit.
408
409 """
410
411
def Compilable(filename):
  """Return true if the file is compilable (should be in OBJS)."""
  # str.endswith accepts a tuple of suffixes; matching is case-sensitive,
  # so '.s' and '.S' are distinct entries.
  return filename.endswith(('.c', '.cc', '.cpp', '.cxx', '.s', '.S'))
419
420
def Target(filename):
  """Translate a compilable filename to the .o target it produces."""
  root, _ = os.path.splitext(filename)
  return root + '.o'
424
425
def EscapeShellArgument(s):
  """Single-quote |s| so a POSIX shell interprets it literally.

  Embedded single quotes are rewritten as '\\'' (close quote, escaped
  quote, reopen quote).  Taken from
  http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
  """
  return "'%s'" % s.replace("'", "'\\''")
432
433
def EscapeMakeVariableExpansion(s):
  """Double every '$' so make passes the string through literally instead
  of performing variable expansion."""
  return '$$'.join(s.split('$'))
438
439
def EscapeCppDefine(s):
  """Escapes a CPP define so that it will reach the compiler unaltered."""
  # Shell-quote first, then escape make's $-expansion in the quoted result,
  # since the define travels through both make and the shell.
  return EscapeMakeVariableExpansion(EscapeShellArgument(s))
445
446
def QuoteIfNecessary(string):
  """TODO: Should this ideally be replaced with one or more of the above
  functions?"""
  # Only strings containing a double quote get wrapped and escaped.
  if '"' not in string:
    return string
  return '"%s"' % string.replace('"', '\\"')
453
454
# Prefix prepended to relative source paths; set to '$(srcdir)/' by
# GenerateOutput() when --generator-output is in effect.
srcdir_prefix = ''
def Sourceify(path):
  """Convert a path to its source directory form."""
  # Make-variable references and absolute paths pass through untouched;
  # only plain relative paths get the srcdir prefix.
  if '$(' in path or os.path.isabs(path):
    return path
  return srcdir_prefix + path
463
464
# Map from qualified target to path to output.
# Populated by MakefileWriter.Write() as each target is processed.
target_outputs = {}
# Map from qualified target to a list of all linker dependencies,
# transitively expanded.
# Used in building shared-library-based executables.
# Populated by MakefileWriter.Write() for static/shared library targets only.
target_link_deps = {}
471
472
class MakefileWriter:
  """MakefileWriter packages up the writing of one target-specific foobar.mk.

  Its only real entry point is Write(), and is mostly used for namespacing.
  (GenerateOutput() also calls WriteSubMake() to emit small per-gyp wrapper
  Makefiles.)
  """
478
  def Write(self, qualified_target, base_path, output_filename, spec, configs,
            part_of_all):
    """The main entry point: writes a .mk file for a single target.

    Arguments:
      qualified_target: target we're generating
      base_path: path relative to source root we're building in, used to resolve
                 target-relative paths
      output_filename: output .mk file name to write
      spec, configs: gyp info
      part_of_all: flag indicating this target is part of 'all'
    """
    print 'Generating %s' % output_filename

    ensure_directory_exists(output_filename)

    self.fp = open(output_filename, 'w')

    self.fp.write(header)

    # Cache per-target state used by the other Write* methods.
    self.path = base_path
    self.target = spec['target_name']
    self.type = spec['type']
    self.toolset = spec['toolset']

    deps, link_deps = self.ComputeDeps(spec)

    # Some of the generation below can add extra output, sources, or
    # link dependencies. All of the out params of the functions that
    # follow use names like extra_foo.
    extra_outputs = []
    extra_sources = []
    extra_link_deps = []

    self.output = self.ComputeOutput(spec)
    self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
                                 'shared_library')
    # Installable targets get a short basename alias; others alias the
    # full output path.
    if self.type in self._INSTALLABLE_TARGETS:
      self.alias = os.path.basename(self.output)
    else:
      self.alias = self.output

    self.WriteLn("TOOLSET := " + self.toolset)
    self.WriteLn("TARGET := " + self.target)

    # Actions must come first, since they can generate more OBJs for use below.
    if 'actions' in spec:
      self.WriteActions(spec['actions'], extra_sources, extra_outputs,
                        part_of_all)

    # Rules must be early like actions.
    if 'rules' in spec:
      self.WriteRules(spec['rules'], extra_sources, extra_outputs, part_of_all)

    if 'copies' in spec:
      self.WriteCopies(spec['copies'], extra_outputs, part_of_all)

    all_sources = spec.get('sources', []) + extra_sources
    if all_sources:
      self.WriteSources(configs, deps, all_sources,
                        extra_outputs, extra_link_deps, part_of_all)
      sources = filter(Compilable, all_sources)
      if sources:
        self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
        # Only emit suffix rules for extensions that actually occur in this
        # target's sources.
        extensions = set([os.path.splitext(s)[1] for s in sources])
        for ext in extensions:
          if ext in SHARED_HEADER_SUFFIX_RULES_SRCDIR:
            self.WriteLn(SHARED_HEADER_SUFFIX_RULES_SRCDIR[ext])
        self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
        for ext in extensions:
          if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR1:
            self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR1[ext])
        for ext in extensions:
          if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR2:
            self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR2[ext])
        self.WriteLn('# End of this set of suffix rules')


    self.WriteTarget(spec, configs, deps,
                     extra_link_deps + link_deps, extra_outputs, part_of_all)

    # Update global list of target outputs, used in dependency tracking.
    target_outputs[qualified_target] = self.alias

    # Update global list of link dependencies.
    if self.type == 'static_library':
      target_link_deps[qualified_target] = [self.output]
    elif self.type == 'shared_library':
      # Anyone that uses us transitively depend on all of our link
      # dependencies.
      target_link_deps[qualified_target] = [self.output] + link_deps

    self.fp.close()
572
573
  def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
    """Write a "sub-project" Makefile.

    This is a small, wrapper Makefile that calls the top-level Makefile to build
    the targets from a single gyp file (i.e. a sub-project).

    Arguments:
      output_filename: sub-project Makefile name to write
      makefile_path: path to the top-level Makefile
      targets: list of "all" targets for this sub-project
      build_dir: build output directory, relative to the sub-project
    """
    print 'Generating %s' % output_filename

    ensure_directory_exists(output_filename)
    self.fp = open(output_filename, 'w')
    self.fp.write(header)
    # For consistency with other builders, put sub-project build output in the
    # sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
    self.WriteLn('export builddir_name ?= %s' %
                 os.path.join(os.path.dirname(output_filename), build_dir))
    self.WriteLn('.PHONY: all')
    self.WriteLn('all:')
    if makefile_path:
      makefile_path = ' -C ' + makefile_path
    # Re-invoke the top-level Makefile (via -C when it lives elsewhere) with
    # just this sub-project's targets.
    self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
    self.fp.close()
601
602
  def WriteActions(self, actions, extra_sources, extra_outputs, part_of_all):
    """Write Makefile code for any 'actions' from the gyp input.

    extra_sources: a list that will be filled in with newly generated source
                   files, if any
    extra_outputs: a list that will be filled in with any outputs of these
                   actions (used to make other pieces dependent on these
                   actions)
    part_of_all: flag indicating this target is part of 'all'
    """
    for action in actions:
      # Namespace the rule name by target so it stays unique file-wide.
      name = self.target + '_' + action['action_name']
      self.WriteLn('### Rules for action "%s":' % action['action_name'])
      inputs = action['inputs']
      outputs = action['outputs']

      # Build up a list of outputs.
      # Collect the output dirs we'll need.
      dirs = set()
      for out in outputs:
        dir = os.path.split(out)[0]
        if dir:
          dirs.add(dir)
      if int(action.get('process_outputs_as_sources', False)):
        extra_sources += outputs

      # Write the actual command.
      command = gyp.common.EncodePOSIXShellList(action['action'])
      if 'message' in action:
        self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
      else:
        self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name))
      if len(dirs) > 0:
        command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
      # Set LD_LIBRARY_PATH in case the action runs an executable from this
      # build which links to shared libs from this build.
      if self.path:
        cd_action = 'cd %s; ' % Sourceify(self.path)
      else:
        cd_action = ''
      # actions run on the host, so they should in theory only use host
      # libraries, but until everything is made cross-compile safe, also use
      # target libraries.
      # TODO(piman): when everything is cross-compile safe, remove lib.target
      self.WriteLn('cmd_%s = export LD_LIBRARY_PATH=$(builddir)/lib.host:'
                   '$(builddir)/lib.target:$$LD_LIBRARY_PATH; %s%s'
                   % (name, cd_action, command))
      self.WriteLn()
      outputs = map(self.Absolutify, outputs)
      # The makefile rules are all relative to the top dir, but the gyp actions
      # are defined relative to their containing dir. This replaces the obj
      # variable for the action rule with an absolute version so that the
      # output goes in the right place.
      # Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
      # it's superfluous for the "extra outputs", and this avoids accidentally
      # writing duplicate dummy rules for those outputs.
      self.WriteMakeRule(outputs[:1], ['obj := $(abs_obj)'])
      self.WriteMakeRule(outputs[:1], ['builddir := $(abs_builddir)'])
      self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)),
                      part_of_all=part_of_all, command=name)

      # Stuff the outputs in a variable so we can refer to them later.
      outputs_variable = 'action_%s_outputs' % name
      self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs)))
      extra_outputs.append('$(%s)' % outputs_variable)
      self.WriteLn()

    self.WriteLn()
671
672
  def WriteRules(self, rules, extra_sources, extra_outputs, part_of_all):
    """Write Makefile code for any 'rules' from the gyp input.

    extra_sources: a list that will be filled in with newly generated source
                   files, if any
    extra_outputs: a list that will be filled in with any outputs of these
                   rules (used to make other pieces dependent on these rules)
    part_of_all: flag indicating this target is part of 'all'
    """
    for rule in rules:
      name = self.target + '_' + rule['rule_name']
      # count indexes the per-source commands: cmd_<name>_<count>.
      count = 0
      self.WriteLn('### Generated for rule %s:' % name)

      all_outputs = []

      for rule_source in rule['rule_sources']:
        dirs = set()
        rule_source_basename = os.path.basename(rule_source)
        (rule_source_root, rule_source_ext) = \
            os.path.splitext(rule_source_basename)

        # Expand %(INPUT_ROOT)s in each declared output for this source.
        outputs = [self.ExpandInputRoot(out, rule_source_root)
                   for out in rule['outputs']]
        for out in outputs:
          dir = os.path.dirname(out)
          if dir:
            dirs.add(dir)
          if int(rule.get('process_outputs_as_sources', False)):
            extra_sources.append(out)
        all_outputs += outputs
        inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
                                    rule.get('inputs', [])))
        actions = ['$(call do_cmd,%s_%d)' % (name, count)]

        if name == 'resources_grit':
          # HACK: This is ugly. Grit intentionally doesn't touch the
          # timestamp of its output file when the file doesn't change,
          # which is fine in hash-based dependency systems like scons
          # and forge, but not kosher in the make world. After some
          # discussion, hacking around it here seems like the least
          # amount of pain.
          actions += ['@touch --no-create $@']

        # Only write the 'obj' and 'builddir' rules for the "primary" output
        # (:1); it's superfluous for the "extra outputs", and this avoids
        # accidentally writing duplicate dummy rules for those outputs.
        self.WriteMakeRule(outputs[:1], ['obj := $(abs_obj)'])
        self.WriteMakeRule(outputs[:1], ['builddir := $(abs_builddir)'])
        self.WriteMakeRule(outputs, inputs + ['FORCE_DO_CMD'], actions)
        self.WriteLn('all_deps += %s' % ' '.join(outputs))

        action = [self.ExpandInputRoot(ac, rule_source_root)
                  for ac in rule['action']]
        mkdirs = ''
        if len(dirs) > 0:
          mkdirs = 'mkdir -p %s; ' % ' '.join(dirs)
        if self.path:
          cd_action = 'cd %s; ' % Sourceify(self.path)
        else:
          cd_action = ''
        # Set LD_LIBRARY_PATH in case the rule runs an executable from this
        # build which links to shared libs from this build.
        # rules run on the host, so they should in theory only use host
        # libraries, but until everything is made cross-compile safe, also use
        # target libraries.
        # TODO(piman): when everything is cross-compile safe, remove lib.target
        self.WriteLn(
            "cmd_%(name)s_%(count)d = export LD_LIBRARY_PATH="
            "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
            "%(cd_action)s%(mkdirs)s%(action)s" % {
              'action': gyp.common.EncodePOSIXShellList(action),
              'cd_action': cd_action,
              'count': count,
              'mkdirs': mkdirs,
              'name': name,
            })
        self.WriteLn(
            'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % {
              'count': count,
              'name': name,
            })
        self.WriteLn()
        count += 1

      outputs_variable = 'rule_%s_outputs' % name
      self.WriteList(all_outputs, outputs_variable)
      extra_outputs.append('$(%s)' % outputs_variable)

      self.WriteLn('### Finished generating for rule: %s' % name)
      self.WriteLn()
    self.WriteLn('### Finished generating for all rules')
    self.WriteLn('')
766
767
768 def WriteCopies(self, copies, extra_outputs, part_of_all):
769 """Write Makefile code for any 'copies' from the gyp input.
770
771 extra_outputs: a list that will be filled in with any outputs of this action
772 (used to make other pieces dependent on this action)
773 part_of_all: flag indicating this target is part of 'all'
774 """
775 self.WriteLn('### Generated for copy rule.')
776
777 variable = self.target + '_copies'
778 outputs = []
779 for copy in copies:
780 for path in copy['files']:
781 path = Sourceify(self.Absolutify(path))
782 filename = os.path.split(path)[1]
783 output = Sourceify(self.Absolutify(os.path.join(copy['destination'],
784 filename)))
785 self.WriteDoCmd([output], [path], 'copy', part_of_all)
786 outputs.append(output)
787 self.WriteLn('%s = %s' % (variable, ' '.join(outputs)))
788 extra_outputs.append('$(%s)' % variable)
789 self.WriteLn()
790
791
792 def WriteSources(self, configs, deps, sources,
793 extra_outputs, extra_link_deps,
794 part_of_all):
795 """Write Makefile code for any 'sources' from the gyp input.
796 These are source files necessary to build the current target.
797
798 configs, deps, sources: input from gyp.
799 extra_outputs: a list of extra outputs this action should be dependent on;
800 used to serialize action/rules before compilation
801 extra_link_deps: a list that will be filled in with any outputs of
802 compilation (to be used in link lines)
803 part_of_all: flag indicating this target is part of 'all'
804 """
805
806 # Write configuration-specific variables for CFLAGS, etc.
807 for configname in sorted(configs.keys()):
808 config = configs[configname]
809 self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D',
810 quoter=EscapeCppDefine)
811 self.WriteLn("# Flags passed to both C and C++ files.");
812 self.WriteList(config.get('cflags'), 'CFLAGS_%s' % configname)
813 self.WriteLn("# Flags passed to only C (and not C++) files.");
814 self.WriteList(config.get('cflags_c'), 'CFLAGS_C_%s' % configname)
815 self.WriteLn("# Flags passed to only C++ (and not C) files.");
816 self.WriteList(config.get('cflags_cc'), 'CFLAGS_CC_%s' % configname)
817 includes = config.get('include_dirs')
818 if includes:
819 includes = map(Sourceify, map(self.Absolutify, includes))
820 self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
821
822 sources = filter(Compilable, sources)
823 objs = map(self.Objectify, map(self.Absolutify, map(Target, sources)))
824 self.WriteList(objs, 'OBJS')
825
826 self.WriteLn('# Add to the list of files we specially track '
827 'dependencies for.')
828 self.WriteLn('all_deps += $(OBJS)')
829 self.WriteLn()
830
831 # Make sure our dependencies are built first.
832 if deps:
833 self.WriteMakeRule(['$(OBJS)'], deps,
834 comment = 'Make sure our dependencies are built '
835 'before any of us.',
836 order_only = True)
837
838 # Make sure the actions and rules run first.
839 # If they generate any extra headers etc., the per-.o file dep tracking
840 # will catch the proper rebuilds, so order only is still ok here.
841 if extra_outputs:
842 self.WriteMakeRule(['$(OBJS)'], extra_outputs,
843 comment = 'Make sure our actions/rules run '
844 'before any of us.',
845 order_only = True)
846
847 if objs:
848 extra_link_deps.append('$(OBJS)')
849 self.WriteLn("""\
850 # CFLAGS et al overrides must be target-local.
851 # See "Target-specific Variable Values" in the GNU Make manual.""")
852 self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)")
853 self.WriteLn("$(OBJS): GYP_CFLAGS := $(CFLAGS_$(BUILDTYPE)) "
854 "$(CFLAGS_C_$(BUILDTYPE)) "
855 "$(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))")
856 self.WriteLn("$(OBJS): GYP_CXXFLAGS := $(CFLAGS_$(BUILDTYPE)) "
857 "$(CFLAGS_CC_$(BUILDTYPE)) "
858 "$(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))")
859
860 self.WriteLn()
861
862
863 def ComputeOutput(self, spec):
864 """Return the 'output' (full output path) of a gyp spec.
865
866 E.g., the loadable module 'foobar' in directory 'baz' will produce
867 '$(obj)/baz/libfoobar.so'
868 """
869 output = None
870 target = spec['target_name']
871 target_prefix = ''
872 target_ext = ''
873 path = os.path.join('$(obj).' + self.toolset, self.path)
874 if self.type == 'static_library':
875 if target[:3] == 'lib':
876 target = target[3:]
877 target_prefix = 'lib'
878 target_ext = '.a'
879 elif self.type in ('loadable_module', 'shared_library'):
880 if target[:3] == 'lib':
881 target = target[3:]
882 target_prefix = 'lib'
883 target_ext = '.so'
884 elif self.type == 'none':
885 target = '%s.stamp' % target
886 elif self.type == 'settings':
887 return None
888 elif self.type == 'executable':
889 path = os.path.join('$(builddir)')
890 else:
891 print ("ERROR: What output file should be generated?",
892 "typ", self.type, "target", target)
893
894 path = spec.get('product_dir', path)
895 target_prefix = spec.get('product_prefix', target_prefix)
896 target = spec.get('product_name', target)
897 product_ext = spec.get('product_extension')
898 if product_ext:
899 target_ext = '.' + product_ext
900
901 return os.path.join(path, target_prefix + target + target_ext)
902
903
904 def ComputeDeps(self, spec):
905 """Compute the dependencies of a gyp spec.
906
907 Returns a tuple (deps, link_deps), where each is a list of
908 filenames that will need to be put in front of make for either
909 building (deps) or linking (link_deps).
910 """
911 deps = []
912 link_deps = []
913 if 'dependencies' in spec:
914 deps.extend([target_outputs[dep] for dep in spec['dependencies']
915 if target_outputs[dep]])
916 for dep in spec['dependencies']:
917 if dep in target_link_deps:
918 link_deps.extend(target_link_deps[dep])
919 deps.extend(link_deps)
920 # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)?
921 # This hack makes it work:
922 # link_deps.extend(spec.get('libraries', []))
923 return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
924
925
926 def WriteTarget(self, spec, configs, deps, link_deps, extra_outputs,
927 part_of_all):
928 """Write Makefile code to produce the final target of the gyp spec.
929
930 spec, configs: input from gyp.
931 deps, link_deps: dependency lists; see ComputeDeps()
932 extra_outputs: any extra outputs that our target should depend on
933 part_of_all: flag indicating this target is part of 'all'
934 """
935
936 self.WriteLn('### Rules for final target.')
937
938 if extra_outputs:
939 self.WriteMakeRule([self.output], extra_outputs,
940 comment = 'Build our special outputs first.',
941 order_only = True)
942 self.WriteMakeRule(extra_outputs, deps,
943 comment=('Preserve order dependency of '
944 'special output on deps.'),
945 order_only = True,
946 multiple_output_trick = False)
947
948 if self.type not in ('settings', 'none'):
949 for configname in sorted(configs.keys()):
950 config = configs[configname]
951 self.WriteList(config.get('ldflags'), 'LDFLAGS_%s' % configname)
952 libraries = spec.get('libraries')
953 if libraries:
954 # Remove duplicate entries
955 libraries = gyp.common.uniquer(libraries)
956 self.WriteList(libraries, 'LIBS')
957 self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' % self.output)
958 self.WriteLn('%s: LIBS := $(LIBS)' % self.output)
959
960 if self.type == 'executable':
961 self.WriteDoCmd([self.output], link_deps, 'link', part_of_all)
962 elif self.type == 'static_library':
963 self.WriteDoCmd([self.output], link_deps, 'alink', part_of_all)
964 elif self.type in ('loadable_module', 'shared_library'):
965 self.WriteDoCmd([self.output], link_deps, 'solink', part_of_all)
966 elif self.type == 'none':
967 # Write a stamp line.
968 self.WriteDoCmd([self.output], deps, 'touch', part_of_all)
969 elif self.type == 'settings':
970 # Only used for passing flags around.
971 pass
972 else:
973 print "WARNING: no output for", self.type, target
974
975 # Add an alias for each target (if there are any outputs).
976 # Installable target aliases are created below.
977 if ((self.output and self.output != self.target) and
978 (self.type not in self._INSTALLABLE_TARGETS)):
979 self.WriteMakeRule([self.target], [self.output],
980 comment='Add target alias', phony = True)
981 if part_of_all:
982 self.WriteMakeRule(['all'], [self.target],
983 comment = 'Add target alias to "all" target.',
984 phony = True)
985
986 # Add special-case rules for our installable targets.
987 # 1) They need to install to the build dir or "product" dir.
988 # 2) They get shortcuts for building (e.g. "make chrome").
989 # 3) They are part of "make all".
990 if self.type in self._INSTALLABLE_TARGETS:
991 if self.type in ('shared_library'):
992 file_desc = 'shared library'
993 # Install all shared libs into a common directory (per toolset) for
994 # convenient access with LD_LIBRARY_PATH.
995 binpath = '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
996 else:
997 file_desc = 'executable'
998 binpath = '$(builddir)/' + self.alias
999 installable_deps = [self.output]
1000 # Point the target alias to the final binary output.
1001 self.WriteMakeRule([self.target], [binpath],
1002 comment='Add target alias', phony = True)
1003 if binpath != self.output:
1004 self.WriteDoCmd([binpath], [self.output], 'copy',
1005 comment = 'Copy this to the %s output path.' %
1006 file_desc, part_of_all=part_of_all)
1007 installable_deps.append(binpath)
1008 if self.output != self.alias and self.alias != self.target:
1009 self.WriteMakeRule([self.alias], installable_deps,
1010 comment = 'Short alias for building this %s.' %
1011 file_desc, phony = True)
1012 if part_of_all:
1013 self.WriteMakeRule(['all'], [binpath],
1014 comment = 'Add %s to "all" target.' % file_desc,
1015 phony = True)
1016
1017
1018 def WriteList(self, list, variable=None, prefix='', quoter=QuoteIfNecessary):
1019 """Write a variable definition that is a list of values.
1020
1021 E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
1022 foo = blaha blahb
1023 but in a pretty-printed style.
1024 """
1025 self.fp.write(variable + " := ")
1026 if list:
1027 list = [quoter(prefix + l) for l in list]
1028 self.fp.write(" \\\n\t".join(list))
1029 self.fp.write("\n\n")
1030
1031
1032 def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None):
1033 """Write a Makefile rule that uses do_cmd.
1034
1035 This makes the outputs dependent on the command line that was run,
1036 as well as support the V= make command line flag.
1037 """
1038 self.WriteMakeRule(outputs, inputs,
1039 actions = ['$(call do_cmd,%s)' % command],
1040 comment = comment,
1041 force = True)
1042 # Add our outputs to the list of targets we read depfiles from.
1043 self.WriteLn('all_deps += %s' % ' '.join(outputs))
1044
1045
  def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
                    order_only=False, force=False, phony=False,
                    multiple_output_trick=True):
    """Write a Makefile rule, with some extra tricks.

    outputs: a list of outputs for the rule (note: this is not directly
             supported by make; see comments below)
    inputs: a list of inputs for the rule
    actions: a list of shell commands to run for the rule
    comment: a comment to put in the Makefile above the rule (also useful
             for making this Python script's code self-documenting)
    order_only: if true, makes the dependency order-only
    force: if true, include FORCE_DO_CMD as an order-only dep
    phony: if true, the rule does not actually generate the named output, the
           output is just a name to run the rule
    multiple_output_trick: if true (the default), perform tricks such as dummy
                           rules to avoid problems with multiple outputs.
    """
    if comment:
      self.WriteLn('# ' + comment)
    if phony:
      self.WriteLn('.PHONY: ' + ' '.join(outputs))
    # TODO(evanm): just make order_only a list of deps instead of these hacks.
    if order_only:
      order_insert = '| '
    else:
      order_insert = ''
    if force:
      force_append = ' FORCE_DO_CMD'
    else:
      force_append = ''
    if actions:
      # Rules with recipes need TOOLSET bound target-locally for do_cmd.
      self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
    self.WriteLn('%s: %s%s%s' % (outputs[0], order_insert, ' '.join(inputs),
                                 force_append))
    if actions:
      for action in actions:
        self.WriteLn('\t%s' % action)
    if multiple_output_trick and len(outputs) > 1:
      # If we have more than one output, a rule like
      #   foo bar: baz
      # means that for *each* output we must run the action, potentially
      # in parallel. That is not what we're trying to write -- what
      # we want is that we run the action once and it generates all
      # the files.
      # http://www.gnu.org/software/hello/manual/automake/Multiple-Outputs.html
      # discusses this problem and has this solution:
      # 1) Write the naive rule that would produce parallel runs of
      # the action.
      # 2) Make the outputs serialized on each other, so we won't start
      # a parallel run until the first run finishes, at which point
      # we'll have generated all the outputs and we're done.
      self.WriteLn('%s: %s' % (' '.join(outputs[1:]), outputs[0]))
      # Add a dummy command to the "extra outputs" rule, otherwise make seems to
      # think these outputs haven't (couldn't have?) changed, and thus doesn't
      # flag them as changed (i.e. include in '$?') when evaluating dependent
      # rules, which in turn causes do_cmd() to skip running dependent commands.
      self.WriteLn('%s: ;' % (' '.join(outputs[1:])))
    self.WriteLn()
1105
1106
1107 def WriteLn(self, text=''):
1108 self.fp.write(text + '\n')
1109
1110
1111 def Objectify(self, path):
1112 """Convert a path to its output directory form."""
1113 if '$(' in path:
1114 path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset)
1115 return path
1116 return '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
1117
1118 def Absolutify(self, path):
1119 """Convert a subdirectory-relative path into a base-relative path.
1120 Skips over paths that contain variables."""
1121 if '$(' in path:
1122 return path
1123 return os.path.normpath(os.path.join(self.path, path))
1124
1125
1126 def FixupArgPath(self, arg):
1127 if '/' in arg or '.h.' in arg:
1128 return self.Absolutify(arg)
1129 return arg
1130
1131
1132 def ExpandInputRoot(self, template, expansion):
1133 if '%(INPUT_ROOT)s' not in template:
1134 return template
1135 path = template % { 'INPUT_ROOT': expansion }
1136 if not os.path.dirname(path):
1137 # If it's just the file name, turn it into a path so FixupArgPath()
1138 # will know to Absolutify() it.
1139 path = os.path.join('.', path)
1140 return path
1141
1142
def GenerateOutput(target_list, target_dicts, data, params):
  """Generator entry point: write the root Makefile, one .mk per target,
  and one wrapper Makefile per gyp file (sub-project)."""
  options = params['options']
  generator_flags = params.get('generator_flags', {})
  builddir_name = generator_flags.get('output_dir', 'out')

  def CalculateMakefilePath(build_file, base_name):
    """Determine where to write a Makefile for a given gyp file."""
    # Paths in gyp files are relative to the .gyp file, but we want
    # paths relative to the source root for the master makefile. Grab
    # the path of the .gyp file as the base to relativize against.
    # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.depth)
    # We write the file in the base_path directory.
    output_file = os.path.join(options.depth, base_path, base_name)
    if options.generator_output:
      output_file = os.path.join(options.generator_output, output_file)
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.toplevel_dir)
    return base_path, output_file

  # TODO: search for the first non-'Default' target. This can go
  # away when we add verification that all targets have the
  # necessary configurations.
  default_configuration = None
  toolsets = set([target_dicts[target]['toolset'] for target in target_list])
  for target in target_list:
    spec = target_dicts[target]
    if spec['default_configuration'] != 'Default':
      default_configuration = spec['default_configuration']
      break
  if not default_configuration:
    default_configuration = 'Default'

  srcdir = '.'
  makefile_name = 'Makefile' + options.suffix
  makefile_path = os.path.join(options.toplevel_dir, makefile_name)
  if options.generator_output:
    # When writing below the generator-output dir, source paths must be
    # prefixed with $(srcdir)/ (see module-level Sourceify()).
    global srcdir_prefix
    makefile_path = os.path.join(options.generator_output, makefile_path)
    srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
    srcdir_prefix = '$(srcdir)/'
  ensure_directory_exists(makefile_path)
  root_makefile = open(makefile_path, 'w')
  root_makefile.write(SHARED_HEADER_SRCDIR % srcdir)
  root_makefile.write(SHARED_HEADER_BUILDDIR_NAME % builddir_name)
  root_makefile.write(SHARED_HEADER.replace('__default_configuration__',
                                            default_configuration))
  for toolset in toolsets:
    root_makefile.write('TOOLSET := %s\n' % toolset)
  root_makefile.write(ROOT_HEADER_SUFFIX_RULES)

  # Find the list of targets that derive from the gyp file(s) being built.
  needed_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
      needed_targets.add(target)

  build_files = set()
  include_list = set()
  for qualified_target in target_list:
    build_file, target, toolset = gyp.common.ParseQualifiedTarget(
        qualified_target)
    build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
    included_files = data[build_file]['included_files']
    for included_file in included_files:
      # The included_files entries are relative to the dir of the build file
      # that included them, so we have to undo that and then make them relative
      # to the root dir.
      relative_include_file = gyp.common.RelativePath(
          gyp.common.UnrelativePath(included_file, build_file),
          options.toplevel_dir)
      abs_include_file = os.path.abspath(relative_include_file)
      # If the include file is from the ~/.gyp dir, we should use absolute path
      # so that relocating the src dir doesn't break the path.
      if (params['home_dot_gyp'] and
          abs_include_file.startswith(params['home_dot_gyp'])):
        build_files.add(abs_include_file)
      else:
        build_files.add(relative_include_file)

    base_path, output_file = CalculateMakefilePath(build_file,
        target + '.' + toolset + options.suffix + '.mk')

    spec = target_dicts[qualified_target]
    configs = spec['configurations']

    writer = MakefileWriter()
    writer.Write(qualified_target, base_path, output_file, spec, configs,
                 part_of_all=qualified_target in needed_targets)

    # Our root_makefile lives at the source root. Compute the relative path
    # from there to the output_file for including.
    mkfile_rel_path = gyp.common.RelativePath(output_file,
                                              os.path.dirname(makefile_path))
    include_list.add('include ' + mkfile_rel_path + '\n')

  # Write out per-gyp (sub-project) Makefiles.
  depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd())
  for build_file in build_files:
    # The paths in build_files were relativized above, so undo that before
    # testing against the non-relativized items in target_list and before
    # calculating the Makefile path.
    build_file = os.path.join(depth_rel_path, build_file)
    gyp_targets = [target_dicts[target]['target_name'] for target in target_list
                   if target.startswith(build_file) and
                   target in needed_targets]
    # Only generate Makefiles for gyp files with targets.
    if not gyp_targets:
      continue
    base_path, output_file = CalculateMakefilePath(build_file,
        os.path.splitext(os.path.basename(build_file))[0] + '.Makefile')
    makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path),
                                                os.path.dirname(output_file))
    writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets,
                        builddir_name)


  # Write out the sorted list of includes.
  root_makefile.write('\n')
  for include in sorted(include_list):
    root_makefile.write(include)
  root_makefile.write('\n')

  # Write the target to regenerate the Makefile.
  if generator_flags.get('auto_regeneration', True):
    build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir)
                        for filename in params['build_files_arg']]
    gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
                                              options.toplevel_dir)
    if not gyp_binary.startswith(os.sep):
      gyp_binary = os.path.join('.', gyp_binary)
    root_makefile.write("%s: %s\n\t%s\n" % (
        makefile_name,
        ' '.join(map(Sourceify, build_files)),
        gyp.common.EncodePOSIXShellList(
            [gyp_binary, '-fmake'] +
            gyp.RegenerateFlags(options) +
            build_files_args)))

  root_makefile.write(SHARED_FOOTER)

  root_makefile.close()
+0
-1216
third_party/gyp/pylib/gyp/generator/msvs.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import ntpath
7 import posixpath
8 import os
9 import re
10 import subprocess
11 import sys
12
13 import gyp.MSVSNew as MSVSNew
14 import gyp.MSVSProject as MSVSProject
15 import gyp.MSVSToolFile as MSVSToolFile
16 import gyp.MSVSUserFile as MSVSUserFile
17 import gyp.MSVSVersion as MSVSVersion
18 import gyp.common
19
20
# Regular expression for validating Visual Studio GUIDs. If the GUID
# contains lowercase hex letters, MSVS will be fine. However,
# IncrediBuild BuildConsole will parse the solution file, but then
# silently skip building the target causing hard to track down errors.
# Note that this only happens with the BuildConsole, and does not occur
# if IncrediBuild is executed from inside Visual Studio. This regex
# validates that the string looks like a GUID with all uppercase hex
# letters (lowercase is deliberately rejected, see above).
VALID_MSVS_GUID_CHARS = re.compile('^[A-F0-9\-]+$')


# Values substituted for the generic gyp variables; each maps a gyp
# concept onto the corresponding Visual Studio macro or convention.
generator_default_variables = {
    'EXECUTABLE_PREFIX': '',
    'EXECUTABLE_SUFFIX': '.exe',
    'STATIC_LIB_PREFIX': '',
    'SHARED_LIB_PREFIX': '',
    'STATIC_LIB_SUFFIX': '.lib',
    'SHARED_LIB_SUFFIX': '.dll',
    'INTERMEDIATE_DIR': '$(IntDir)',
    'SHARED_INTERMEDIATE_DIR': '$(OutDir)/obj/global_intermediate',
    'OS': 'win',
    'PRODUCT_DIR': '$(OutDir)',
    'LIB_DIR': '$(OutDir)/lib',
    'RULE_INPUT_ROOT': '$(InputName)',
    'RULE_INPUT_EXT': '$(InputExt)',
    'RULE_INPUT_NAME': '$(InputFileName)',
    'RULE_INPUT_PATH': '$(InputPath)',
    'CONFIGURATION_NAME': '$(ConfigurationName)',
}


# The msvs specific sections that hold paths (so the gyp input machinery
# knows to relativize their contents like other path lists).
generator_additional_path_sections = [
    'msvs_cygwin_dirs',
    'msvs_props',
]

# msvs-specific keys that live at target level rather than per-configuration.
generator_additional_non_configuration_keys = [
    'msvs_cygwin_dirs',
    'msvs_cygwin_shell',
]
62
63 cached_username = None
64 cached_domain = None
65
66 # TODO(gspencer): Switch the os.environ calls to be
67 # win32api.GetDomainName() and win32api.GetUserName() once the
68 # python version in depot_tools has been updated to work on Vista
69 # 64-bit.
70 def _GetDomainAndUserName():
71 if sys.platform not in ('win32', 'cygwin'):
72 return ('DOMAIN', 'USERNAME')
73 global cached_username
74 global cached_domain
75 if not cached_domain or not cached_username:
76 domain = os.environ.get('USERDOMAIN')
77 username = os.environ.get('USERNAME')
78 if not domain or not username:
79 call = subprocess.Popen(['net', 'config', 'Workstation'],
80 stdout=subprocess.PIPE)
81 config = call.communicate()[0]
82 username_re = re.compile('^User name\s+(\S+)', re.MULTILINE)
83 username_match = username_re.search(config)
84 if username_match:
85 username = username_match.group(1)
86 domain_re = re.compile('^Logon domain\s+(\S+)', re.MULTILINE)
87 domain_match = domain_re.search(config)
88 if domain_match:
89 domain = domain_match.group(1)
90 cached_domain = domain
91 cached_username = username
92 return (cached_domain, cached_username)
93
94 fixpath_prefix = None
95
96 def _FixPath(path):
97 """Convert paths to a form that will make sense in a vcproj file.
98
99 Arguments:
100 path: The path to convert, may contain / etc.
101 Returns:
102 The path with all slashes made into backslashes.
103 """
104 if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$':
105 path = os.path.join(fixpath_prefix, path)
106 path = path.replace('/', '\\')
107 if len(path) > 0 and path[-1] == '\\':
108 path = path[:-1]
109 return path
110
111
112 def _SourceInFolders(sources, prefix=None, excluded=None):
113 """Converts a list split source file paths into a vcproj folder hierarchy.
114
115 Arguments:
116 sources: A list of source file paths split.
117 prefix: A list of source file path layers meant to apply to each of sources.
118 Returns:
119 A hierarchy of filenames and MSVSProject.Filter objects that matches the
120 layout of the source tree.
121 For example:
122 _SourceInFolders([['a', 'bob1.c'], ['b', 'bob2.c']], prefix=['joe'])
123 -->
124 [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
125 MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
126 """
127 if not prefix: prefix = []
128 result = []
129 excluded_result = []
130 folders = dict()
131 # Gather files into the final result, excluded, or folders.
132 for s in sources:
133 if len(s) == 1:
134 filename = '\\'.join(prefix + s)
135 if filename in excluded:
136 excluded_result.append(filename)
137 else:
138 result.append(filename)
139 else:
140 if not folders.get(s[0]):
141 folders[s[0]] = []
142 folders[s[0]].append(s[1:])
143 # Add a folder for excluded files.
144 if excluded_result:
145 excluded_folder = MSVSProject.Filter('_excluded_files',
146 contents=excluded_result)
147 result.append(excluded_folder)
148 # Populate all the folders.
149 for f in folders:
150 contents = _SourceInFolders(folders[f], prefix=prefix + [f],
151 excluded=excluded)
152 contents = MSVSProject.Filter(f, contents=contents)
153 result.append(contents)
154
155 return result
156
157
158 def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
159 if not value: return
160 # TODO(bradnelson): ugly hack, fix this more generally!!!
161 if 'Directories' in setting or 'Dependencies' in setting:
162 if type(value) == str:
163 value = value.replace('/', '\\')
164 else:
165 value = [i.replace('/', '\\') for i in value]
166 if not tools.get(tool_name):
167 tools[tool_name] = dict()
168 tool = tools[tool_name]
169 if tool.get(setting):
170 if only_if_unset: return
171 if type(tool[setting]) == list:
172 tool[setting] += value
173 else:
174 raise TypeError(
175 'Appending "%s" to a non-list setting "%s" for tool "%s" is '
176 'not allowed, previous value: %s' % (
177 value, setting, tool_name, str(tool[setting])))
178 else:
179 tool[setting] = value
180
181
182 def _ConfigPlatform(config_data):
183 return config_data.get('msvs_configuration_platform', 'Win32')
184
185
186 def _ConfigBaseName(config_name, platform_name):
187 if config_name.endswith('_' + platform_name):
188 return config_name[0:-len(platform_name)-1]
189 else:
190 return config_name
191
192
def _ConfigFullName(config_name, config_data):
  """Return the 'Base|Platform' configuration name MSVS expects."""
  platform = _ConfigPlatform(config_data)
  base = _ConfigBaseName(config_name, platform)
  return '%s|%s' % (base, platform)
196
197
def _PrepareActionRaw(spec, cmd, cygwin_shell, has_input_path, quote_cmd):
  """Turn a gyp command list into an MSVS CommandLine string.

  Arguments:
    spec: the target dict (read for msvs_cygwin_dirs).
    cmd: the command as a list of arguments.
    cygwin_shell: if true, wrap the command so it runs under cygwin bash.
    has_input_path: whether $(InputPath) may appear in the command.
    quote_cmd: in the non-cygwin case, whether to quote each argument.
  Returns:
    A single command-line string suitable for a vcproj tool.
  """
  if cygwin_shell:
    # Find path to cygwin.
    cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
    # Prepare command: rewrite MSVS macros so the bash side sees
    # cygwin-style paths via cygpath at run time.
    direct_cmd = cmd
    direct_cmd = [i.replace('$(IntDir)',
                            '`cygpath -m "${INTDIR}"`') for i in direct_cmd]
    direct_cmd = [i.replace('$(OutDir)',
                            '`cygpath -m "${OUTDIR}"`') for i in direct_cmd]
    if has_input_path:
      direct_cmd = [i.replace('$(InputPath)',
                              '`cygpath -m "${INPUTPATH}"`')
                    for i in direct_cmd]
    # Quote each argument, then escape the quotes because the whole
    # command is itself embedded in a double-quoted `bash -c "..."`.
    direct_cmd = ['"%s"' % i for i in direct_cmd]
    direct_cmd = [i.replace('"', '\\"') for i in direct_cmd]
    #direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
    direct_cmd = ' '.join(direct_cmd)
    # TODO(quote): regularize quoting path names throughout the module
    # Build the cmd.exe preamble: source cygwin's env, then export only
    # the variables the command actually references.
    cmd = (
        '"$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
        'set CYGWIN=nontsec&& ')
    if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0:
      cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& '
    if direct_cmd.find('INTDIR') >= 0:
      cmd += 'set INTDIR=$(IntDir)&& '
    if direct_cmd.find('OUTDIR') >= 0:
      cmd += 'set OUTDIR=$(OutDir)&& '
    if has_input_path and direct_cmd.find('INPUTPATH') >= 0:
      cmd += 'set INPUTPATH=$(InputPath) && '
    cmd += (
        'bash -c "%(cmd)s"')
    cmd = cmd % {'cygwin_dir': cygwin_dir,
                 'cmd': direct_cmd}
    return cmd
  else:
    # Convert cat --> type to mimic unix.
    if cmd[0] == 'cat':
      cmd = ['type'] + cmd[1:]
    if quote_cmd:
      # Support a mode for using cmd directly.
      # Convert any paths to native form (first element is used directly).
      # TODO(quote): regularize quoting path names throughout the module
      direct_cmd = ([cmd[0].replace('/', '\\')] +
                    ['"%s"' % _FixPath(i) for i in cmd[1:]])
    else:
      direct_cmd = ([cmd[0].replace('/', '\\')] +
                    [_FixPath(i) for i in cmd[1:]])
    # Collapse into a single command.
    return ' '.join(direct_cmd)
248
def _PrepareAction(spec, rule, has_input_path):
  """Resolve a rule's cygwin/quoting options and build its command line.

  Arguments:
    spec: the target dict (consulted for target-level defaults).
    rule: the rule/action dict containing 'action' and optional
        'msvs_cygwin_shell' / 'msvs_quote_cmd' overrides.
    has_input_path: whether $(InputPath) may appear in the command.
  Returns:
    The command-line string produced by _PrepareActionRaw.
  """
  # NOTE: a dead `cygwin_dir = _FixPath(...)` local was removed here;
  # _PrepareActionRaw computes the cygwin dir itself.

  # Currently this weird argument munging is used to duplicate the way a
  # python script would need to be run as part of the chrome tree.
  # Eventually we should add some sort of rule_default option to set this
  # per project. For now the behavior chrome needs is the default.
  mcs = rule.get('msvs_cygwin_shell')
  if mcs is None:
    # Fall back to the target-level setting (default: use cygwin).
    mcs = int(spec.get('msvs_cygwin_shell', 1))
  elif isinstance(mcs, str):
    mcs = int(mcs)
  quote_cmd = int(rule.get('msvs_quote_cmd', 1))
  return _PrepareActionRaw(spec, rule['action'], mcs,
                           has_input_path, quote_cmd)
265
266
267 def _PickPrimaryInput(inputs):
268 # Pick second input as the primary one, unless there's only one.
269 # TODO(bradnelson): this is a bit of a hack,
270 # find something more general.
271 if len(inputs) > 1:
272 return inputs[1]
273 else:
274 return inputs[0]
275
def _SetRunAs(user_file, config_name, c_data, command,
              environment=None, working_directory=""):
  """Add a run_as rule to the user file.

  Arguments:
    user_file: The MSVSUserFile to add the command to.
    config_name: The name of the configuration to add it to.
    c_data: The dict of the configuration to add it to.
    command: The path to the command to execute.
    environment: Dict of environment variables for the command. (optional)
    working_directory: Directory to run the command in. (optional)
  """
  # Use None as the default to avoid sharing one mutable dict across calls.
  if environment is None:
    environment = {}
  user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
                             command, environment, working_directory)
290
def _AddCustomBuildTool(p, spec, inputs, outputs, description, cmd):
  """Add a custom build tool to execute something.

  Arguments:
    p: the target project
    spec: the target project dict
    inputs: list of inputs
    outputs: list of outputs
    description: description of the action
    cmd: command line to execute
  """
  fixed_inputs = [_FixPath(i) for i in inputs]
  fixed_outputs = [_FixPath(i) for i in outputs]
  tool = MSVSProject.Tool(
      'VCCustomBuildTool', {
          'Description': description,
          'AdditionalDependencies': ';'.join(fixed_inputs),
          'Outputs': ';'.join(fixed_outputs),
          'CommandLine': cmd,
      })
  primary_input = _PickPrimaryInput(fixed_inputs)
  # Attach the tool to the primary input under every configuration.
  for config_name, c_data in spec['configurations'].items():
    p.AddFileConfig(primary_input,
                    _ConfigFullName(config_name, c_data), tools=[tool])
316
317
318 def _RuleExpandPath(path, input_file):
319 """Given the input file to which a rule applied, string substitute a path.
320
321 Arguments:
322 path: a path to string expand
323 input_file: the file to which the rule applied.
324 Returns:
325 The string substituted path.
326 """
327 path = path.replace('$(InputName)',
328 os.path.splitext(os.path.split(input_file)[1])[0])
329 path = path.replace('$(InputExt)',
330 os.path.splitext(os.path.split(input_file)[1])[1])
331 path = path.replace('$(InputFileName)', os.path.split(input_file)[1])
332 path = path.replace('$(InputPath)', input_file)
333 return path
334
335
336 def _FindRuleTriggerFiles(rule, sources):
337 """Find the list of files which a particular rule applies to.
338
339 Arguments:
340 rule: the rule in question
341 sources: the set of all known source files for this project
342 Returns:
343 The list of sources that trigger a particular rule.
344 """
345 rule_ext = rule['extension']
346 return [s for s in sources if s.endswith('.' + rule_ext)]
347
348
def _RuleInputsAndOutputs(rule, trigger_file):
  """Find the inputs and outputs generated by a rule.

  Arguments:
    rule: the rule in question.
    trigger_file: the source file this rule application was triggered by.
  Returns:
    The pair of (inputs, outputs) sets involved in this rule; inputs
    always include the trigger file itself.
  """
  inputs = set([trigger_file])
  for raw_input in rule.get('inputs', []):
    inputs.add(_RuleExpandPath(raw_input, trigger_file))
  outputs = set()
  for raw_output in rule.get('outputs', []):
    outputs.add(_RuleExpandPath(raw_output, trigger_file))
  return (inputs, outputs)
368
369
def _GenerateNativeRules(p, rules, output_dir, spec, options):
  """Generate a native rules file.

  Arguments:
    p: the target project
    rules: the set of rules to include
    output_dir: the directory in which the project/gyp resides
    spec: the project dict
    options: global generator options
  """
  # options.suffix keeps the .rules filename unique per generator flavor.
  rules_filename = '%s%s.rules' % (spec['target_name'],
                                   options.suffix)
  rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename))
  rules_file.Create(spec['target_name'])
  # Add each rule.
  for r in rules:
    rule_name = r['rule_name']
    rule_ext = r['extension']
    inputs = [_FixPath(i) for i in r.get('inputs', [])]
    outputs = [_FixPath(i) for i in r.get('outputs', [])]
    # Native rules always expose $(InputPath) to the command.
    cmd = _PrepareAction(spec, r, has_input_path=True)
    rules_file.AddCustomBuildRule(name=rule_name,
                                  description=r.get('message', rule_name),
                                  extensions=[rule_ext],
                                  additional_dependencies=inputs,
                                  outputs=outputs,
                                  cmd=cmd)
  # Write out rules file.
  rules_file.Write()

  # Add rules file to project.
  p.AddToolFile(rules_filename)
402
403
404 def _Cygwinify(path):
405 path = path.replace('$(OutDir)', '$(OutDirCygwin)')
406 path = path.replace('$(IntDir)', '$(IntDirCygwin)')
407 return path
408
409
def _GenerateExternalRules(p, rules, output_dir, spec,
                           sources, options, actions_to_add):
  """Generate an external makefile to do a set of rules.

  Arguments:
    p: the target project
    rules: the list of rules to include
    output_dir: path containing project and gyp files
    spec: project specification data
    sources: set of sources known
    options: global generator options
    actions_to_add: deferred list of actions to add in
  """
  filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix)
  # (Renamed local from `file`, which shadowed the Python builtin.)
  makefile = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
  # Find cygwin style versions of some paths.
  makefile.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
  makefile.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
  # Gather stuff needed to emit all: target.
  all_inputs = set()
  all_outputs = set()
  all_output_dirs = set()
  first_outputs = []
  for rule in rules:
    trigger_files = _FindRuleTriggerFiles(rule, sources)
    for tf in trigger_files:
      inputs, outputs = _RuleInputsAndOutputs(rule, tf)
      all_inputs.update(set(inputs))
      all_outputs.update(set(outputs))
      # Only use one target from each rule as the dependency for
      # 'all' so we don't try to build each rule multiple times.
      first_outputs.append(list(outputs)[0])
      # Get the unique output directories for this rule.
      output_dirs = [os.path.split(i)[0] for i in outputs]
      for od in output_dirs:
        all_output_dirs.add(od)
  first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
  # Write out all: target, including mkdir for each output directory.
  makefile.write('all: %s\n' % ' '.join(first_outputs_cyg))
  for od in all_output_dirs:
    makefile.write('\tmkdir -p %s\n' % od)
  makefile.write('\n')
  # Define how each output is generated.
  for rule in rules:
    trigger_files = _FindRuleTriggerFiles(rule, sources)
    for tf in trigger_files:
      # Get all the inputs and outputs for this rule for this trigger file.
      inputs, outputs = _RuleInputsAndOutputs(rule, tf)
      inputs = [_Cygwinify(i) for i in inputs]
      outputs = [_Cygwinify(i) for i in outputs]
      # Prepare the command line for this rule.
      cmd = [_RuleExpandPath(c, tf) for c in rule['action']]
      cmd = ['"%s"' % i for i in cmd]
      cmd = ' '.join(cmd)
      # Add it to the makefile.
      makefile.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs)))
      makefile.write('\t%s\n\n' % cmd)
  # Close up the file.
  makefile.close()

  # Add makefile to list of sources.
  sources.add(filename)
  # Add a build action to call makefile.
  cmd = ['make',
         'OutDir=$(OutDir)',
         'IntDir=$(IntDir)',
         '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
         '-f', filename]
  cmd = _PrepareActionRaw(spec, cmd, True, False, True)
  # TODO(bradnelson): this won't be needed if we have a better way to pick
  # the primary input.
  all_inputs = list(all_inputs)
  all_inputs.insert(1, filename)
  actions_to_add.append({
      'inputs': [_FixPath(i) for i in all_inputs],
      'outputs': [_FixPath(i) for i in all_outputs],
      'description': 'Running %s' % cmd,
      'cmd': cmd,
  })
488
489
490 def _EscapeEnvironmentVariableExpansion(s):
491 """Escapes any % characters so that Windows-style environment variable
492 expansions will leave them alone.
493 See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
494 to understand why we have to do this."""
495 s = s.replace('%', '%%')
496 return s
497
498
499 quote_replacer_regex = re.compile(r'(\\*)"')
500 def _EscapeCommandLineArgument(s):
501 """Escapes a Windows command-line argument, so that the Win32
502 CommandLineToArgv function will turn the escaped result back into the
503 original string. See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
504 ("Parsing C++ Command-Line Arguments") to understand why we have to do
505 this."""
506 def replace(match):
507 # For a literal quote, CommandLineToArgv requires an odd number of
508 # backslashes preceding it, and it produces half as many literal backslashes
509 # (rounded down). So we need to produce 2n+1 backslashes.
510 return 2 * match.group(1) + '\\"'
511 # Escape all quotes so that they are interpreted literally.
512 s = quote_replacer_regex.sub(replace, s)
513 # Now add unescaped quotes so that any whitespace is interpreted literally.
514 s = '"' + s + '"'
515 return s
516
517
518 delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)')
519 def _EscapeVCProjCommandLineArgListItem(s):
520 """The VCProj format stores string lists in a single string using commas and
521 semi-colons as separators, which must be quoted if they are to be
522 interpreted literally. However, command-line arguments may already have
523 quotes, and the VCProj parser is ignorant of the backslash escaping
524 convention used by CommandLineToArgv, so the command-line quotes and the
525 VCProj quotes may not be the same quotes. So to store a general
526 command-line argument in a VCProj list, we need to parse the existing
527 quoting according to VCProj's convention and quote any delimiters that are
528 not already quoted by that convention. The quotes that we add will also be
529 seen by CommandLineToArgv, so if backslashes precede them then we also have
530 to escape those backslashes according to the CommandLineToArgv
531 convention."""
532 def replace(match):
533 # For a non-literal quote, CommandLineToArgv requires an even number of
534 # backslashes preceding it, and it produces half as many literal
535 # backslashes. So we need to produce 2n backslashes.
536 return 2 * match.group(1) + '"' + match.group(2) + '"'
537 list = s.split('"')
538 # The unquoted segments are at the even-numbered indices.
539 for i in range(0, len(list), 2):
540 list[i] = delimiters_replacer_regex.sub(replace, list[i])
541 # Concatenate back into a single string
542 s = '"'.join(list)
543 if len(list) % 2 == 0:
544 # String ends while still quoted according to VCProj's convention. This
545 # means the delimiter and the next list item that follow this one in the
546 # .vcproj file will be misinterpreted as part of this item. There is nothing
547 # we can do about this. Adding an extra quote would correct the problem in
548 # the VCProj but cause the same problem on the final command-line. Moving
549 # the item to the end of the list does works, but that's only possible if
550 # there's only one such item. Let's just warn the user.
551 print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
552 'quotes in ' + s)
553 return s
554
555
def _EscapeCppDefine(s):
  """Escape a CPP define so that it reaches the compiler unaltered."""
  for escape in (_EscapeEnvironmentVariableExpansion,
                 _EscapeCommandLineArgument,
                 _EscapeVCProjCommandLineArgListItem):
    s = escape(s)
  return s
562
563
def _GenerateRules(p, output_dir, options, spec,
                   sources, excluded_sources,
                   actions_to_add):
  """Generate all the rules for a particular project.

  Arguments:
    p: the project to add the rules to
    output_dir: directory to emit rules to
    options: global options passed to the generator
    spec: the specification for this project
    sources: the set of all known source files in this project
    excluded_sources: the set of sources excluded from normal processing
    actions_to_add: deferred list of actions to add in
  """
  rules = spec.get('rules', [])
  rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
  rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]

  # Handle rules that use a native rules file.
  if rules_native:
    _GenerateNativeRules(p, rules_native, output_dir, spec, options)

  # Handle external rules (non-native rules).
  if rules_external:
    _GenerateExternalRules(p, rules_external, output_dir, spec,
                           sources, options, actions_to_add)

  # Add outputs generated by each rule (if applicable).
  for rule in rules:
    # Only rules flagged process_outputs_as_sources feed their outputs back in.
    if int(rule.get('process_outputs_as_sources', False)):
      # Add in the outputs from this rule.
      trigger_files = _FindRuleTriggerFiles(rule, sources)
      for tf in trigger_files:
        inputs, outputs = _RuleInputsAndOutputs(rule, tf)
        # The trigger file is already a source; drop it from the extras.
        inputs.remove(tf)
        # Extra rule inputs are shown in the project but excluded from build.
        sources.update(inputs)
        excluded_sources.update(inputs)
        sources.update(outputs)
603
604 def _GenerateProject(vcproj_filename, build_file, spec, options, version):
605 """Generates a vcproj file.
606
607 Arguments:
608 vcproj_filename: Filename of the vcproj file to generate.
609 build_file: Filename of the .gyp file that the vcproj file comes from.
610 spec: The target dictionary containing the properties of the target.
611 """
612 # Pluck out the default configuration.
613 default_config = spec['configurations'][spec['default_configuration']]
614 # Decide the guid of the project.
615 guid = default_config.get('msvs_guid')
616 if guid:
617 if VALID_MSVS_GUID_CHARS.match(guid) == None:
618 raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' %
619 (guid, VALID_MSVS_GUID_CHARS.pattern))
620 guid = '{%s}' % guid
621
622 # Skip emitting anything if told to with msvs_existing_vcproj option.
623 if default_config.get('msvs_existing_vcproj'):
624 return guid
625
626 #print 'Generating %s' % vcproj_filename
627
628 vcproj_dir = os.path.dirname(vcproj_filename)
629 if vcproj_dir and not os.path.exists(vcproj_dir):
630 os.makedirs(vcproj_dir)
631
632 # Gather list of unique platforms.
633 platforms = set()
634 for configuration in spec['configurations']:
635 platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
636 platforms = list(platforms)
637
638 p = MSVSProject.Writer(vcproj_filename, version=version)
639 p.Create(spec['target_name'], guid=guid, platforms=platforms)
640
641 # Create the user file.
642 (domain, username) = _GetDomainAndUserName()
643 vcuser_filename = '.'.join([vcproj_filename, domain, username, 'user'])
644 user_file = MSVSUserFile.Writer(vcuser_filename, version=version)
645 user_file.Create(spec['target_name'])
646
647 # Get directory project file is in.
648 gyp_dir = os.path.split(vcproj_filename)[0]
649
650 # Pick target configuration type.
651 try:
652 config_type = {
653 'executable': '1', # .exe
654 'shared_library': '2', # .dll
655 'loadable_module': '2', # .dll
656 'static_library': '4', # .lib
657 'none': '10', # Utility type
658 'dummy_executable': '1', # .exe
659 }[spec['type']]
660 except KeyError, e:
661 if spec.get('type'):
662 raise Exception('Target type %s is not a valid target type for '
663 'target %s in %s.' %
664 (spec['type'], spec['target_name'], build_file))
665 else:
666 raise Exception('Missing type field for target %s in %s.' %
667 (spec['target_name'], build_file))
668
669 for config_name, c in spec['configurations'].iteritems():
670 # Process each configuration.
671 vsprops_dirs = c.get('msvs_props', [])
672 vsprops_dirs = [_FixPath(i) for i in vsprops_dirs]
673
674 # Prepare the list of tools as a dictionary.
675 tools = dict()
676
677 # Add in msvs_settings.
678 for tool in c.get('msvs_settings', {}):
679 settings = c['msvs_settings'][tool]
680 for setting in settings:
681 _ToolAppend(tools, tool, setting, settings[setting])
682
683 # Add in includes.
684 # TODO(bradnelson): include_dirs should really be flexible enough not to
685 # require this sort of thing.
686 include_dirs = (
687 c.get('include_dirs', []) +
688 c.get('msvs_system_include_dirs', []))
689 resource_include_dirs = c.get('resource_include_dirs', include_dirs)
690 include_dirs = [_FixPath(i) for i in include_dirs]
691 resource_include_dirs = [_FixPath(i) for i in resource_include_dirs]
692 _ToolAppend(tools, 'VCCLCompilerTool',
693 'AdditionalIncludeDirectories', include_dirs)
694 _ToolAppend(tools, 'VCResourceCompilerTool',
695 'AdditionalIncludeDirectories', resource_include_dirs)
696
697 # Add in libraries.
698 libraries = spec.get('libraries', [])
699 # Strip out -l, as it is not used on windows (but is needed so we can pass
700 # in libraries that are assumed to be in the default library path).
701 libraries = [re.sub('^(\-l)', '', lib) for lib in libraries]
702 # Add them.
703 _ToolAppend(tools, 'VCLinkerTool',
704 'AdditionalDependencies', libraries)
705
706 # Select a name for the output file.
707 output_file_map = {
708 'executable': ('VCLinkerTool', '$(OutDir)\\', '.exe'),
709 'shared_library': ('VCLinkerTool', '$(OutDir)\\', '.dll'),
710 'loadable_module': ('VCLinkerTool', '$(OutDir)\\', '.dll'),
711 'static_library': ('VCLibrarianTool', '$(OutDir)\\lib\\', '.lib'),
712 'dummy_executable': ('VCLinkerTool', '$(IntDir)\\', '.junk'),
713 }
714 output_file_props = output_file_map.get(spec['type'])
715 if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
716 vc_tool, out_dir, suffix = output_file_props
717 out_dir = spec.get('product_dir', out_dir)
718 product_extension = spec.get('product_extension')
719 if product_extension:
720 suffix = '.' + product_extension
721 prefix = spec.get('product_prefix', '')
722 product_name = spec.get('product_name', '$(ProjectName)')
723 out_file = ntpath.join(out_dir, prefix + product_name + suffix)
724 _ToolAppend(tools, vc_tool, 'OutputFile', out_file,
725 only_if_unset=True)
726
727 # Add defines.
728 defines = []
729 for d in c.get('defines', []):
730 if type(d) == list:
731 fd = '='.join([str(dpart) for dpart in d])
732 else:
733 fd = str(d)
734 fd = _EscapeCppDefine(fd)
735 defines.append(fd)
736
737 _ToolAppend(tools, 'VCCLCompilerTool',
738 'PreprocessorDefinitions', defines)
739 _ToolAppend(tools, 'VCResourceCompilerTool',
740 'PreprocessorDefinitions', defines)
741
742 # Change program database directory to prevent collisions.
743 _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
744 '$(IntDir)\\$(ProjectName)\\vc80.pdb')
745
746 # Add disabled warnings.
747 disabled_warnings = [str(i) for i in c.get('msvs_disabled_warnings', [])]
748 _ToolAppend(tools, 'VCCLCompilerTool',
749 'DisableSpecificWarnings', disabled_warnings)
750
751 # Add Pre-build.
752 prebuild = c.get('msvs_prebuild')
753 _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)
754
755 # Add Post-build.
756 postbuild = c.get('msvs_postbuild')
757 _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)
758
759 # Turn on precompiled headers if appropriate.
760 header = c.get('msvs_precompiled_header')
761 if header:
762 header = os.path.split(header)[1]
763 _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
764 _ToolAppend(tools, 'VCCLCompilerTool',
765 'PrecompiledHeaderThrough', header)
766 _ToolAppend(tools, 'VCCLCompilerTool',
767 'ForcedIncludeFiles', header)
768
769 # Loadable modules don't generate import libraries;
770 # tell dependent projects to not expect one.
771 if spec['type'] == 'loadable_module':
772 _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true')
773
774 # Set the module definition file if any.
775 if spec['type'] in ['shared_library', 'loadable_module']:
776 def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
777 if len(def_files) == 1:
778 _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile',
779 _FixPath(def_files[0]))
780 elif def_files:
781 raise ValueError('Multiple module definition files in one target, '
782 'target %s lists multiple .def files: %s' % (
783 spec['target_name'], ' '.join(def_files)))
784
785 # Convert tools to expected form.
786 tool_list = []
787 for tool, settings in tools.iteritems():
788 # Collapse settings with lists.
789 settings_fixed = {}
790 for setting, value in settings.iteritems():
791 if type(value) == list:
792 if ((tool == 'VCLinkerTool' and
793 setting == 'AdditionalDependencies') or
794 setting == 'AdditionalOptions'):
795 settings_fixed[setting] = ' '.join(value)
796 else:
797 settings_fixed[setting] = ';'.join(value)
798 else:
799 settings_fixed[setting] = value
800 # Add in this tool.
801 tool_list.append(MSVSProject.Tool(tool, settings_fixed))
802
803 # Prepare configuration attributes.
804 prepared_attrs = {}
805 source_attrs = c.get('msvs_configuration_attributes', {})
806 for a in source_attrs:
807 prepared_attrs[a] = source_attrs[a]
808 # Add props files.
809 if vsprops_dirs:
810 prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs)
811 # Set configuration type.
812 prepared_attrs['ConfigurationType'] = config_type
813 if not prepared_attrs.has_key('OutputDirectory'):
814 prepared_attrs['OutputDirectory'] = '$(SolutionDir)$(ConfigurationName)'
815 if not prepared_attrs.has_key('IntermediateDirectory'):
816 intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)'
817 prepared_attrs['IntermediateDirectory'] = intermediate
818
819 # Add in this configuration.
820 p.AddConfig(_ConfigFullName(config_name, c),
821 attrs=prepared_attrs, tools=tool_list)
822
823 # Prepare list of sources and excluded sources.
824 sources = set(spec.get('sources', []))
825 excluded_sources = set()
826 # Add in the gyp file.
827 gyp_file = os.path.split(build_file)[1]
828 sources.add(gyp_file)
829 # Add in 'action' inputs and outputs.
830 for a in spec.get('actions', []):
831 inputs = a.get('inputs')
832 if not inputs:
833 # This is an action with no inputs. Make the primary input
834 # by the .gyp file itself so Visual Studio has a place to
835 # hang the custom build rule.
836 inputs = [gyp_file]
837 a['inputs'] = inputs
838 primary_input = _PickPrimaryInput(inputs)
839 inputs = set(inputs)
840 sources.update(inputs)
841 inputs.remove(primary_input)
842 excluded_sources.update(inputs)
843 if int(a.get('process_outputs_as_sources', False)):
844 outputs = set(a.get('outputs', []))
845 sources.update(outputs)
846 # Add in 'copies' inputs and outputs.
847 for cpy in spec.get('copies', []):
848 files = set(cpy.get('files', []))
849 sources.update(files)
850
851 # Add rules.
852 actions_to_add = []
853 _GenerateRules(p, gyp_dir, options, spec,
854 sources, excluded_sources,
855 actions_to_add)
856
857 # Exclude excluded sources coming into the generator.
858 excluded_sources.update(set(spec.get('sources_excluded', [])))
859 # Add excluded sources into sources for good measure.
860 sources.update(excluded_sources)
861 # Convert to proper windows form.
862 # NOTE: sources goes from being a set to a list here.
863 # NOTE: excluded_sources goes from being a set to a list here.
864 sources = [_FixPath(i) for i in sources]
865 # Convert to proper windows form.
866 excluded_sources = [_FixPath(i) for i in excluded_sources]
867
868 # If any non-native rules use 'idl' as an extension exclude idl files.
869 # Gather a list here to use later.
870 using_idl = False
871 for rule in spec.get('rules', []):
872 if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
873 using_idl = True
874 break
875 if using_idl:
876 excluded_idl = [i for i in sources if i.endswith('.idl')]
877 else:
878 excluded_idl = []
879
880 # List of precompiled header related keys.
881 precomp_keys = [
882 'msvs_precompiled_header',
883 'msvs_precompiled_source',
884 ]
885
886 # Gather a list of precompiled header related sources.
887 precompiled_related = []
888 for config_name, c in spec['configurations'].iteritems():
889 for k in precomp_keys:
890 f = c.get(k)
891 if f:
892 precompiled_related.append(_FixPath(f))
893
894 # Find the excluded ones, minus the precompiled header related ones.
895 fully_excluded = [i for i in excluded_sources if i not in precompiled_related]
896
897 # Convert to folders and the right slashes.
898 sources = [i.split('\\') for i in sources]
899 sources = _SourceInFolders(sources, excluded=fully_excluded)
900 # Add in dummy file for type none.
901 if spec['type'] == 'dummy_executable':
902 # Pull in a dummy main so it can link successfully.
903 dummy_relpath = gyp.common.RelativePath(
904 options.depth + '\\tools\\gyp\\gyp_dummy.c', gyp_dir)
905 sources.append(dummy_relpath)
906 # Add in files.
907 p.AddFiles(sources)
908
909 # Add deferred actions to add.
910 for a in actions_to_add:
911 _AddCustomBuildTool(p, spec,
912 inputs=a['inputs'],
913 outputs=a['outputs'],
914 description=a['description'],
915 cmd=a['cmd'])
916
917 # Exclude excluded sources from being built.
918 for f in excluded_sources:
919 for config_name, c in spec['configurations'].iteritems():
920 precomped = [_FixPath(c.get(i, '')) for i in precomp_keys]
921 # Don't do this for ones that are precompiled header related.
922 if f not in precomped:
923 p.AddFileConfig(f, _ConfigFullName(config_name, c),
924 {'ExcludedFromBuild': 'true'})
925
926 # If any non-native rules use 'idl' as an extension exclude idl files.
927 # Exclude them now.
928 for config_name, c in spec['configurations'].iteritems():
929 for f in excluded_idl:
930 p.AddFileConfig(f, _ConfigFullName(config_name, c),
931 {'ExcludedFromBuild': 'true'})
932
933 # Add in tool files (rules).
934 tool_files = set()
935 for config_name, c in spec['configurations'].iteritems():
936 for f in c.get('msvs_tool_files', []):
937 tool_files.add(f)
938 for f in tool_files:
939 p.AddToolFile(f)
940
941 # Handle pre-compiled headers source stubs specially.
942 for config_name, c in spec['configurations'].iteritems():
943 source = c.get('msvs_precompiled_source')
944 if source:
945 source = _FixPath(source)
946 # UsePrecompiledHeader=1 for if using precompiled headers.
947 tool = MSVSProject.Tool('VCCLCompilerTool',
948 {'UsePrecompiledHeader': '1'})
949 p.AddFileConfig(source, _ConfigFullName(config_name, c),
950 {}, tools=[tool])
951
952 # Add actions.
953 actions = spec.get('actions', [])
954 for a in actions:
955 cmd = _PrepareAction(spec, a, has_input_path=False)
956 _AddCustomBuildTool(p, spec,
957 inputs=a.get('inputs', []),
958 outputs=a.get('outputs', []),
959 description=a.get('message', a['action_name']),
960 cmd=cmd)
961
962 # Add run_as and test targets.
963 has_run_as = False
964 if spec.get('run_as') or int(spec.get('test', 0)):
965 has_run_as = True
966 run_as = spec.get('run_as', {
967 'action' : ['$(TargetPath)', '--gtest_print_time'],
968 })
969 working_directory = run_as.get('working_directory', '.')
970 action = run_as.get('action', [])
971 environment = run_as.get('environment', [])
972 for config_name, c_data in spec['configurations'].iteritems():
973 _SetRunAs(user_file, config_name, c_data,
974 action, environment, working_directory)
975
976 # Add copies.
977 for cpy in spec.get('copies', []):
978 for src in cpy.get('files', []):
979 dst = os.path.join(cpy['destination'], os.path.basename(src))
980 # _AddCustomBuildTool() will call _FixPath() on the inputs and
981 # outputs, so do the same for our generated command line.
982 if src.endswith('/'):
983 src_bare = src[:-1]
984 base_dir = posixpath.split(src_bare)[0]
985 outer_dir = posixpath.split(src_bare)[1]
986 cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % (
987 _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir)
988 _AddCustomBuildTool(p, spec,
989 inputs=[src],
990 outputs=['dummy_copies', dst],
991 description='Copying %s to %s' % (src, dst),
992 cmd=cmd)
993 else:
994 cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
995 _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst))
996 _AddCustomBuildTool(p, spec,
997 inputs=[src], outputs=[dst],
998 description='Copying %s to %s' % (src, dst),
999 cmd=cmd)
1000
1001 # Write it out.
1002 p.Write()
1003
1004 # Write out the user file, but only if we need to.
1005 if has_run_as:
1006 user_file.Write()
1007
1008 # Return the guid so we can refer to it elsewhere.
1009 return p.guid
1010
1011
1012 def _GetPathDict(root, path):
1013 if path == '':
1014 return root
1015 parent, folder = os.path.split(path)
1016 parent_dict = _GetPathDict(root, parent)
1017 if folder not in parent_dict:
1018 parent_dict[folder] = dict()
1019 return parent_dict[folder]
1020
1021
def _DictsToFolders(base_path, bucket, flat):
  """Recursively converts a nested-dict bucket into solution entries.

  Dict values become MSVSFolder objects (or are flattened into the parent
  list when |flat| is true); non-dict values are project objects and are
  kept as-is.
  """
  entries = []
  for folder, contents in bucket.iteritems():
    if type(contents) != dict:
      # Leaf: an actual project object.
      entries.append(contents)
      continue
    sub_entries = _DictsToFolders(os.path.join(base_path, folder),
                                  contents, flat)
    if flat:
      entries += sub_entries
    else:
      entries.append(MSVSNew.MSVSFolder(os.path.join(base_path, folder),
                                        name='(' + folder + ')',
                                        entries=sub_entries))
  return entries
1039
1040
1041 def _CollapseSingles(parent, node):
1042 # Recursively explorer the tree of dicts looking for projects which are
1043 # the sole item in a folder which has the same name as the project. Bring
1044 # such projects up one level.
1045 if (type(node) == dict and
1046 len(node) == 1 and
1047 node.keys()[0] == parent + '.vcproj'):
1048 return node[node.keys()[0]]
1049 if type(node) != dict:
1050 return node
1051 for child in node.keys():
1052 node[child] = _CollapseSingles(child, node[child])
1053 return node
1054
1055
def _GatherSolutionFolders(project_objs, flat):
  """Builds the folder hierarchy of solution entries for the .sln file."""
  root = {}
  # Bucket every project object under the directory path of its .gyp file.
  for qualified_target in project_objs.keys():
    gyp_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
    bucket = _GetPathDict(root, os.path.dirname(gyp_file))
    bucket[target + '.vcproj'] = project_objs[qualified_target]
  # Walk down from the top until we hit a folder that has more than one entry.
  # In practice, this strips the top-level "src/" dir from the hierarchy in
  # the solution.
  while len(root) == 1 and type(root[root.keys()[0]]) == dict:
    root = root[root.keys()[0]]
  # Pull lone same-named projects out of their folders.
  root = _CollapseSingles('', root)
  # Merge buckets until everything is a root entry.
  return _DictsToFolders('', root, flat)
1073
1074
def _ProjectObject(sln, qualified_target, project_objs, projects):
  """Returns the MSVSProject object for |qualified_target|, creating it
  (and, recursively, objects for all its dependencies) on first use and
  memoizing it in |project_objs|."""
  cached = project_objs.get(qualified_target)
  if cached:
    return cached
  entry = projects[qualified_target]
  spec = entry['spec']
  # Depth-first: resolve dependency objects before this project's own.
  dep_objs = [_ProjectObject(sln, dep, project_objs, projects)
              for dep in spec.get('dependencies', [])]
  # The .vcproj is referenced relative to the solution file's directory.
  rel_path = gyp.common.RelativePath(entry['vcproj_path'],
                                     os.path.split(sln)[0])
  rel_path = _FixPath(rel_path)
  # Map each solution configuration name to the project configuration that
  # builds for it, honoring any msvs_target_platform override.
  config_platform_overrides = {}
  for config_name, c in spec['configurations'].iteritems():
    platform = c.get('msvs_target_platform', _ConfigPlatform(c))
    fixed_fullname = '%s|%s' % (
        _ConfigBaseName(config_name, _ConfigPlatform(c)), platform)
    config_platform_overrides[_ConfigFullName(config_name, c)] = fixed_fullname
  # Create, memoize and return the object for this project.
  obj = MSVSNew.MSVSProject(
      rel_path,
      name=spec['target_name'],
      guid=entry['guid'],
      dependencies=dep_objs,
      config_platform_overrides=config_platform_overrides)
  project_objs[qualified_target] = obj
  return obj
1108
1109
def CalculateVariables(default_variables, params):
  """Calculates generator variables that require |params| to be known."""
  flags = params.get('generator_flags', {})

  # Select project file format version (if unset, default to auto detecting).
  requested_version = flags.get('msvs_version', 'auto')
  msvs_version = MSVSVersion.SelectVisualStudioVersion(requested_version)
  # Stash msvs_version for later (so we don't have to probe the system twice).
  params['msvs_version'] = msvs_version

  # Set a variable so conditions can be based on msvs_version.
  default_variables['MSVS_VERSION'] = msvs_version.ShortName()

  # To determine processor word size on Windows, in addition to checking
  # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
  # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
  # contains the actual word size of the system when running thru WOW64).
  arch = os.environ.get('PROCESSOR_ARCHITECTURE', '')
  wow64_arch = os.environ.get('PROCESSOR_ARCHITEW6432', '')
  if '64' in arch or '64' in wow64_arch:
    default_variables['MSVS_OS_BITS'] = 64
  else:
    default_variables['MSVS_OS_BITS'] = 32
1134
1135
def GenerateOutput(target_list, target_dicts, data, params):
  """Generate .sln and .vcproj files.

  This is the entry point for this generator.
  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
    data: Dictionary containing per .gyp data.
    params: Generator parameters; 'options' and the 'msvs_version' stashed
        by CalculateVariables are read here.
  """
  global fixpath_prefix

  options = params['options']
  generator_flags = params.get('generator_flags', {})

  # Get the project file format version back out of where we stashed it in
  # GeneratorCalculatedVariables.
  msvs_version = params['msvs_version']

  # Prepare the set of configurations: the union of every target's
  # configuration full names, used as the solution's variant list.
  configs = set()
  for qualified_target in target_list:
    build_file = gyp.common.BuildFile(qualified_target)
    spec = target_dicts[qualified_target]
    for config_name, c in spec['configurations'].iteritems():
      configs.add(_ConfigFullName(config_name, c))
  configs = list(configs)

  # Generate each project (.vcproj); collect path/guid/spec per target so
  # the solution pass below can reference them.
  projects = {}
  for qualified_target in target_list:
    build_file = gyp.common.BuildFile(qualified_target)
    spec = target_dicts[qualified_target]
    if spec['toolset'] != 'target':
      raise Exception(
          'Multiple toolsets not supported in msvs build (target %s)' %
          qualified_target)
    default_config = spec['configurations'][spec['default_configuration']]
    vcproj_filename = default_config.get('msvs_existing_vcproj')
    if not vcproj_filename:
      vcproj_filename = spec['target_name'] + options.suffix + '.vcproj'
    vcproj_path = os.path.join(os.path.split(build_file)[0], vcproj_filename)
    if options.generator_output:
      # Redirect output under generator_output and point the module-global
      # fixpath_prefix back at the original project directory so relative
      # paths written into the .vcproj stay valid.
      projectDirPath = os.path.dirname(os.path.abspath(vcproj_path))
      vcproj_path = os.path.join(options.generator_output, vcproj_path)
      fixpath_prefix = gyp.common.RelativePath(projectDirPath,
                                               os.path.dirname(vcproj_path))
    projects[qualified_target] = {
        'vcproj_path': vcproj_path,
        'guid': _GenerateProject(vcproj_path, build_file,
                                 spec, options, version=msvs_version),
        'spec': spec,
    }

  # Reset so path fixing is a no-op during the solution pass.
  fixpath_prefix = None

  # Emit one .sln per input .gyp file.
  for build_file in data.keys():
    # Validate build_file extension
    if build_file[-4:] != '.gyp':
      continue
    sln_path = build_file[:-4] + options.suffix + '.sln'
    if options.generator_output:
      sln_path = os.path.join(options.generator_output, sln_path)
    #print 'Generating %s' % sln_path
    # Get projects in the solution, and their dependents.
    sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
    sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects)
    # Convert projects to Project Objects.
    project_objs = {}
    for p in sln_projects:
      _ProjectObject(sln_path, p, project_objs, projects)
    # Create folder hierarchy.
    root_entries = _GatherSolutionFolders(
        project_objs, flat=msvs_version.FlatSolution())
    # Create solution.
    sln = MSVSNew.MSVSSolution(sln_path,
                               entries=root_entries,
                               variants=configs,
                               websiteProperties=False,
                               version=msvs_version)
    sln.Write()
+0
-1047
third_party/gyp/pylib/gyp/generator/scons.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import gyp
7 import gyp.common
8 import gyp.SCons as SCons
9 import os.path
10 import pprint
11 import re
12
13
# TODO: remove when we delete the last WriteList() call in this module
# Module-level alias for the list-formatting helper from gyp.SCons.
WriteList = SCons.WriteList


# Expansions for generator-provided gyp variables.  The values use SCons
# construction-variable syntax (${...}/$...) so they are substituted when
# the generated SConscript files are read, not at gyp time.
generator_default_variables = {
    'EXECUTABLE_PREFIX': '',
    'EXECUTABLE_SUFFIX': '',
    'STATIC_LIB_PREFIX': '${LIBPREFIX}',
    'SHARED_LIB_PREFIX': '${SHLIBPREFIX}',
    'STATIC_LIB_SUFFIX': '${LIBSUFFIX}',
    'SHARED_LIB_SUFFIX': '${SHLIBSUFFIX}',
    'INTERMEDIATE_DIR': '${INTERMEDIATE_DIR}',
    'SHARED_INTERMEDIATE_DIR': '${SHARED_INTERMEDIATE_DIR}',
    'OS': 'linux',
    'PRODUCT_DIR': '$TOP_BUILDDIR',
    'SHARED_LIB_DIR': '$LIB_DIR',
    'LIB_DIR': '$LIB_DIR',
    'RULE_INPUT_ROOT': '${SOURCE.filebase}',
    'RULE_INPUT_EXT': '${SOURCE.suffix}',
    'RULE_INPUT_NAME': '${SOURCE.file}',
    'RULE_INPUT_PATH': '${SOURCE.abspath}',
    'CONFIGURATION_NAME': '${CONFIG_NAME}',
}

# Tell GYP how to process the input for us.
generator_handles_variants = True
generator_wants_absolute_build_file_paths = True
41
42
def FixPath(path, prefix):
  """Prepends |prefix| to relative paths.

  Args:
    path: A path; may be absolute, relative, empty, or start with a SCons
        variable reference such as '$SRC_DIR'.
    prefix: Directory prefix (normally ending in '/') to prepend.

  Returns:
    |path| unchanged if it is empty, absolute, or starts with '$';
    otherwise prefix + path.
  """
  # The 'path' guard fixes an IndexError: the original indexed path[0]
  # without first checking for an empty string.
  if path and not os.path.isabs(path) and not path[0] == '$':
    path = prefix + path
  return path
47
48
49 header = """\
50 # This file is generated; do not edit.
51 """
52
53
54 _alias_template = """
55 if GetOption('verbose'):
56 _action = Action([%(action)s])
57 else:
58 _action = Action([%(action)s], %(message)s)
59 _outputs = env.Alias(
60 ['_%(target_name)s_action'],
61 %(inputs)s,
62 _action
63 )
64 env.AlwaysBuild(_outputs)
65 """
66
67 _run_as_template = """
68 if GetOption('verbose'):
69 _action = Action([%(action)s])
70 else:
71 _action = Action([%(action)s], %(message)s)
72 """
73
74 _run_as_template_suffix = """
75 _run_as_target = env.Alias('run_%(target_name)s', target_files, _action)
76 env.Requires(_run_as_target, [
77 Alias('%(target_name)s'),
78 ])
79 env.AlwaysBuild(_run_as_target)
80 """
81
82 _command_template = """
83 if GetOption('verbose'):
84 _action = Action([%(action)s])
85 else:
86 _action = Action([%(action)s], %(message)s)
87 _outputs = env.Command(
88 %(outputs)s,
89 %(inputs)s,
90 _action
91 )
92 """
93
94 # This is copied from the default SCons action, updated to handle symlinks.
95 _copy_action_template = """
96 import shutil
97 import SCons.Action
98
99 def _copy_files_or_dirs_or_symlinks(dest, src):
100 SCons.Node.FS.invalidate_node_memos(dest)
101 if SCons.Util.is_List(src) and os.path.isdir(dest):
102 for file in src:
103 shutil.copy2(file, dest)
104 return 0
105 elif os.path.islink(src):
106 linkto = os.readlink(src)
107 os.symlink(linkto, dest)
108 return 0
109 elif os.path.isfile(src):
110 return shutil.copy2(src, dest)
111 else:
112 return shutil.copytree(src, dest, 1)
113
114 def _copy_files_or_dirs_or_symlinks_str(dest, src):
115 return 'Copying %s to %s ...' % (src, dest)
116
117 GYPCopy = SCons.Action.ActionFactory(_copy_files_or_dirs_or_symlinks,
118 _copy_files_or_dirs_or_symlinks_str,
119 convert=str)
120 """
121
122 _rule_template = """
123 %(name)s_additional_inputs = %(inputs)s
124 %(name)s_outputs = %(outputs)s
125 def %(name)s_emitter(target, source, env):
126 return (%(name)s_outputs, source + %(name)s_additional_inputs)
127 if GetOption('verbose'):
128 %(name)s_action = Action([%(action)s])
129 else:
130 %(name)s_action = Action([%(action)s], %(message)s)
131 env['BUILDERS']['%(name)s'] = Builder(action=%(name)s_action,
132 emitter=%(name)s_emitter)
133
134 _outputs = []
135 _processed_input_files = []
136 for infile in input_files:
137 if (type(infile) == type('')
138 and not os.path.isabs(infile)
139 and not infile[0] == '$'):
140 infile = %(src_dir)r + infile
141 if str(infile).endswith('.%(extension)s'):
142 _generated = env.%(name)s(infile)
143 env.Precious(_generated)
144 _outputs.append(_generated)
145 %(process_outputs_as_sources_line)s
146 else:
147 _processed_input_files.append(infile)
148 prerequisites.extend(_outputs)
149 input_files = _processed_input_files
150 """
151
152 _spawn_hack = """
153 import re
154 import SCons.Platform.posix
155 needs_shell = re.compile('["\\'><!^&]')
156 def gyp_spawn(sh, escape, cmd, args, env):
157 def strip_scons_quotes(arg):
158 if arg[0] == '"' and arg[-1] == '"':
159 return arg[1:-1]
160 return arg
161 stripped_args = [strip_scons_quotes(a) for a in args]
162 if needs_shell.search(' '.join(stripped_args)):
163 return SCons.Platform.posix.exec_spawnvpe([sh, '-c', ' '.join(args)], env)
164 else:
165 return SCons.Platform.posix.exec_spawnvpe(stripped_args, env)
166 """
167
168
def EscapeShellArgument(s):
  """Quotes an argument so that it will be interpreted literally by a POSIX
  shell. Taken from
  http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
  """
  # Close the quote, emit an escaped quote, and reopen.
  quoted = s.replace("'", "'\\''")
  return "'%s'" % quoted
175
176
def InvertNaiveSConsQuoting(s):
  """SCons tries to "help" with quoting by naively putting double-quotes around
  command-line arguments containing space or tab, which is broken for all
  but trivial cases, so we undo it. (See quote_spaces() in Subst.py)"""
  has_whitespace = ' ' in s or '\t' in s
  if not has_whitespace:
    return s
  # SCons will wrap this in double-quotes, so add our own quotes to close
  # its quotes at the beginning and end.
  return '"%s"' % s
186
187
def EscapeSConsVariableExpansion(s):
  """SCons has its own variable expansion syntax using $. We must escape it for
  strings to be interpreted literally. For some reason this requires four
  dollar signs, not two, even without the shell involved."""
  return '$$$$'.join(s.split('$'))
193
194
def EscapeCppDefine(s):
  """Escapes a CPP define so that it will reach the compiler unaltered."""
  # Apply shell quoting, then undo SCons's naive re-quoting, then escape
  # SCons variable expansion -- order matters.
  for escape in (EscapeShellArgument,
                 InvertNaiveSConsQuoting,
                 EscapeSConsVariableExpansion):
    s = escape(s)
  return s
201
202
def GenerateConfig(fp, config, indent='', src_dir=''):
  """
  Generates SCons dictionary items for a gyp configuration.

  This provides the main translation between the (lower-case) gyp settings
  keywords and the (upper-case) SCons construction variables.
  """
  var_mapping = {
      'ASFLAGS' : 'asflags',
      'CCFLAGS' : 'cflags',
      'CFLAGS' : 'cflags_c',
      'CXXFLAGS' : 'cflags_cc',
      'CPPDEFINES' : 'defines',
      'CPPPATH' : 'include_dirs',
      # Add the ldflags value to $LINKFLAGS, but not $SHLINKFLAGS.
      # SCons defines $SHLINKFLAGS to incorporate $LINKFLAGS, so
      # listing both here would case 'ldflags' to get appended to
      # both, and then have it show up twice on the command line.
      'LINKFLAGS' : 'ldflags',
  }
  postamble = '\n%s],\n' % indent
  for scons_var in sorted(var_mapping.keys()):
    gyp_var = var_mapping[scons_var]
    value = config.get(gyp_var)
    if not value:
      continue
    if gyp_var == 'defines':
      value = [EscapeCppDefine(v) for v in value]
    if gyp_var == 'include_dirs':
      if src_dir and not src_dir.endswith('/'):
        src_dir += '/'
      # Force SCons to evaluate the CPPPATH directories at
      # SConscript-read time, so delayed evaluation of $SRC_DIR
      # doesn't point it to the --generator-output= directory.
      value = ['env.Dir(%r)' % FixPath(v, src_dir) for v in value]
    else:
      value = [repr(v) for v in value]
    WriteList(fp,
              value,
              prefix=indent,
              preamble='%s%s = [\n    ' % (indent, scons_var),
              postamble=postamble)
248
249
def GenerateSConscript(output_filename, spec, build_file, build_file_data):
  """
  Generates a SConscript file for a specific target.

  This generates a SConscript file suitable for building any or all of
  the target's configurations.

  A SConscript file may be called multiple times to generate targets for
  multiple configurations.  Consequently, it needs to be ready to build
  the target for any requested configuration, and therefore contains
  information about the settings for all configurations (generated into
  the SConscript file at gyp configuration time) as well as logic for
  selecting (at SCons build time) the specific configuration being built.

  The general outline of a generated SConscript file is:

    --  Header

    --  Import 'env'.  This contains a $CONFIG_NAME construction
        variable that specifies what configuration to build
        (e.g. Debug, Release).

    --  Configurations.  This is a dictionary with settings for
        the different configurations (Debug, Release) under which this
        target can be built.  The values in the dictionary are themselves
        dictionaries specifying what construction variables should added
        to the local copy of the imported construction environment
        (Append), should be removed (FilterOut), and should outright
        replace the imported values (Replace).

    --  Clone the imported construction environment and update
        with the proper configuration settings.

    --  Initialize the lists of the targets' input files and prerequisites.

    --  Target-specific actions and rules.  These come after the
        input file and prerequisite initializations because the
        outputs of the actions and rules may affect the input file
        list (process_outputs_as_sources) and get added to the list of
        prerequisites (so that they're guaranteed to be executed before
        building the target).

    --  Call the Builder for the target itself.

    --  Arrange for any copies to be made into installation directories.

    --  Set up the {name} Alias (phony Node) for the target as the
        primary handle for building all of the target's pieces.

    --  Use env.Require() to make sure the prerequisites (explicitly
        specified, but also including the actions and rules) are built
        before the target itself.

    --  Return the {name} Alias to the calling SConstruct file
        so it can be added to the list of default targets.
  """
  scons_target = SCons.Target(spec)

  # Directory that will hold the generated SConscript.
  gyp_dir = os.path.dirname(output_filename)
  if not gyp_dir:
    gyp_dir = '.'
  gyp_dir = os.path.abspath(gyp_dir)

  # Paths written into the SConscript are expressed relative to $SRC_DIR.
  output_dir = os.path.dirname(output_filename)
  src_dir = build_file_data['_DEPTH']
  src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
  subdir = gyp.common.RelativePath(os.path.dirname(build_file), src_dir)
  src_subdir = '$SRC_DIR/' + subdir
  src_subdir_ = src_subdir + '/'

  component_name = os.path.splitext(os.path.basename(build_file))[0]
  target_name = spec['target_name']

  if not os.path.exists(gyp_dir):
    os.makedirs(gyp_dir)
  fp = open(output_filename, 'w')
  fp.write(header)

  fp.write('\nimport os\n')
  fp.write('\nImport("env")\n')

  # Clone the environment so per-target names don't leak between targets.
  fp.write('\n')
  fp.write('env = env.Clone(COMPONENT_NAME=%s,\n' % repr(component_name))
  fp.write('                TARGET_NAME=%s)\n' % repr(target_name))

  # Emit the SPAWN workaround once if any configuration needs it.
  for config in spec['configurations'].itervalues():
    if config.get('scons_line_length'):
      fp.write(_spawn_hack)
      break

  # Emit the per-configuration settings dictionary.
  indent = ' ' * 12
  fp.write('\n')
  fp.write('configurations = {\n')
  for config_name, config in spec['configurations'].iteritems():
    fp.write('    \'%s\' : {\n' % config_name)

    fp.write('        \'Append\' : dict(\n')
    GenerateConfig(fp, config, indent, src_subdir)
    libraries = spec.get('libraries')
    if libraries:
      WriteList(fp,
                map(repr, libraries),
                prefix=indent,
                preamble='%sLIBS = [\n    ' % indent,
                postamble='\n%s],\n' % indent)
    fp.write('        ),\n')

    fp.write('        \'FilterOut\' : dict(\n' )
    for key, var in config.get('scons_remove', {}).iteritems():
      fp.write('             %s = %s,\n' % (key, repr(var)))
    fp.write('        ),\n')

    fp.write('        \'Replace\' : dict(\n' )
    scons_settings = config.get('scons_variable_settings', {})
    for key in sorted(scons_settings.keys()):
      val = pprint.pformat(scons_settings[key])
      fp.write('             %s = %s,\n' % (key, val))
    if 'c++' in spec.get('link_languages', []):
      fp.write('             %s = %s,\n' % ('LINK', repr('$CXX')))
    if config.get('scons_line_length'):
      fp.write('             SPAWN = gyp_spawn,\n')
    fp.write('        ),\n')

    fp.write('        \'ImportExternal\' : [\n' )
    for var in config.get('scons_import_variables', []):
      fp.write('             %s,\n' % repr(var))
    fp.write('        ],\n')

    fp.write('        \'PropagateExternal\' : [\n' )
    for var in config.get('scons_propagate_variables', []):
      fp.write('             %s,\n' % repr(var))
    fp.write('        ],\n')

    fp.write('    },\n')
  fp.write('}\n')

  # Apply the selected configuration's settings at SCons read time.
  fp.write('\n'
           'config = configurations[env[\'CONFIG_NAME\']]\n'
           'env.Append(**config[\'Append\'])\n'
           'env.FilterOut(**config[\'FilterOut\'])\n'
           'env.Replace(**config[\'Replace\'])\n')

  fp.write('\n'
           '# Scons forces -fPIC for SHCCFLAGS on some platforms.\n'
           '# Disable that so we can control it from cflags in gyp.\n'
           '# Note that Scons itself is inconsistent with its -fPIC\n'
           '# setting. SHCCFLAGS forces -fPIC, and SHCFLAGS does not.\n'
           '# This will make SHCCFLAGS consistent with SHCFLAGS.\n'
           'env[\'SHCCFLAGS\'] = [\'$CCFLAGS\']\n')

  # Pull requested variables in from the command line / environment.
  fp.write('\n'
           'for _var in config[\'ImportExternal\']:\n'
           '  if _var in ARGUMENTS:\n'
           '    env[_var] = ARGUMENTS[_var]\n'
           '  elif _var in os.environ:\n'
           '    env[_var] = os.environ[_var]\n'
           'for _var in config[\'PropagateExternal\']:\n'
           '  if _var in ARGUMENTS:\n'
           '    env[_var] = ARGUMENTS[_var]\n'
           '  elif _var in os.environ:\n'
           '    env[\'ENV\'][_var] = os.environ[_var]\n')

  fp.write('\n'
           "env['ENV']['LD_LIBRARY_PATH'] = env.subst('$LIB_DIR')\n")

  # Disabled experiment kept for reference:
  #fp.write("\nif env.has_key('CPPPATH'):\n")
  #fp.write("  env['CPPPATH'] = map(env.Dir, env['CPPPATH'])\n")

  # Optional variant settings, enabled by command-line arguments.
  variants = spec.get('variants', {})
  for setting in sorted(variants.keys()):
    if_fmt = 'if ARGUMENTS.get(%s) not in (None, \'0\'):\n'
    fp.write('\n')
    fp.write(if_fmt % repr(setting.upper()))
    fp.write('  env.AppendUnique(\n')
    GenerateConfig(fp, variants[setting], indent, src_subdir)
    fp.write('  )\n')

  # Emit the input_files list for this target.
  scons_target.write_input_files(fp)

  fp.write('\n')
  fp.write('target_files = []\n')
  prerequisites = spec.get('scons_prerequisites', [])
  fp.write('prerequisites = %s\n' % pprint.pformat(prerequisites))

  # Actions: Command nodes when outputs are declared, always-built Alias
  # nodes otherwise.
  actions = spec.get('actions', [])
  for action in actions:
    a = ['cd', src_subdir, '&&'] + action['action']
    message = action.get('message')
    if message:
      message = repr(message)
    inputs = [FixPath(f, src_subdir_) for f in action.get('inputs', [])]
    outputs = [FixPath(f, src_subdir_) for f in action.get('outputs', [])]
    if outputs:
      template = _command_template
    else:
      template = _alias_template
    fp.write(template % {
        'inputs' : pprint.pformat(inputs),
        'outputs' : pprint.pformat(outputs),
        'action' : pprint.pformat(a),
        'message' : message,
        'target_name': target_name,
    })
    if int(action.get('process_outputs_as_sources', 0)):
      fp.write('input_files.extend(_outputs)\n')
    fp.write('prerequisites.extend(_outputs)\n')
    fp.write('target_files.extend(_outputs)\n')

  # Rules: one Builder per rule, applied to matching input files.
  rules = spec.get('rules', [])
  for rule in rules:
    name = rule['rule_name']
    a = ['cd', src_subdir, '&&'] + rule['action']
    message = rule.get('message')
    if message:
      message = repr(message)
    if int(rule.get('process_outputs_as_sources', 0)):
      poas_line = '_processed_input_files.extend(_generated)'
    else:
      poas_line = '_processed_input_files.append(infile)'
    inputs = [FixPath(f, src_subdir_) for f in rule.get('inputs', [])]
    outputs = [FixPath(f, src_subdir_) for f in rule.get('outputs', [])]
    fp.write(_rule_template % {
        'inputs' : pprint.pformat(inputs),
        'outputs' : pprint.pformat(outputs),
        'action' : pprint.pformat(a),
        'extension' : rule['extension'],
        'name' : name,
        'message' : message,
        'process_outputs_as_sources_line' : poas_line,
        'src_dir' : src_subdir_,
    })

  # Emit the Builder call for the target itself.
  scons_target.write_target(fp, src_subdir)

  # Copies: emit the symlink-aware GYPCopy factory once, then one Command
  # per copied file.
  copies = spec.get('copies', [])
  if copies:
    fp.write(_copy_action_template)
  for copy in copies:
    destdir = None
    files = None
    try:
      destdir = copy['destination']
    except KeyError, e:
      gyp.common.ExceptionAppend(
        e,
        "Required 'destination' key missing for 'copies' in %s." % build_file)
      raise
    try:
      files = copy['files']
    except KeyError, e:
      gyp.common.ExceptionAppend(
        e, "Required 'files' key missing for 'copies' in %s." % build_file)
      raise
    if not files:
      # TODO: should probably add a (suppressible) warning;
      # a null file list may be unintentional.
      continue
    if not destdir:
      raise Exception(
        "Required 'destination' key is empty for 'copies' in %s." % build_file)

    fmt = ('\n'
           '_outputs = env.Command(%s,\n'
           '    %s,\n'
           '    GYPCopy(\'$TARGET\', \'$SOURCE\'))\n')
    for f in copy['files']:
      # Remove trailing separators so basename() acts like Unix basename and
      # always returns the last element, whether a file or dir. Without this,
      # only the contents, not the directory itself, are copied (and nothing
      # might be copied if dest already exists, since scons thinks nothing needs
      # to be done).
      dest = os.path.join(destdir, os.path.basename(f.rstrip(os.sep)))
      f = FixPath(f, src_subdir_)
      dest = FixPath(dest, src_subdir_)
      fp.write(fmt % (repr(dest), repr(f)))
      fp.write('target_files.extend(_outputs)\n')

  # run_as / test targets: emit the action that executes the built target.
  if spec.get('run_as') or int(spec.get('test', 0)):
    run_as = spec.get('run_as', {
        'action' : ['$TARGET_NAME', '--gtest_print_time'],
    })
    action = run_as.get('action', [])
    working_directory = run_as.get('working_directory')
    if not working_directory:
      working_directory = gyp_dir
    else:
      if not os.path.isabs(working_directory):
        working_directory = os.path.normpath(os.path.join(gyp_dir,
                                                          working_directory))
    if run_as.get('environment'):
      for (key, val) in run_as.get('environment').iteritems():
        action = ['%s="%s"' % (key, val)] + action
    action = ['cd', '"%s"' % working_directory, '&&'] + action
    fp.write(_run_as_template % {
        'action' : pprint.pformat(action),
        'message' : run_as.get('message', ''),
    })

  # The target's phony Alias is the primary handle for building it.
  fmt = "\ngyp_target = env.Alias('%s', target_files)\n"
  fp.write(fmt % target_name)

  # Wire up dependency and prerequisite ordering.
  dependencies = spec.get('scons_dependencies', [])
  if dependencies:
    WriteList(fp, dependencies, preamble='dependencies = [\n    ',
                                postamble='\n]\n')
    fp.write('env.Requires(target_files, dependencies)\n')
    fp.write('env.Requires(gyp_target, dependencies)\n')
    fp.write('for prerequisite in prerequisites:\n')
    fp.write('  env.Requires(prerequisite, dependencies)\n')
  fp.write('env.Requires(gyp_target, prerequisites)\n')

  if spec.get('run_as', 0) or int(spec.get('test', 0)):
    fp.write(_run_as_template_suffix % {
        'target_name': target_name,
    })

  fp.write('Return("gyp_target")\n')

  fp.close()
574
575
576 #############################################################################
577 # TEMPLATE BEGIN
578
# Text of the "wrapper" SConscript emitted by GenerateSConscriptWrapper.
# It is written verbatim into the generated *_main.scons file, so its body
# is runtime output and must not be edited for style.  %-substitution keys:
# %(sconscript_files)s, %(default_configuration)r, %(sconsbuild_dir)s,
# %(scons_tools)s, %(src_dir)r; literal percent signs are doubled (%%).
_wrapper_template = """\

__doc__ = '''
Wrapper configuration for building this entire "solution,"
including all the specific targets in various *.scons files.
'''

import os
import sys

import SCons.Environment
import SCons.Util

def GetProcessorCount():
  '''
  Detects the number of CPUs on the system. Adapted form:
  http://codeliberates.blogspot.com/2008/05/detecting-cpuscores-in-python.html
  '''
  # Linux, Unix and Mac OS X:
  if hasattr(os, 'sysconf'):
    if os.sysconf_names.has_key('SC_NPROCESSORS_ONLN'):
      # Linux and Unix or Mac OS X with python >= 2.5:
      return os.sysconf('SC_NPROCESSORS_ONLN')
    else:  # Mac OS X with Python < 2.5:
      return int(os.popen2("sysctl -n hw.ncpu")[1].read())
  # Windows:
  if os.environ.has_key('NUMBER_OF_PROCESSORS'):
    return max(int(os.environ.get('NUMBER_OF_PROCESSORS', '1')), 1)
  return 1  # Default

# Support PROGRESS= to show progress in different ways.
p = ARGUMENTS.get('PROGRESS')
if p == 'spinner':
  Progress(['/\\r', '|\\r', '\\\\\\r', '-\\r'],
           interval=5,
           file=open('/dev/tty', 'w'))
elif p == 'name':
  Progress('$TARGET\\r', overwrite=True, file=open('/dev/tty', 'w'))

# Set the default -j value based on the number of processors.
SetOption('num_jobs', GetProcessorCount() + 1)

# Have SCons use its cached dependency information.
SetOption('implicit_cache', 1)

# Only re-calculate MD5 checksums if a timestamp has changed.
Decider('MD5-timestamp')

# Since we set the -j value by default, suppress SCons warnings about being
# unable to support parallel build on versions of Python with no threading.
default_warnings = ['no-no-parallel-support']
SetOption('warn', default_warnings + GetOption('warn'))

AddOption('--mode', nargs=1, dest='conf_list', default=[],
          action='append', help='Configuration to build.')

AddOption('--verbose', dest='verbose', default=False,
          action='store_true', help='Verbose command-line output.')


#
sconscript_file_map = %(sconscript_files)s

class LoadTarget:
  '''
  Class for deciding if a given target sconscript is to be included
  based on a list of included target names, optionally prefixed with '-'
  to exclude a target name.
  '''
  def __init__(self, load):
    '''
    Initialize a class with a list of names for possible loading.

    Arguments:
      load: list of elements in the LOAD= specification
    '''
    self.included = set([c for c in load if not c.startswith('-')])
    self.excluded = set([c[1:] for c in load if c.startswith('-')])

    if not self.included:
      self.included = set(['all'])

  def __call__(self, target):
    '''
    Returns True if the specified target's sconscript file should be
    loaded, based on the initialized included and excluded lists.
    '''
    return (target in self.included or
            ('all' in self.included and not target in self.excluded))

if 'LOAD' in ARGUMENTS:
  load = ARGUMENTS['LOAD'].split(',')
else:
  load = []
load_target = LoadTarget(load)

sconscript_files = []
for target, sconscript in sconscript_file_map.iteritems():
  if load_target(target):
    sconscript_files.append(sconscript)


target_alias_list= []

conf_list = GetOption('conf_list')
if conf_list:
  # In case the same --mode= value was specified multiple times.
  conf_list = list(set(conf_list))
else:
  conf_list = [%(default_configuration)r]

sconsbuild_dir = Dir(%(sconsbuild_dir)s)


def FilterOut(self, **kw):
  kw = SCons.Environment.copy_non_reserved_keywords(kw)
  for key, val in kw.items():
    envval = self.get(key, None)
    if envval is None:
      # No existing variable in the environment, so nothing to delete.
      continue

    for vremove in val:
      # Use while not if, so we can handle duplicates.
      while vremove in envval:
        envval.remove(vremove)

    self[key] = envval

    # TODO(sgk): SCons.Environment.Append() has much more logic to deal
    # with various types of values.  We should handle all those cases in here
    # too.  (If variable is a dict, etc.)


non_compilable_suffixes = {
    'LINUX' : set([
        '.bdic',
        '.css',
        '.dat',
        '.fragment',
        '.gperf',
        '.h',
        '.hh',
        '.hpp',
        '.html',
        '.hxx',
        '.idl',
        '.in',
        '.in0',
        '.in1',
        '.js',
        '.mk',
        '.rc',
        '.sigs',
        '',
    ]),
    'WINDOWS' : set([
        '.h',
        '.hh',
        '.hpp',
        '.dat',
        '.idl',
        '.in',
        '.in0',
        '.in1',
    ]),
}

def compilable(env, file):
  base, ext = os.path.splitext(str(file))
  if ext in non_compilable_suffixes[env['TARGET_PLATFORM']]:
    return False
  return True

def compilable_files(env, sources):
  return [x for x in sources if compilable(env, x)]

def GypProgram(env, target, source, *args, **kw):
  source = compilable_files(env, source)
  result = env.Program(target, source, *args, **kw)
  if env.get('INCREMENTAL'):
    env.Precious(result)
  return result

def GypTestProgram(env, target, source, *args, **kw):
  source = compilable_files(env, source)
  result = env.Program(target, source, *args, **kw)
  if env.get('INCREMENTAL'):
    env.Precious(*result)
  return result

def GypLibrary(env, target, source, *args, **kw):
  source = compilable_files(env, source)
  result = env.Library(target, source, *args, **kw)
  return result

def GypLoadableModule(env, target, source, *args, **kw):
  source = compilable_files(env, source)
  result = env.LoadableModule(target, source, *args, **kw)
  return result

def GypStaticLibrary(env, target, source, *args, **kw):
  source = compilable_files(env, source)
  result = env.StaticLibrary(target, source, *args, **kw)
  return result

def GypSharedLibrary(env, target, source, *args, **kw):
  source = compilable_files(env, source)
  result = env.SharedLibrary(target, source, *args, **kw)
  if env.get('INCREMENTAL'):
    env.Precious(result)
  return result

def add_gyp_methods(env):
  env.AddMethod(GypProgram)
  env.AddMethod(GypTestProgram)
  env.AddMethod(GypLibrary)
  env.AddMethod(GypLoadableModule)
  env.AddMethod(GypStaticLibrary)
  env.AddMethod(GypSharedLibrary)

  env.AddMethod(FilterOut)

  env.AddMethod(compilable)


base_env = Environment(
    tools = %(scons_tools)s,
    INTERMEDIATE_DIR='$OBJ_DIR/${COMPONENT_NAME}/_${TARGET_NAME}_intermediate',
    LIB_DIR='$TOP_BUILDDIR/lib',
    OBJ_DIR='$TOP_BUILDDIR/obj',
    SCONSBUILD_DIR=sconsbuild_dir.abspath,
    SHARED_INTERMEDIATE_DIR='$OBJ_DIR/_global_intermediate',
    SRC_DIR=Dir(%(src_dir)r),
    TARGET_PLATFORM='LINUX',
    TOP_BUILDDIR='$SCONSBUILD_DIR/$CONFIG_NAME',
    LIBPATH=['$LIB_DIR'],
)

if not GetOption('verbose'):
  base_env.SetDefault(
      ARCOMSTR='Creating library $TARGET',
      ASCOMSTR='Assembling $TARGET',
      CCCOMSTR='Compiling $TARGET',
      CONCATSOURCECOMSTR='ConcatSource $TARGET',
      CXXCOMSTR='Compiling $TARGET',
      LDMODULECOMSTR='Building loadable module $TARGET',
      LINKCOMSTR='Linking $TARGET',
      MANIFESTCOMSTR='Updating manifest for $TARGET',
      MIDLCOMSTR='Compiling IDL $TARGET',
      PCHCOMSTR='Precompiling $TARGET',
      RANLIBCOMSTR='Indexing $TARGET',
      RCCOMSTR='Compiling resource $TARGET',
      SHCCCOMSTR='Compiling $TARGET',
      SHCXXCOMSTR='Compiling $TARGET',
      SHLINKCOMSTR='Linking $TARGET',
      SHMANIFESTCOMSTR='Updating manifest for $TARGET',
  )

add_gyp_methods(base_env)

for conf in conf_list:
  env = base_env.Clone(CONFIG_NAME=conf)
  SConsignFile(env.File('$TOP_BUILDDIR/.sconsign').abspath)
  for sconscript in sconscript_files:
    target_alias = env.SConscript(sconscript, exports=['env'])
    if target_alias:
      target_alias_list.extend(target_alias)

Default(Alias('all', target_alias_list))

help_fmt = '''
Usage: hammer [SCONS_OPTIONS] [VARIABLES] [TARGET] ...

Local command-line build options:
  --mode=CONFIG             Configuration to build:
                              --mode=Debug [default]
                              --mode=Release
  --verbose                 Print actual executed command lines.

Supported command-line build variables:
  LOAD=[module,...]         Comma-separated list of components to load in the
                              dependency graph ('-' prefix excludes)
  PROGRESS=type             Display a progress indicator:
                              name:  print each evaluated target name
                              spinner:  print a spinner every 5 targets

The following TARGET names can also be used as LOAD= module names:

%%s
'''

if GetOption('help'):
  def columnar_text(items, width=78, indent=2, sep=2):
    result = []
    colwidth = max(map(len, items)) + sep
    cols = (width - indent) / colwidth
    if cols < 1:
      cols = 1
    rows = (len(items) + cols - 1) / cols
    indent = '%%*s' %% (indent, '')
    sep = indent
    for row in xrange(0, rows):
      result.append(sep)
      for i in xrange(row, len(items), rows):
        result.append('%%-*s' %% (colwidth, items[i]))
      sep = '\\n' + indent
    result.append('\\n')
    return ''.join(result)

  load_list = set(sconscript_file_map.keys())
  target_aliases = set(map(str, target_alias_list))

  common = load_list and target_aliases
  load_only = load_list - common
  target_only = target_aliases - common
  help_text = [help_fmt %% columnar_text(sorted(list(common)))]
  if target_only:
    fmt = "The following are additional TARGET names:\\n\\n%%s\\n"
    help_text.append(fmt %% columnar_text(sorted(list(target_only))))
  if load_only:
    fmt = "The following are additional LOAD= module names:\\n\\n%%s\\n"
    help_text.append(fmt %% columnar_text(sorted(list(load_only))))
  Help(''.join(help_text))
"""
904
905 # TEMPLATE END
906 #############################################################################
907
908
def GenerateSConscriptWrapper(build_file, build_file_data, name,
                              output_filename, sconscript_files,
                              default_configuration):
  """
  Generates the "wrapper" SConscript file (analogous to the Visual Studio
  solution) that calls all the individual target SConscript files, plus a
  sibling SConstruct file that invokes it.

  Arguments:
    build_file: path of the .gyp file this wrapper corresponds to (currently
        unused in the body; kept for interface compatibility).
    build_file_data: loaded data dict for build_file; supplies '_DEPTH' and
        the optional 'scons_settings' section.
    name: solution name substituted into the wrapper template.
    output_filename: path of the *_main.scons wrapper to write.
    sconscript_files: dict mapping target name -> relative path of that
        target's .scons file.
    default_configuration: configuration name used when no --mode= is given.
  """
  output_dir = os.path.dirname(output_filename)
  src_dir = build_file_data['_DEPTH']
  src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
  if not src_dir_rel:
    src_dir_rel = '.'
  scons_settings = build_file_data.get('scons_settings', {})
  sconsbuild_dir = scons_settings.get('sconsbuild_dir', '#')
  scons_tools = scons_settings.get('tools', ['default'])

  # Build the text of a dict(...) literal mapping each target name to its
  # sconscript path; sorted so the generated file is deterministic.
  sconscript_file_lines = ['dict(']
  for target in sorted(sconscript_files.keys()):
    sconscript = sconscript_files[target]
    sconscript_file_lines.append('    %s = %r,' % (target, sconscript))
  sconscript_file_lines.append(')')

  # try/finally guarantees the handle is closed even if a write raises
  # (the previous version leaked the handle on error).
  fp = open(output_filename, 'w')
  try:
    fp.write(header)
    fp.write(_wrapper_template % {
        'default_configuration' : default_configuration,
        'name' : name,
        'scons_tools' : repr(scons_tools),
        'sconsbuild_dir' : repr(sconsbuild_dir),
        'sconscript_files' : '\n'.join(sconscript_file_lines),
        'src_dir' : src_dir_rel,
    })
  finally:
    fp.close()

  # Generate the SConstruct file that invokes the wrapper SConscript.
  # ('output_dirname' avoids shadowing the dir() builtin.)
  output_dirname, fname = os.path.split(output_filename)
  SConstruct = os.path.join(output_dirname, 'SConstruct')
  fp = open(SConstruct, 'w')
  try:
    fp.write(header)
    fp.write('SConscript(%s)\n' % repr(fname))
  finally:
    fp.close()
950
951
def TargetFilename(target, build_file=None, output_suffix=''):
  """Returns the .scons file name for the specified target.

  If build_file is None, target must be a fully-qualified target string,
  from which the build file and bare target name are extracted.
  """
  if build_file is None:
    # Unpack the build file path and bare target name from the
    # fully-qualified target specification.
    build_file, target = gyp.common.ParseQualifiedTarget(target)[:2]
  scons_name = '%s%s.scons' % (target, output_suffix)
  return os.path.join(os.path.dirname(build_file), scons_name)
960
961
def GenerateOutput(target_list, target_dicts, data, params):
  """
  Generates all the output files for the specified targets.

  Arguments:
    target_list: list of qualified target names to generate.
    target_dicts: dict mapping qualified target name -> target spec dict.
    data: dict mapping build-file path -> loaded build-file data.
    params: generator parameters; 'options' and 'cwd' are read here.
  Side effects: mutates each spec in target_dicts (adds 'libraries' and
  'scons_dependencies' entries) and writes one .scons file per target plus
  one *_main.scons wrapper per .gyp file.
  """
  options = params['options']

  # When --generator-output is given, map paths under the current directory
  # into the requested output directory instead.
  if options.generator_output:
    def output_path(filename):
      return filename.replace(params['cwd'], options.generator_output)
  else:
    def output_path(filename):
      return filename

  default_configuration = None

  for qualified_target in target_list:
    spec = target_dicts[qualified_target]
    if spec['toolset'] != 'target':
      raise Exception(
          'Multiple toolsets not supported in scons build (target %s)' %
          qualified_target)
    scons_target = SCons.Target(spec)
    if scons_target.is_ignored:
      continue

    # TODO: assumes the default_configuration of the first target
    # non-Default target is the correct default for all targets.
    # Need a better model for handle variation between targets.
    if (not default_configuration and
        spec['default_configuration'] != 'Default'):
      default_configuration = spec['default_configuration']

    build_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[:2]
    output_file = TargetFilename(target, build_file, options.suffix)
    if options.generator_output:
      output_file = output_path(output_file)

    if not spec.has_key('libraries'):
      spec['libraries'] = []

    # Add dependent static library targets to the 'libraries' value.
    deps = spec.get('dependencies', [])
    spec['scons_dependencies'] = []
    for d in deps:
      td = target_dicts[d]
      target_name = td['target_name']
      spec['scons_dependencies'].append("Alias('%s')" % target_name)
      if td['type'] in ('static_library', 'shared_library'):
        libname = td.get('product_name', target_name)
        spec['libraries'].append('lib' + libname)
      if td['type'] == 'loadable_module':
        # NOTE(review): 'prereqs' is computed but never used afterwards, and
        # 'td_target' is mutated and then discarded — possibly dead code or a
        # leftover; confirm whether SCons.Target caches instances elsewhere.
        prereqs = spec.get('scons_prerequisites', [])
        # TODO: parameterize with <(SHARED_LIBRARY_*) variables?
        td_target = SCons.Target(td)
        td_target.target_prefix = '${SHLIBPREFIX}'
        td_target.target_suffix = '${SHLIBSUFFIX}'

    GenerateSConscript(output_file, spec, build_file, data[build_file])

  if not default_configuration:
    default_configuration = 'Default'

  # Second pass: write one wrapper (*_main.scons + SConstruct) per .gyp file,
  # referencing the per-target .scons files generated above.
  for build_file in sorted(data.keys()):
    path, ext = os.path.splitext(build_file)
    if ext != '.gyp':
      continue
    output_dir, basename = os.path.split(path)
    output_filename = path + '_main' + options.suffix + '.scons'

    all_targets = gyp.common.AllTargets(target_list, target_dicts, build_file)
    sconscript_files = {}
    for t in all_targets:
      scons_target = SCons.Target(target_dicts[t])
      if scons_target.is_ignored:
        continue
      bf, target = gyp.common.ParseQualifiedTarget(t)[:2]
      target_filename = TargetFilename(target, bf, options.suffix)
      tpath = gyp.common.RelativePath(target_filename, output_dir)
      sconscript_files[target] = tpath

    output_filename = output_path(output_filename)
    if sconscript_files:
      GenerateSConscriptWrapper(build_file, data[build_file], basename,
                                output_filename, sconscript_files,
                                default_configuration)
+0
-1139
third_party/gyp/pylib/gyp/generator/xcode.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2010 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import filecmp
7 import gyp.common
8 import gyp.xcodeproj_file
9 import errno
10 import os
11 import posixpath
12 import re
13 import shutil
14 import subprocess
15 import tempfile
16
17
# Project files generated by this module will use _intermediate_var as a
# custom Xcode setting whose value is a DerivedSources-like directory that's
# project-specific and configuration-specific.  The normal choice,
# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
# as it is likely that multiple targets within a single project file will want
# to access the same set of generated files.  The other option,
# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
# it is not configuration-specific.  INTERMEDIATE_DIR is defined as
# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
_intermediate_var = 'INTERMEDIATE_DIR'

# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
# targets that share the same BUILT_PRODUCTS_DIR.
_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'

# Expansions for gyp generator variables in this generator; build-time values
# are expressed in Xcode $(...) build-setting syntax.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'SHARED_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_SUFFIX': '.dylib',
  # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
  # It is specific to each build environment.  It is only guaranteed to exist
  # and be constant within the context of a project, corresponding to a single
  # input file.  Some build environments may allow their intermediate directory
  # to be shared on a wider scale, but this is not guaranteed.
  'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
  'OS': 'mac',
  'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
  'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
  'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
  'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
  'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
  'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
  'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
  'CONFIGURATION_NAME': '$(CONFIGURATION)',
}

# The Xcode-specific sections that hold paths.
generator_additional_path_sections = [
  'mac_bundle_resources',
  # 'mac_framework_dirs', input already handles _dirs endings.
]

# The Xcode-specific keys that exist on targets and aren't moved down to
# configurations.
generator_additional_non_configuration_keys = [
  'mac_bundle',
  'mac_bundle_resources',
  'xcode_create_dependents_test_runner',
]

# We want to let any rules apply to files that are resources also.
generator_extra_sources_for_rules = [
  'mac_bundle_resources',
]
75
76
def CreateXCConfigurationList(configuration_names):
  """Builds an XCConfigurationList with one XCBuildConfiguration per name.

  The first entry in configuration_names becomes the default configuration.
  """
  config_list = gyp.xcodeproj_file.XCConfigurationList(
      {'buildConfigurations': []})
  for config_name in configuration_names:
    build_config = gyp.xcodeproj_file.XCBuildConfiguration(
        {'name': config_name})
    config_list.AppendProperty('buildConfigurations', build_config)
  config_list.SetProperty('defaultConfigurationName', configuration_names[0])
  return config_list
85
86
87 class XcodeProject(object):
  def __init__(self, gyp_path, path, build_file_dict):
    """Wraps one .xcodeproj generated from one .gyp file.

    Arguments:
      gyp_path: path of the source .gyp file.
      path: path of the .xcodeproj directory to create.
      build_file_dict: loaded data dict for gyp_path.
    Side effect: creates the .xcodeproj directory on disk; a pre-existing
    directory (EEXIST) is not an error.
    """
    self.gyp_path = gyp_path
    self.path = path
    self.project = gyp.xcodeproj_file.PBXProject(path=path)
    # projectDirPath: relative path from the project's directory to the .gyp
    # file's directory — presumably so in-project references resolve relative
    # to the .gyp location; confirm against xcodeproj_file semantics.
    projectDirPath = gyp.common.RelativePath(
                         os.path.dirname(os.path.abspath(self.gyp_path)),
                         os.path.dirname(path) or '.')
    self.project.SetProperty('projectDirPath', projectDirPath)
    self.project_file = \
        gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
    self.build_file_dict = build_file_dict

    # TODO(mark): add destructor that cleans up self.path if created_dir is
    # True and things didn't complete successfully.  Or do something even
    # better with "try"?
    self.created_dir = False
    try:
      os.makedirs(self.path)
      self.created_dir = True
    except OSError, e:
      # EEXIST just means the directory already existed; anything else is a
      # real failure and is re-raised.
      if e.errno != errno.EEXIST:
        raise
110
  def Finalize1(self, xcode_targets, serialize_all_tests):
    """First finalization pass over the generated project.

    Collects the configuration names used by every target, installs a
    project-level XCConfigurationList and project-wide build settings,
    creates "Run ..." aggregate targets for run_as/test targets, re-sorts
    the project's target list to match the input order, and adds generated
    "All" / "Run All Tests" aggregate targets where appropriate.

    Arguments:
      xcode_targets: dict mapping qualified gyp target name -> Xcode
          target object.
      serialize_all_tests: if true, test commands are wrapped in a file
          lock so only one test runs at a time.
    """
    # Collect a list of all of the build configuration names used by the
    # various targets in the file.  It is very heavily advised to keep each
    # target in an entire project (even across multiple project files) using
    # the same set of configuration names.
    configurations = []
    for xct in self.project.GetProperty('targets'):
      xccl = xct.GetProperty('buildConfigurationList')
      xcbcs = xccl.GetProperty('buildConfigurations')
      for xcbc in xcbcs:
        name = xcbc.GetProperty('name')
        if name not in configurations:
          configurations.append(name)

    # Replace the XCConfigurationList attached to the PBXProject object with
    # a new one specifying all of the configuration names used by the various
    # targets.
    try:
      xccl = CreateXCConfigurationList(configurations)
      self.project.SetProperty('buildConfigurationList', xccl)
    except:
      import sys
      sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
      raise

    # The need for this setting is explained above where _intermediate_var is
    # defined.  The comments below about wanting to avoid project-wide build
    # settings apply here too, but this needs to be set on a project-wide basis
    # so that files relative to the _intermediate_var setting can be displayed
    # properly in the Xcode UI.
    #
    # Note that for configuration-relative files such as anything relative to
    # _intermediate_var, for the purposes of UI tree view display, Xcode will
    # only resolve the configuration name once, when the project file is
    # opened.  If the active build configuration is changed, the project file
    # must be closed and reopened if it is desired for the tree view to update.
    # This is filed as Apple radar 6588391.
    xccl.SetBuildSetting(_intermediate_var,
                         '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
    xccl.SetBuildSetting(_shared_intermediate_var,
                         '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')

    # Set user-specified project-wide build settings.  This is intended to be
    # used very sparingly.  Really, almost everything should go into
    # target-specific build settings sections.  The project-wide settings are
    # only intended to be used in cases where Xcode attempts to resolve
    # variable references in a project context as opposed to a target context,
    # such as when resolving sourceTree references while building up the tree
    # tree view for UI display.
    for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
      xccl.SetBuildSetting(xck, xcv)

    # Sort the targets based on how they appeared in the input.
    # TODO(mark): Like a lot of other things here, this assumes internal
    # knowledge of PBXProject - in this case, of its "targets" property.

    # ordinary_targets are ordinary targets that are already in the project
    # file.  run_test_targets are the targets that run unittests and should be
    # used for the Run All Tests target.  support_targets are the action/rule
    # targets used by GYP file targets, just kept for the assert check.
    ordinary_targets = []
    run_test_targets = []
    support_targets = []

    # targets is full list of targets in the project.
    targets = []

    # does the it define it's own "all"?
    has_custom_all = False

    # targets_for_all is the list of ordinary_targets that should be listed
    # in this project's "All" target.  It includes each non_runtest_target
    # that does not have suppress_wildcard set.
    targets_for_all = []

    for target in self.build_file_dict['targets']:
      target_name = target['target_name']
      toolset = target['toolset']
      qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
                                                    toolset)
      xcode_target = xcode_targets[qualified_target]
      # Make sure that the target being added to the sorted list is already in
      # the unsorted list.
      assert xcode_target in self.project._properties['targets']
      targets.append(xcode_target)
      ordinary_targets.append(xcode_target)
      if xcode_target.support_target:
        support_targets.append(xcode_target.support_target)
        targets.append(xcode_target.support_target)

      if not int(target.get('suppress_wildcard', False)):
        targets_for_all.append(xcode_target)

      if target_name.lower() == 'all':
        has_custom_all = True;

      # If this target has a 'run_as' attribute, or is a test, add its
      # target to the targets, and (if it's a test) add it the to the
      # test targets.
      is_test = int(target.get('test', 0))
      if target.get('run_as') or is_test:
        # Make a target to run something.  It should have one
        # dependency, the parent xcode target.
        xccl = CreateXCConfigurationList(configurations)
        run_target = gyp.xcodeproj_file.PBXAggregateTarget({
              'name': 'Run ' + target_name,
              'productName': xcode_target.GetProperty('productName'),
              'buildConfigurationList': xccl,
            },
            parent=self.project)
        run_target.AddDependency(xcode_target)

        # The test runner target has a build phase that executes the
        # test, if this has the 'test' attribute.  If the 'run_as' tag
        # doesn't exist (meaning that this must be a test), then we
        # define a default test command line.
        command = target.get('run_as', {
          'action': ['${BUILT_PRODUCTS_DIR}/${PRODUCT_NAME}']
        })

        script = ''
        if command.get('working_directory'):
          script = script + 'cd "%s"\n' % \
              gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
                  command.get('working_directory'))

        if command.get('environment'):
          script = script + "\n".join(
            ['export %s="%s"' %
             (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
             for (key, val) in command.get('environment').iteritems()]) + "\n"

        # Some test end up using sockets, files on disk, etc. and can get
        # confused if more then one test runs at a time.  The generator
        # flag 'xcode_serialize_all_test_runs' controls the forcing of all
        # tests serially.  It defaults to True.  To get serial runs this
        # little bit of python does the same as the linux flock utility to
        # make sure only one runs at a time.
        command_prefix = ''
        if is_test and serialize_all_tests:
          command_prefix = \
"""python -c "import fcntl, subprocess, sys
file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
fcntl.flock(file.fileno(), fcntl.LOCK_EX)
sys.exit(subprocess.call(sys.argv[1:]))" """

        # If we were unable to exec for some reason, we want to exit
        # with an error, and fixup variable references to be shell
        # syntax instead of xcode syntax.
        script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
            gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
                gyp.common.EncodePOSIXShellList(command.get('action')))

        ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
              'shellScript': script,
              'showEnvVarsInLog': 0,
            })
        run_target.AppendProperty('buildPhases', ssbp)

        # Add the run target to the project file.
        targets.append(run_target)
        if is_test:
          run_test_targets.append(run_target)
          xcode_target.test_runner = run_target


    # Make sure that the list of targets being replaced is the same length as
    # the one replacing it, but allow for the added test runner targets.
    assert len(self.project._properties['targets']) == \
      len(ordinary_targets) + len(support_targets)

    self.project._properties['targets'] = targets

    # Get rid of unnecessary levels of depth in groups like the Source group.
    self.project.RootGroupsTakeOverOnlyChildren(True)

    # Sort the groups nicely.  Do this after sorting the targets, because the
    # Products group is sorted based on the order of the targets.
    self.project.SortGroups()

    # Create an "All" target if there's more than one target in this project
    # file and the project didn't define its own "All" target.  Put a generated
    # "All" target first so that people opening up the project for the first
    # time will build everything by default.
    if len(targets_for_all) > 1 and not has_custom_all:
      xccl = CreateXCConfigurationList(configurations)
      all_target = gyp.xcodeproj_file.PBXAggregateTarget(
          {
            'buildConfigurationList': xccl,
            'name': 'All',
          },
          parent=self.project)

      for target in targets_for_all:
        all_target.AddDependency(target)

      # TODO(mark): This is evil because it relies on internal knowledge of
      # PBXProject._properties.  It's important to get the "All" target first,
      # though.
      self.project._properties['targets'].insert(0, all_target)

    # The same, but for run_test_targets.
    if len(run_test_targets) > 1:
      xccl = CreateXCConfigurationList(configurations)
      run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
          {
            'buildConfigurationList': xccl,
            'name': 'Run All Tests',
          },
          parent=self.project)
      for run_test_target in run_test_targets:
        run_all_tests_target.AddDependency(run_test_target)

      # Insert after the "All" target, which must exist if there is more than
      # one run_test_target.
      self.project._properties['targets'].insert(1, run_all_tests_target)
327
  def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
    """Second finalization pass: dependent-test runners, project references,
    and object IDs.

    Arguments:
      xcode_targets: dict mapping qualified gyp target name -> Xcode target.
      xcode_target_to_target_dict: dict mapping Xcode target object -> its
          gyp target spec dict.
    """
    # Finalize2 needs to happen in a separate step because the process of
    # updating references to other projects depends on the ordering of targets
    # within remote project files.  Finalize1 is responsible for sorting duty,
    # and once all project files are sorted, Finalize2 can come in and update
    # these references.

    # To support making a "test runner" target that will run all the tests
    # that are direct dependents of any given target, we look for
    # xcode_create_dependents_test_runner being set on an Aggregate target,
    # and generate a second target that will run the tests runners found under
    # the marked target.
    for bf_tgt in self.build_file_dict['targets']:
      if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
        tgt_name = bf_tgt['target_name']
        toolset = bf_tgt['toolset']
        qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
                                                      tgt_name, toolset)
        xcode_target = xcode_targets[qualified_target]
        if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
          # Collect all the run test targets.
          all_run_tests = []
          pbxtds = xcode_target.GetProperty('dependencies')
          for pbxtd in pbxtds:
            pbxcip = pbxtd.GetProperty('targetProxy')
            dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
            target_dict = xcode_target_to_target_dict[dependency_xct]
            if target_dict and int(target_dict.get('test', 0)):
              # Finalize1 is expected to have attached a test_runner to
              # every test target.
              assert dependency_xct.test_runner
              all_run_tests.append(dependency_xct.test_runner)

          # Directly depend on all the runners as they depend on the target
          # that builds them.
          if len(all_run_tests) > 0:
            run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
                  'name': 'Run %s Tests' % tgt_name,
                  'productName': tgt_name,
                },
                parent=self.project)
            for run_test_target in all_run_tests:
              run_all_target.AddDependency(run_test_target)

            # Insert the test runner after the related target.
            idx = self.project._properties['targets'].index(xcode_target)
            self.project._properties['targets'].insert(idx + 1, run_all_target)

    # Update all references to other projects, to make sure that the lists of
    # remote products are complete.  Otherwise, Xcode will fill them in when
    # it opens the project file, which will result in unnecessary diffs.
    # TODO(mark): This is evil because it relies on internal knowledge of
    # PBXProject._other_pbxprojects.
    for other_pbxproject in self.project._other_pbxprojects.keys():
      self.project.AddOrGetProjectReference(other_pbxproject)

    self.project.SortRemoteProductReferences()

    # Give everything an ID.
    self.project_file.ComputeIDs()

    # Make sure that no two objects in the project file have the same ID.  If
    # multiple objects wind up with the same ID, upon loading the file, Xcode
    # will only recognize one object (the last one in the file?) and the
    # results are unpredictable.
    self.project_file.EnsureNoIDCollisions()
392
  def Write(self):
    """Writes the .xcodeproj's project.pbxproj to disk, replacing the
    existing file only when the newly generated content differs."""
    # Write the project file to a temporary location first. Xcode watches for
    # changes to the project file and presents a UI sheet offering to reload
    # the project when it does change. However, in some cases, especially when
    # multiple projects are open or when Xcode is busy, things don't work so
    # seamlessly. Sometimes, Xcode is able to detect that a project file has
    # changed but can't unload it because something else is referencing it.
    # To mitigate this problem, and to avoid even having Xcode present the UI
    # sheet when an open project is rewritten for inconsequential changes, the
    # project file is written to a temporary file in the xcodeproj directory
    # first. The new temporary file is then compared to the existing project
    # file, if any. If they differ, the new file replaces the old; otherwise,
    # the new project file is simply deleted. Xcode properly detects a file
    # being renamed over an open project file as a change and so it remains
    # able to present the "project file changed" sheet under this system.
    # Writing to a temporary file first also avoids the possible problem of
    # Xcode rereading an incomplete project file.
    (output_fd, new_pbxproj_path) = \
        tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
                         dir=self.path)

    try:
      output_file = os.fdopen(output_fd, 'wb')

      self.project_file.Print(output_file)
      output_file.close()

      pbxproj_path = os.path.join(self.path, 'project.pbxproj')

      # Shallow comparison (False) is not enough here; the third argument
      # requests a full content comparison.
      same = False
      try:
        same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
      except OSError, e:
        # ENOENT simply means there is no old project file yet.
        if e.errno != errno.ENOENT:
          raise

      if same:
        # The new file is identical to the old one, just get rid of the new
        # one.
        os.unlink(new_pbxproj_path)
      else:
        # The new file is different from the old one, or there is no old one.
        # Rename the new file to the permanent name.
        #
        # tempfile.mkstemp uses an overly restrictive mode, resulting in a
        # file that can only be read by the owner, regardless of the umask.
        # There's no reason to not respect the umask here, which means that
        # an extra hoop is required to fetch it and reset the new file's mode.
        #
        # No way to get the umask without setting a new one? Set a safe one
        # and then set it back to the old value.
        umask = os.umask(077)
        os.umask(umask)

        os.chmod(new_pbxproj_path, 0666 & ~umask)
        os.rename(new_pbxproj_path, pbxproj_path)

    except Exception:
      # Don't leave turds behind. In fact, if this code was responsible for
      # creating the xcodeproj directory, get rid of that too.
      os.unlink(new_pbxproj_path)
      if self.created_dir:
        shutil.rmtree(self.path, True)
      raise
457
458
cached_xcode_version = None
def InstalledXcodeVersion():
  """Fetches the installed version of Xcode, returns empty string if it is
  unable to figure it out.

  The result of the first call is cached in the module-level
  cached_xcode_version and returned directly on subsequent calls.
  """

  global cached_xcode_version
  # PEP 8 idiom: use "is not None" rather than "not ... is None".
  if cached_xcode_version is not None:
    return cached_xcode_version

  # Default to an empty string
  cached_xcode_version = ''

  # Collect the xcodebuild's version information.
  try:
    import subprocess
    cmd = ['/usr/bin/xcodebuild', '-version']
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    xcodebuild_version_info = proc.communicate()[0]
    # Any error, return empty string
    if proc.returncode:
      xcodebuild_version_info = ''
  except OSError:
    # We failed to launch the tool
    xcodebuild_version_info = ''

  # Pull out the Xcode version itself.
  match_line = re.search('^Xcode (.*)$', xcodebuild_version_info, re.MULTILINE)
  if match_line:
    cached_xcode_version = match_line.group(1)
  # Done!
  return cached_xcode_version
490
491
def AddSourceToTarget(source, pbxp, xct):
  """Routes source into xct's "Compile Sources" phase when its extension
  marks it as compilable; otherwise just records it in pbxp's root group."""
  # TODO(mark): Perhaps this can be made a little bit fancier.
  compilable_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's']
  extension = posixpath.splitext(posixpath.basename(source))[1]
  if extension:
    # Drop the leading dot and normalize case before matching.
    extension = extension[1:].lower()

  if extension in compilable_extensions:
    xct.SourcesPhase().AddFile(source)
  else:
    # Files that aren't added to a sources build phase can still go into
    # the project file, just not as part of a build phase.
    pbxp.AddOrGetFileInRootGroup(source)
506
507
def AddResourceToTarget(resource, pbxp, xct):
  """Adds resource to xct's "Copy Bundle Resources" build phase."""
  # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
  # where it's used.
  resources_phase = xct.ResourcesPhase()
  resources_phase.AddFile(resource)
512
513
_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
def ExpandXcodeVariables(string, expansions):
  """Expands Xcode-style $(VARIABLES) in string per the expansions dict.

  In some rare cases, it is appropriate to expand Xcode variables when a
  project file is generated. For any substring $(VAR) in string, if VAR is a
  key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
  Any $(VAR) substring in string for which VAR is not a key in the expansions
  dict will remain in the returned string.
  """

  matches = _xcode_variable_re.findall(string)
  # re.findall returns a (possibly empty) list, never None, so test for
  # emptiness rather than comparing against None.
  if not matches:
    return string

  matches.reverse()
  for match in matches:
    (to_replace, variable) = match
    if variable not in expansions:
      continue

    # Use plain string replacement rather than re.sub: re.sub would
    # interpret backslash escapes (e.g. '\g') in the replacement text,
    # mangling expansion values that contain backslashes.
    string = string.replace(to_replace, expansions[variable])

  return string
539
540
def EscapeXCodeArgument(s):
  """Quotes s so XCode treats it as a single argument: backslash and
  double-quote literals are escaped, then the whole value is wrapped in
  double quotes so it is not split on spaces."""
  # Backslashes must be doubled first so the quote-escaping backslashes
  # added next are not themselves re-escaped.
  escaped = s.replace('\\', '\\\\').replace('"', '\\"')
  return '"%s"' % escaped
547
548
def GenerateOutput(target_list, target_dicts, data, params):
  """Main generator entry point: emits one .xcodeproj per .gyp build file,
  populates each with a target per spec, then finalizes and writes every
  project file.

  Args:
    target_list: list of qualified target names to generate.
    target_dicts: maps each qualified target name to its spec dict.
    data: maps build file paths to their parsed build file dicts.
    params: generator parameters; uses 'options' and the generator flags
        'xcode_parallel_builds' and 'xcode_serialize_all_test_runs'.
  """
  options = params['options']
  generator_flags = params.get('generator_flags', {})
  parallel_builds = generator_flags.get('xcode_parallel_builds', True)
  serialize_all_tests = \
      generator_flags.get('xcode_serialize_all_test_runs', True)
  # Create an XcodeProject (wrapping a PBXProject) for each .gyp file.
  xcode_projects = {}
  for build_file, build_file_dict in data.iteritems():
    (build_file_root, build_file_ext) = os.path.splitext(build_file)
    if build_file_ext != '.gyp':
      continue
    xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
    if options.generator_output:
      xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
    xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
    xcode_projects[build_file] = xcp
    pbxp = xcp.project

    if parallel_builds:
      pbxp.SetProperty('attributes',
                       {'BuildIndependentTargetsInParallel': 'YES'})

    # Included (.gypi) files are collected under a "Build" group in the
    # project's main group.
    main_group = pbxp.GetProperty('mainGroup')
    build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
    main_group.AppendChild(build_group)
    for included_file in build_file_dict['included_files']:
      build_group.AddOrGetFileByPath(included_file, False)

  xcode_targets = {}
  xcode_target_to_target_dict = {}
  for qualified_target in target_list:
    [build_file, target_name, toolset] = \
        gyp.common.ParseQualifiedTarget(qualified_target)

    spec = target_dicts[qualified_target]
    if spec['toolset'] != 'target':
      raise Exception(
          'Multiple toolsets not supported in xcode build (target %s)' %
          qualified_target)
    # The default configuration comes first so Xcode uses it by default.
    configuration_names = [spec['default_configuration']]
    for configuration_name in sorted(spec['configurations'].keys()):
      if configuration_name not in configuration_names:
        configuration_names.append(configuration_name)
    xcp = xcode_projects[build_file]
    pbxp = xcp.project

    # Set up the configurations for the target according to the list of names
    # supplied.
    xccl = CreateXCConfigurationList(configuration_names)

    # Create an XCTarget subclass object for the target. We use the type
    # with "+bundle" appended if the target has "mac_bundle" set.
    _types = {
      'executable': 'com.apple.product-type.tool',
      'loadable_module': 'com.apple.product-type.library.dynamic',
      'shared_library': 'com.apple.product-type.library.dynamic',
      'static_library': 'com.apple.product-type.library.static',
      'executable+bundle': 'com.apple.product-type.application',
      'loadable_module+bundle': 'com.apple.product-type.bundle',
      'shared_library+bundle': 'com.apple.product-type.framework',
    }

    target_properties = {
      'buildConfigurationList': xccl,
      'name': target_name,
    }

    type = spec['type']
    is_bundle = int(spec.get('mac_bundle', 0))
    if type != 'none':
      type_bundle_key = type
      if is_bundle:
        type_bundle_key += '+bundle'
      xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
      try:
        target_properties['productType'] = _types[type_bundle_key]
      except KeyError, e:
        gyp.common.ExceptionAppend(e, "-- unknown product type while "
                                   "writing target %s" % target_name)
        raise
    else:
      # 'none' targets become aggregate targets: no product is built.
      xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget

    target_product_name = spec.get('product_name')
    if target_product_name is not None:
      target_properties['productName'] = target_product_name

    xct = xctarget_type(target_properties, parent=pbxp,
                        force_outdir=spec.get('product_dir'),
                        force_prefix=spec.get('product_prefix'),
                        force_extension=spec.get('product_extension'))
    pbxp.AppendProperty('targets', xct)
    xcode_targets[qualified_target] = xct
    xcode_target_to_target_dict[xct] = spec

    # Xcode does not have a distinct type for loadable_modules that are pure
    # BSD targets (ie-unbundled). It uses the same setup as a shared_library
    # but the mach-o type is explictly set in the settings. So before we do
    # anything else, for this one case, we stuff in that one setting. This
    # would allow the other data in the spec to change it if need be.
    if type == 'loadable_module' and not is_bundle:
      xccl.SetBuildSetting('MACH_O_TYPE', 'mh_bundle')

    spec_actions = spec.get('actions', [])
    spec_rules = spec.get('rules', [])

    # Xcode has some "issues" with checking dependencies for the "Compile
    # sources" step with any source files/headers generated by actions/rules.
    # To work around this, if a target is building anything directly (not
    # type "none"), then a second target as used to run the GYP actions/rules
    # and is made a dependency of this target. This way the work is done
    # before the dependency checks for what should be recompiled.
    support_xct = None
    if type != 'none' and (spec_actions or spec_rules):
      support_xccl = CreateXCConfigurationList(configuration_names);
      support_target_properties = {
        'buildConfigurationList': support_xccl,
        'name': target_name + ' Support',
      }
      if target_product_name:
        support_target_properties['productName'] = \
            target_product_name + ' Support'
      support_xct = \
          gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
                                                parent=pbxp)
      pbxp.AppendProperty('targets', support_xct)
      xct.AddDependency(support_xct)
      # Hang the support target off the main target so it can be tested/found
      # by the generator during Finalize.
      xct.support_target = support_xct

    prebuild_index = 0

    # Add custom shell script phases for "actions" sections.
    for action in spec_actions:
      # There's no need to write anything into the script to ensure that the
      # output directories already exist, because Xcode will look at the
      # declared outputs and automatically ensure that they exist for us.

      # Do we have a message to print when this action runs?
      message = action.get('message')
      if message:
        message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
      else:
        message = ''

      # Turn the list into a string that can be passed to a shell.
      action_string = gyp.common.EncodePOSIXShellList(action['action'])

      # Convert Xcode-type variable references to sh-compatible environment
      # variable references.
      message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
      action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
        action_string)

      script = ''
      # Include the optional message
      if message_sh:
        script += message_sh + '\n'
      # Be sure the script runs in exec, and that if exec fails, the script
      # exits signalling an error.
      script += 'exec ' + action_string_sh + '\nexit 1\n'
      ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
            'inputPaths': action['inputs'],
            'name': 'Action "' + action['action_name'] + '"',
            'outputPaths': action['outputs'],
            'shellScript': script,
            'showEnvVarsInLog': 0,
          })

      if support_xct:
        support_xct.AppendProperty('buildPhases', ssbp)
      else:
        # TODO(mark): this assumes too much knowledge of the internals of
        # xcodeproj_file; some of these smarts should move into xcodeproj_file
        # itself.
        xct._properties['buildPhases'].insert(prebuild_index, ssbp)
        prebuild_index = prebuild_index + 1

      # TODO(mark): Should verify that at most one of these is specified.
      if int(action.get('process_outputs_as_sources', False)):
        for output in action['outputs']:
          AddSourceToTarget(output, pbxp, xct)

      if int(action.get('process_outputs_as_mac_bundle_resources', False)):
        for output in action['outputs']:
          AddResourceToTarget(output, pbxp, xct)

    # tgt_mac_bundle_resources holds the list of bundle resources so
    # the rule processing can check against it.
    if is_bundle:
      tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
    else:
      tgt_mac_bundle_resources = []

    # Add custom shell script phases driving "make" for "rules" sections.
    #
    # Xcode's built-in rule support is almost powerful enough to use directly,
    # but there are a few significant deficiencies that render them unusable.
    # There are workarounds for some of its inadequacies, but in aggregate,
    # the workarounds added complexity to the generator, and some workarounds
    # actually require input files to be crafted more carefully than I'd like.
    # Consequently, until Xcode rules are made more capable, "rules" input
    # sections will be handled in Xcode output by shell script build phases
    # performed prior to the compilation phase.
    #
    # The following problems with Xcode rules were found. The numbers are
    # Apple radar IDs. I hope that these shortcomings are addressed, I really
    # liked having the rules handled directly in Xcode during the period that
    # I was prototyping this.
    #
    # 6588600 Xcode compiles custom script rule outputs too soon, compilation
    #         fails. This occurs when rule outputs from distinct inputs are
    #         interdependent. The only workaround is to put rules and their
    #         inputs in a separate target from the one that compiles the rule
    #         outputs. This requires input file cooperation and it means that
    #         process_outputs_as_sources is unusable.
    # 6584932 Need to declare that custom rule outputs should be excluded from
    #         compilation. A possible workaround is to lie to Xcode about a
    #         rule's output, giving it a dummy file it doesn't know how to
    #         compile. The rule action script would need to touch the dummy.
    # 6584839 I need a way to declare additional inputs to a custom rule.
    #         A possible workaround is a shell script phase prior to
    #         compilation that touches a rule's primary input files if any
    #         would-be additional inputs are newer than the output. Modifying
    #         the source tree - even just modification times - feels dirty.
    # 6564240 Xcode "custom script" build rules always dump all environment
    #         variables. This is a low-prioroty problem and is not a
    #         show-stopper.
    rules_by_ext = {}
    for rule in spec_rules:
      rules_by_ext[rule['extension']] = rule

      # First, some definitions:
      #
      # A "rule source" is a file that was listed in a target's "sources"
      # list and will have a rule applied to it on the basis of matching the
      # rule's "extensions" attribute. Rule sources are direct inputs to
      # rules.
      #
      # Rule definitions may specify additional inputs in their "inputs"
      # attribute. These additional inputs are used for dependency tracking
      # purposes.
      #
      # A "concrete output" is a rule output with input-dependent variables
      # resolved. For example, given a rule with:
      #   'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
      # if the target's "sources" list contained "one.ext" and "two.ext",
      # the "concrete output" for rule input "two.ext" would be "two.cc". If
      # a rule specifies multiple outputs, each input file that the rule is
      # applied to will have the same number of concrete outputs.
      #
      # If any concrete outputs are outdated or missing relative to their
      # corresponding rule_source or to any specified additional input, the
      # rule action must be performed to generate the concrete outputs.

      # concrete_outputs_by_rule_source will have an item at the same index
      # as the rule['rule_sources'] that it corresponds to. Each item is a
      # list of all of the concrete outputs for the rule_source.
      concrete_outputs_by_rule_source = []

      # concrete_outputs_all is a flat list of all concrete outputs that this
      # rule is able to produce, given the known set of input files
      # (rule_sources) that apply to it.
      concrete_outputs_all = []

      # messages & actions are keyed by the same indices as rule['rule_sources']
      # and concrete_outputs_by_rule_source. They contain the message and
      # action to perform after resolving input-dependent variables. The
      # message is optional, in which case None is stored for each rule source.
      messages = []
      actions = []

      for rule_source in rule.get('rule_sources', []):
        rule_source_basename = posixpath.basename(rule_source)
        (rule_source_root, rule_source_ext) = \
            posixpath.splitext(rule_source_basename)

        # These are the same variable names that Xcode uses for its own native
        # rule support. Because Xcode's rule engine is not being used, they
        # need to be expanded as they are written to the makefile.
        rule_input_dict = {
          'INPUT_FILE_BASE': rule_source_root,
          'INPUT_FILE_SUFFIX': rule_source_ext,
          'INPUT_FILE_NAME': rule_source_basename,
          'INPUT_FILE_PATH': rule_source,
        }

        concrete_outputs_for_this_rule_source = []
        for output in rule.get('outputs', []):
          # Fortunately, Xcode and make both use $(VAR) format for their
          # variables, so the expansion is the only transformation necessary.
          # Any remaning $(VAR)-type variables in the string can be given
          # directly to make, which will pick up the correct settings from
          # what Xcode puts into the environment.
          concrete_output = ExpandXcodeVariables(output, rule_input_dict)
          concrete_outputs_for_this_rule_source.append(concrete_output)

          # Add all concrete outputs to the project.
          pbxp.AddOrGetFileInRootGroup(concrete_output)

        concrete_outputs_by_rule_source.append( \
            concrete_outputs_for_this_rule_source)
        concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)

        # TODO(mark): Should verify that at most one of these is specified.
        if int(rule.get('process_outputs_as_sources', False)):
          for output in concrete_outputs_for_this_rule_source:
            AddSourceToTarget(output, pbxp, xct)

        # If the file came from the mac_bundle_resources list or if the rule
        # is marked to process outputs as bundle resource, do so.
        was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
        if was_mac_bundle_resource or \
            int(rule.get('process_outputs_as_mac_bundle_resources', False)):
          for output in concrete_outputs_for_this_rule_source:
            AddResourceToTarget(output, pbxp, xct)

        # Do we have a message to print when this rule runs?
        message = rule.get('message')
        if message:
          message = gyp.common.EncodePOSIXShellArgument(message)
          message = '@echo note: ' + ExpandXcodeVariables(message,
                                                          rule_input_dict)
        messages.append(message)

        # Turn the list into a string that can be passed to a shell.
        action_string = gyp.common.EncodePOSIXShellList(rule['action'])

        action = ExpandXcodeVariables(action_string, rule_input_dict)
        actions.append(action)

      if len(concrete_outputs_all) > 0:
        # TODO(mark): There's a possibilty for collision here. Consider
        # target "t" rule "A_r" and target "t_A" rule "r".
        makefile_name = '%s_%s.make' % (target_name, rule['rule_name'])
        makefile_path = os.path.join(xcode_projects[build_file].path,
                                     makefile_name)
        # TODO(mark): try/close? Write to a temporary file and swap it only
        # if it's got changes?
        makefile = open(makefile_path, 'wb')

        # make will build the first target in the makefile by default. By
        # convention, it's called "all". List all (or at least one)
        # concrete output for each rule source as a prerequisite of the "all"
        # target.
        makefile.write('all: \\\n')
        for concrete_output_index in \
            xrange(0, len(concrete_outputs_by_rule_source)):
          # Only list the first (index [0]) concrete output of each input
          # in the "all" target. Otherwise, a parallel make (-j > 1) would
          # attempt to process each input multiple times simultaneously.
          # Otherwise, "all" could just contain the entire list of
          # concrete_outputs_all.
          concrete_output = \
              concrete_outputs_by_rule_source[concrete_output_index][0]
          if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
            eol = ''
          else:
            eol = ' \\'
          makefile.write(' %s%s\n' % (concrete_output, eol))

        for (rule_source, concrete_outputs, message, action) in \
            zip(rule['rule_sources'], concrete_outputs_by_rule_source,
                messages, actions):
          makefile.write('\n')

          # Add a rule that declares it can build each concrete output of a
          # rule source. Collect the names of the directories that are
          # required.
          concrete_output_dirs = []
          for concrete_output_index in xrange(0, len(concrete_outputs)):
            concrete_output = concrete_outputs[concrete_output_index]
            if concrete_output_index == 0:
              bol = ''
            else:
              bol = ' '
            makefile.write('%s%s \\\n' % (bol, concrete_output))

            concrete_output_dir = posixpath.dirname(concrete_output)
            if (concrete_output_dir and
                concrete_output_dir not in concrete_output_dirs):
              concrete_output_dirs.append(concrete_output_dir)

          makefile.write(' : \\\n')

          # The prerequisites for this rule are the rule source itself and
          # the set of additional rule inputs, if any.
          prerequisites = [rule_source]
          prerequisites.extend(rule.get('inputs', []))
          for prerequisite_index in xrange(0, len(prerequisites)):
            prerequisite = prerequisites[prerequisite_index]
            if prerequisite_index == len(prerequisites) - 1:
              eol = ''
            else:
              eol = ' \\'
            makefile.write(' %s%s\n' % (prerequisite, eol))

          # Make sure that output directories exist before executing the rule
          # action.
          # TODO(mark): quote the list of concrete_output_dirs.
          if len(concrete_output_dirs) > 0:
            makefile.write('\tmkdir -p %s\n' % ' '.join(concrete_output_dirs))

          # The rule message and action have already had the necessary variable
          # substitutions performed.
          if message:
            makefile.write('\t%s\n' % message)
          makefile.write('\t%s\n' % action)

        makefile.close()

        # It might be nice to ensure that needed output directories exist
        # here rather than in each target in the Makefile, but that wouldn't
        # work if there ever was a concrete output that had an input-dependent
        # variable anywhere other than in the leaf position.

        # Don't declare any inputPaths or outputPaths. If they're present,
        # Xcode will provide a slight optimization by only running the script
        # phase if any output is missing or outdated relative to any input.
        # Unfortunately, it will also assume that all outputs are touched by
        # the script, and if the outputs serve as files in a compilation
        # phase, they will be unconditionally rebuilt. Since make might not
        # rebuild everything that could be declared here as an output, this
        # extra compilation activity is unnecessary. With inputPaths and
        # outputPaths not supplied, make will always be called, but it knows
        # enough to not do anything when everything is up-to-date.

        # To help speed things up, pass -j COUNT to make so it does some work
        # in parallel. Don't use ncpus because Xcode will build ncpus targets
        # in parallel and if each target happens to have a rules step, there
        # would be ncpus^2 things going. With a machine that has 2 quad-core
        # Xeons, a build can quickly run out of processes based on
        # scheduling/other tasks, and randomly failing builds are no good.
        script = \
"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
if [ "${JOB_COUNT}" -gt 4 ]; then
  JOB_COUNT=4
fi
exec "${DEVELOPER_BIN_DIR}/make" -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
exit 1
""" % makefile_name
        ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
              'name': 'Rule "' + rule['rule_name'] + '"',
              'shellScript': script,
              'showEnvVarsInLog': 0,
            })

        if support_xct:
          support_xct.AppendProperty('buildPhases', ssbp)
        else:
          # TODO(mark): this assumes too much knowledge of the internals of
          # xcodeproj_file; some of these smarts should move into xcodeproj_file
          # itself.
          xct._properties['buildPhases'].insert(prebuild_index, ssbp)
          prebuild_index = prebuild_index + 1

      # Extra rule inputs also go into the project file. Concrete outputs were
      # already added when they were computed.
      for group in ['inputs', 'inputs_excluded']:
        for item in rule.get(group, []):
          pbxp.AddOrGetFileInRootGroup(item)

    # Add "sources".
    for source in spec.get('sources', []):
      (source_root, source_extension) = posixpath.splitext(source)
      if source_extension[1:] not in rules_by_ext:
        # AddSourceToTarget will add the file to a root group if it's not
        # already there.
        AddSourceToTarget(source, pbxp, xct)
      else:
        # Rule-matched sources only go into the project; the rule's build
        # phase consumes them.
        pbxp.AddOrGetFileInRootGroup(source)

    # Add "mac_bundle_resources" if it's a bundle of any type.
    if is_bundle:
      for resource in tgt_mac_bundle_resources:
        (resource_root, resource_extension) = posixpath.splitext(resource)
        if resource_extension[1:] not in rules_by_ext:
          AddResourceToTarget(resource, pbxp, xct)
        else:
          pbxp.AddOrGetFileInRootGroup(resource)

    # Add "copies".
    for copy_group in spec.get('copies', []):
      pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
            'name': 'Copy to ' + copy_group['destination']
          },
          parent=xct)
      dest = copy_group['destination']
      if dest[0] not in ('/', '$'):
        # Relative paths are relative to $(SRCROOT).
        dest = '$(SRCROOT)/' + dest
      pbxcp.SetDestination(dest)

      # TODO(mark): The usual comment about this knowing too much about
      # gyp.xcodeproj_file internals applies.
      xct._properties['buildPhases'].insert(prebuild_index, pbxcp)

      for file in copy_group['files']:
        pbxcp.AddFile(file)

    # Excluded files can also go into the project file.
    for key in ['sources', 'mac_bundle_resources']:
      excluded_key = key + '_excluded'
      for item in spec.get(excluded_key, []):
        pbxp.AddOrGetFileInRootGroup(item)

    # So can "inputs" and "outputs" sections of "actions" groups.
    for action in spec.get('actions', []):
      groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
      for group in groups:
        for item in action.get(group, []):
          # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
          # sources.
          if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
            pbxp.AddOrGetFileInRootGroup(item)

    for postbuild in spec.get('postbuilds', []):
      action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
      script = 'exec ' + action_string_sh + '\nexit 1\n'
      ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
            'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
            'shellScript': script,
            'showEnvVarsInLog': 0,
          })
      xct.AppendProperty('buildPhases', ssbp)

    # Add dependencies before libraries, because adding a dependency may imply
    # adding a library. It's preferable to keep dependencies listed first
    # during a link phase so that they can override symbols that would
    # otherwise be provided by libraries, which will usually include system
    # libraries. On some systems, ld is finicky and even requires the
    # libraries to be ordered in such a way that unresolved symbols in
    # earlier-listed libraries may only be resolved by later-listed libraries.
    # The Mac linker doesn't work that way, but other platforms do, and so
    # their linker invocations need to be constructed in this way. There's
    # no compelling reason for Xcode's linker invocations to differ.

    if 'dependencies' in spec:
      for dependency in spec['dependencies']:
        xct.AddDependency(xcode_targets[dependency])
        # The support project also gets the dependencies (in case they are
        # needed for the actions/rules to work).
        if support_xct:
          support_xct.AddDependency(xcode_targets[dependency])

    if 'libraries' in spec:
      for library in spec['libraries']:
        xct.FrameworksPhase().AddFile(library)
        # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
        # I wish Xcode handled this automatically.
        # TODO(mark): this logic isn't right. There are certain directories
        # that are always searched, we should check to see if the library is
        # in one of those directories, and if not, we should do the
        # AppendBuildSetting thing.
        if not posixpath.isabs(library) and not library.startswith('$'):
          # TODO(mark): Need to check to see if library_dir is already in
          # LIBRARY_SEARCH_PATHS.
          library_dir = posixpath.dirname(library)
          xct.AppendBuildSetting('LIBRARY_SEARCH_PATHS', library_dir)

    # Apply per-configuration build settings from the spec.
    for configuration_name in configuration_names:
      configuration = spec['configurations'][configuration_name]
      xcbc = xct.ConfigurationNamed(configuration_name)
      for include_dir in configuration.get('mac_framework_dirs', []):
        xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
      for include_dir in configuration.get('include_dirs', []):
        xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
      if 'defines' in configuration:
        for define in configuration['defines']:
          set_define = EscapeXCodeArgument(define)
          xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
      if 'xcode_settings' in configuration:
        for xck, xcv in configuration['xcode_settings'].iteritems():
          xcbc.SetBuildSetting(xck, xcv)

  build_files = []
  for build_file, build_file_dict in data.iteritems():
    if build_file.endswith('.gyp'):
      build_files.append(build_file)

  # Finalize1 (sorting) must complete for every project before Finalize2
  # (cross-project reference updates) runs on any of them.
  for build_file in build_files:
    xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)

  for build_file in build_files:
    xcode_projects[build_file].Finalize2(xcode_targets,
                                         xcode_target_to_target_dict)

  for build_file in build_files:
    xcode_projects[build_file].Write()
+0
-2220
third_party/gyp/pylib/gyp/input.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2010 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 from compiler.ast import Const
7 from compiler.ast import Dict
8 from compiler.ast import Discard
9 from compiler.ast import List
10 from compiler.ast import Module
11 from compiler.ast import Node
12 from compiler.ast import Stmt
13 import compiler
14 import copy
15 import gyp.common
16 import optparse
17 import os.path
18 import re
19 import shlex
20 import subprocess
21 import sys
22
23
# A list of types that are treated as linkable.
linkable_types = ['executable', 'shared_library', 'loadable_module']

# A list of sections that contain links to other targets.
dependency_sections = ['dependencies', 'export_dependent_settings']

# base_path_sections is a list of sections defined by GYP that contain
# pathnames.  The generators can provide more keys, the two lists are merged
# into path_sections, but you should call IsPathSection instead of using either
# list directly.
base_path_sections = [
  'destination',
  'files',
  'include_dirs',
  'inputs',
  'libraries',
  'outputs',
  'sources',
]
# Populated elsewhere with base_path_sections plus generator-specific
# additions; query it through IsPathSection() rather than directly.
path_sections = []
44
45
def IsPathSection(section):
  """Returns True if |section| is one whose values are treated as paths."""
  # Merge/append/conditional markers ('=', '+', '?', '!') apply to the
  # underlying section name, so peel them off before testing.  '/' is
  # deliberately absent from this list, because there's no way for a
  # regular expression to be treated as a path.
  while section.endswith(('=', '+', '?', '!')):
    section = section[:-1]

  if section in path_sections:
    return True
  for suffix in ('_dir', '_dirs', '_file', '_files', '_path', '_paths'):
    if section.endswith(suffix):
      return True
  return False
59
60
# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations.  It is merged
# with a list that can come from the generator to
# create non_configuration_keys.
base_non_configuration_keys = [
  # Sections that must exist inside targets and not configurations.
  'actions',
  'configurations',
  'copies',
  'default_configuration',
  'dependencies',
  'dependencies_original',
  'link_languages',
  'libraries',
  'postbuilds',
  'product_dir',
  'product_extension',
  'product_name',
  'product_prefix',
  'rules',
  'run_as',
  'sources',
  'suppress_wildcard',
  'target_name',
  'test',
  'toolset',
  'toolsets',
  'type',
  'variants',

  # Sections that can be found inside targets or configurations, but that
  # should not be propagated from targets into their configurations.
  'variables',
]
# Populated elsewhere: base_non_configuration_keys plus any
# generator-provided additions.
non_configuration_keys = []

# Keys that do not belong inside a configuration dictionary.
invalid_configuration_keys = [
  'actions',
  'all_dependent_settings',
  'configurations',
  'dependencies',
  'direct_dependent_settings',
  'libraries',
  'link_settings',
  'sources',
  'target_name',
  'type',
]

# Controls how the generator want the build file paths.
absolute_build_file_paths = False

# Controls whether or not the generator supports multiple toolsets.
multiple_toolsets = False
116
117
def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
  """Return a list of all build files included into build_file_path.

  The returned list will contain build_file_path as well as all other files
  that it included, either directly or indirectly.  Note that the list may
  contain files that were included into a conditional section that evaluated
  to false and was not merged into build_file_path's dict.

  aux_data is a dict containing a key for each build file or included build
  file.  Those keys provide access to dicts whose "included" keys contain
  lists of all other files included by the build file.

  included should be left at its default None value by external callers.  It
  is used for recursion.

  The returned list will not contain any duplicate entries.  Each build file
  in the list will be relative to the current directory.
  """
  if included is None:
    included = []

  # A file already on the list was fully processed when first seen; stop
  # here so cycles and diamonds don't duplicate entries or recurse forever.
  if build_file_path not in included:
    included.append(build_file_path)
    for sub_include in aux_data[build_file_path].get('included', []):
      GetIncludedBuildFiles(sub_include, aux_data, included)

  return included
149
150
def CheckedEval(file_contents):
  """Return the eval of a gyp file.

  The gyp file is restricted to dictionaries and lists only, and
  repeated keys are not allowed.

  Note that this is slower than eval() is.
  """

  # Parse with the (Python 2) compiler module so the tree can be validated
  # before anything is evaluated.
  ast = compiler.parse(file_contents)
  assert isinstance(ast, Module)
  c1 = ast.getChildren()
  # A Module's children are (docstring, Stmt); a gyp file has no docstring.
  assert c1[0] is None
  assert isinstance(c1[1], Stmt)
  c2 = c1[1].getChildren()
  # The file must consist of a single bare expression (Discard) holding the
  # top-level dict literal.
  assert isinstance(c2[0], Discard)
  c3 = c2[0].getChildren()
  assert len(c3) == 1
  return CheckNode(c3[0], [])
170
171
def CheckNode(node, keypath):
  # Recursively converts a compiler-module AST node into plain dicts, lists,
  # and constants, rejecting repeated dict keys and any node type other than
  # Dict/List/Const.  keypath records the position in the tree so error
  # messages can pinpoint the offending entry.
  if isinstance(node, Dict):
    c = node.getChildren()
    dict = {}
    # A Dict node's children alternate key, value, key, value, ...
    for n in range(0, len(c), 2):
      assert isinstance(c[n], Const)
      key = c[n].getChildren()[0]
      if key in dict:
        raise KeyError, "Key '" + key + "' repeated at level " + \
              repr(len(keypath) + 1) + " with key path '" + \
              '.'.join(keypath) + "'"
      kp = list(keypath)  # Make a copy of the list for descending this node.
      kp.append(key)
      dict[key] = CheckNode(c[n + 1], kp)
    return dict
  elif isinstance(node, List):
    c = node.getChildren()
    children = []
    for index, child in enumerate(c):
      kp = list(keypath)  # Copy list.
      kp.append(repr(index))
      children.append(CheckNode(child, kp))
    return children
  elif isinstance(node, Const):
    return node.getChildren()[0]
  else:
    raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \
          "': " + repr(node)
200
201
202 def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
203 is_target, check):
204 if build_file_path in data:
205 return data[build_file_path]
206
207 if os.path.exists(build_file_path):
208 build_file_contents = open(build_file_path).read()
209 else:
210 raise Exception("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
211
212 build_file_data = None
213 try:
214 if check:
215 build_file_data = CheckedEval(build_file_contents)
216 else:
217 build_file_data = eval(build_file_contents, {'__builtins__': None},
218 None)
219 except SyntaxError, e:
220 e.filename = build_file_path
221 raise
222 except Exception, e:
223 gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
224 raise
225
226 data[build_file_path] = build_file_data
227 aux_data[build_file_path] = {}
228
229 # Scan for includes and merge them in.
230 try:
231 if is_target:
232 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
233 aux_data, variables, includes, check)
234 else:
235 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
236 aux_data, variables, None, check)
237 except Exception, e:
238 gyp.common.ExceptionAppend(e,
239 'while reading includes of ' + build_file_path)
240 raise
241
242 return build_file_data
243
244
def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
                                  variables, includes, check):
  """Merges every file named by |includes| and by subdict['includes'] into
  subdict, then recurses into nested dicts and lists looking for more."""
  includes_list = []
  if includes is not None:
    includes_list.extend(includes)
  for include in subdict.get('includes', []):
    # "include" is specified relative to subdict_path, so compute the real
    # path to include by appending the provided "include" to the directory
    # in which subdict_path resides.
    includes_list.append(
        os.path.normpath(os.path.join(os.path.dirname(subdict_path), include)))
  # Unhook the includes list, it's no longer needed.
  if 'includes' in subdict:
    del subdict['includes']

  # Merge in the included files, recording each one in aux_data.
  for include in includes_list:
    aux_data[subdict_path].setdefault('included', []).append(include)

    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'" % include)

    MergeDicts(subdict,
               LoadOneBuildFile(include, data, aux_data, variables, None,
                                False, check),
               subdict_path, include)

  # Recurse into subdictionaries.
  for key, value in subdict.iteritems():
    if value.__class__ == dict:
      LoadBuildFileIncludesIntoDict(value, subdict_path, data, aux_data,
                                    variables, None, check)
    elif value.__class__ == list:
      LoadBuildFileIncludesIntoList(value, subdict_path, data, aux_data,
                                    variables, check)
282
283
# This recurses into lists so that it can look for dicts.
def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
                                  variables, check):
  # A list cannot carry an 'includes' key itself; just look for nested
  # dicts (and deeper lists) that might.
  for entry in sublist:
    entry_class = entry.__class__
    if entry_class == dict:
      LoadBuildFileIncludesIntoDict(entry, sublist_path, data, aux_data,
                                    variables, None, check)
    elif entry_class == list:
      LoadBuildFileIncludesIntoList(entry, sublist_path, data, aux_data,
                                    variables, check)
294
def ProcessToolsetsInDict(data):
  """Expands each target in data['targets'] into one copy per toolset.

  Recurses into condition entries since they can contain targets as well.
  When multiple_toolsets is off, every target is pinned to the 'target'
  toolset.  The original target dict is reused for the first toolset;
  additional toolsets get deep copies, appended ahead of it.
  """
  if 'targets' in data:
    expanded_targets = []
    for target in data['targets']:
      global multiple_toolsets
      if multiple_toolsets:
        toolsets = target.get('toolsets', ['target'])
      else:
        toolsets = ['target']
      if toolsets:
        # Optimization: only do copies if more than one toolset is specified.
        for extra_toolset in toolsets[1:]:
          clone = copy.deepcopy(target)
          clone['toolset'] = extra_toolset
          expanded_targets.append(clone)
        target['toolset'] = toolsets[0]
        expanded_targets.append(target)
    data['targets'] = expanded_targets
  if 'conditions' in data:
    for condition in data['conditions']:
      if isinstance(condition, list):
        for condition_dict in condition[1:]:
          ProcessToolsetsInDict(condition_dict)
321
322
# TODO(mark): I don't love this name.  It just means that it's going to load
# a build file that contains targets and is expected to provide a targets dict
# that contains the targets...
def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
                        depth, check):
  """Loads a targets-bearing build file and, recursively, its dependencies.

  Evaluates the file, records it in data['target_build_files'], merges
  target_defaults into each target, applies early variable/condition
  processing, and loads every build file referenced from 'dependencies'
  sections.  Returns |data|.
  """
  global absolute_build_file_paths

  # If depth is set, predefine the DEPTH variable to be a relative path from
  # this build file's directory to the directory identified by depth.
  if depth:
    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
    # temporary measure. This should really be addressed by keeping all paths
    # in POSIX until actual project generation.
    d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
    if d == '':
      variables['DEPTH'] = '.'
    else:
      variables['DEPTH'] = d.replace('\\', '/')

  # If the generator needs absolue paths, then do so.
  if absolute_build_file_paths:
    build_file_path = os.path.abspath(build_file_path)

  if build_file_path in data['target_build_files']:
    # Already loaded.
    return
  data['target_build_files'].add(build_file_path)

  gyp.DebugOutput(gyp.DEBUG_INCLUDES,
                  "Loading Target Build File '%s'" % build_file_path)

  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables,
                                     includes, True, check)

  # Store DEPTH for later use in generators.
  build_file_data['_DEPTH'] = depth

  # Set up the included_files key indicating which .gyp files contributed to
  # this target dict.
  if 'included_files' in build_file_data:
    raise KeyError, build_file_path + ' must not contain included_files key'

  included = GetIncludedBuildFiles(build_file_path, aux_data)
  build_file_data['included_files'] = []
  for included_file in included:
    # included_file is relative to the current directory, but it needs to
    # be made relative to build_file_path's directory.
    included_relative = \
        gyp.common.RelativePath(included_file,
                                os.path.dirname(build_file_path))
    build_file_data['included_files'].append(included_relative)

  # Expand targets into one entry per toolset before any merging happens.
  ProcessToolsetsInDict(build_file_data)

  # Apply "pre"/"early" variable expansions and condition evaluations.
  ProcessVariablesAndConditionsInDict(build_file_data, False, variables,
                                      build_file_path)

  # Look at each project's target_defaults dict, and merge settings into
  # targets.
  if 'target_defaults' in build_file_data:
    index = 0
    if 'targets' in build_file_data:
      while index < len(build_file_data['targets']):
        # This procedure needs to give the impression that target_defaults is
        # used as defaults, and the individual targets inherit from that.
        # The individual targets need to be merged into the defaults.  Make
        # a deep copy of the defaults for each target, merge the target dict
        # as found in the input file into that copy, and then hook up the
        # copy with the target-specific data merged into it as the replacement
        # target dict.
        old_target_dict = build_file_data['targets'][index]
        new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
        MergeDicts(new_target_dict, old_target_dict,
                   build_file_path, build_file_path)
        build_file_data['targets'][index] = new_target_dict
        index = index + 1
    else:
      raise Exception, \
            "Unable to find targets in build file %s" % build_file_path

    # No longer needed.
    del build_file_data['target_defaults']

  # Look for dependencies.  This means that dependency resolution occurs
  # after "pre" conditionals and variable expansion, but before "post" -
  # in other words, you can't put a "dependencies" section inside a "post"
  # conditional within a target.

  if 'targets' in build_file_data:
    for target_dict in build_file_data['targets']:
      if 'dependencies' not in target_dict:
        continue
      for dependency in target_dict['dependencies']:
        other_build_file = \
            gyp.common.ResolveTarget(build_file_path, dependency, None)[0]
        try:
          LoadTargetBuildFile(other_build_file, data, aux_data, variables,
                              includes, depth, check)
        except Exception, e:
          gyp.common.ExceptionAppend(
            e, 'while loading dependencies of %s' % build_file_path)
          raise

  return data
428
429
def FindEnclosingBracketGroup(input):
  """Finds the span of the first bracket group in |input|.

  Looks for the bracket that matches the first bracket seen, and returns
  the start and end as a tuple.  For example, if the input is something
  like "<(foo <(bar)) blah", then it would return (1, 13), indicating the
  entire string except for the leading "<" and trailing " blah".  Returns
  (-1, -1) for unbalanced or mismatched brackets, or none at all.
  """
  matching_open = {'}': '{',
                   ']': '[',
                   ')': '(', }
  open_brackets = ('{', '[', '(')
  close_brackets = ('}', ']', ')')
  stack = []
  start = -1
  for pos, char in enumerate(input):
    if char in open_brackets:
      stack.append(char)
      if start == -1:
        start = pos
    if char in close_brackets:
      # A closer with an empty stack, or one that doesn't pair with the
      # most recent opener, means the input is malformed.
      if not stack or stack.pop() != matching_open[char]:
        return (-1, -1)
      if not stack:
        return (start, pos + 1)
  return (-1, -1)
458
459
# Matches '0' or a non-zero decimal integer with no leading zeros and an
# optional leading minus sign.
canonical_int_re = re.compile('^(0|-?[1-9][0-9]*)$')


def IsStrCanonicalInt(string):
  """Returns True if |string| is in its canonical integer form.

  The canonical form is such that str(int(string)) == string.
  """
  return isinstance(string, str) and canonical_int_re.match(string) is not None
472
473
# Matches an early ('<'-prefixed) GYP expansion reference: 'replace' is the
# full matched text, 'type' captures '<' plus an optional '!', '@', '!@', or
# '|' modifier, 'is_array' is non-empty when the contents open a Python-style
# list, and 'content' is the variable name or command inside the parens.
early_variable_re = re.compile('(?P<replace>(?P<type><((!?@?)|\|)?)'
                               '\((?P<is_array>\s*\[?)'
                               '(?P<content>.*?)(\]?)\))')
# Identical structure, but for late ('>'-prefixed) expansions.
late_variable_re = re.compile('(?P<replace>(?P<type>>((!?@?)|\|)?)'
                              '\((?P<is_array>\s*\[?)'
                              '(?P<content>.*?)(\]?)\))')

# Global cache of results from running commands so they don't have to be run
# more than once.
cached_command_results = {}
484
485
def FixupPlatformCommand(cmd):
  """Rewrites a POSIX-flavored command for the host platform.

  On Windows a leading 'cat ' is replaced with 'type '; on every other
  platform the command is returned untouched.  cmd may be a string or an
  argv-style list (only the first element is rewritten).
  """
  if sys.platform != 'win32':
    return cmd
  if type(cmd) == list:
    return [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
  return re.sub('^cat ', 'type ', cmd)
493
494
def ExpandVariables(input, is_late, variables, build_file):
  """Expands GYP variable and command references in |input|.

  Handles <(var)/>(var) lookups, <!(cmd)/>!(cmd) command execution,
  <|(file ...) list-file generation, and the @-modified list-context
  variants.  is_late selects the '<' (early) or '>' (late) phase.
  Returns a str, int, or list; strings that are canonical integers are
  converted to int on the way out.
  """
  # Look for the pattern that gets expanded into variables
  if not is_late:
    variable_re = early_variable_re
    expansion_symbol = '<'
  else:
    variable_re = late_variable_re
    expansion_symbol = '>'

  input_str = str(input)
  # Do a quick scan to determine if an expensive regex search is warranted.
  if expansion_symbol in input_str:
    # Get the entire list of matches as a list of MatchObject instances.
    # (using findall here would return strings instead of MatchObjects).
    matches = [match for match in variable_re.finditer(input_str)]
  else:
    matches = None

  output = input_str
  if matches:
    # Reverse the list of matches so that replacements are done right-to-left.
    # That ensures that earlier replacements won't mess up the string in a
    # way that causes later calls to find the earlier substituted text instead
    # of what's intended for replacement.
    matches.reverse()
    for match_group in matches:
      match = match_group.groupdict()
      gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                      "Matches: %s" % repr(match))
      # match['replace'] is the substring to look for, match['type']
      # is the character code for the replacement type (< > <! >! <| >| <@
      # >@ <!@ >!@), match['is_array'] contains a '[' for command
      # arrays, and match['content'] is the name of the variable (< >)
      # or command to run (<! >!).

      # run_command is true if a ! variant is used.
      run_command = '!' in match['type']

      # file_list is true if a | variant is used.
      file_list = '|' in match['type']

      # Capture these now so we can adjust them later.
      replace_start = match_group.start('replace')
      replace_end = match_group.end('replace')

      # Find the ending paren, and re-evaluate the contained string.
      (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])

      # Adjust the replacement range to match the entire command
      # found by FindEnclosingBracketGroup (since the variable_re
      # probably doesn't match the entire command if it contained
      # nested variables).
      replace_end = replace_start + c_end

      # Find the "real" replacement, matching the appropriate closing
      # paren, and adjust the replacement start and end.
      replacement = input_str[replace_start:replace_end]

      # Figure out what the contents of the variable parens are.
      contents_start = replace_start + c_start + 1
      contents_end = replace_end - 1
      contents = input_str[contents_start:contents_end]

      # Do filter substitution now for <|().
      # Admittedly, this is different than the evaluation order in other
      # contexts. However, since filtration has no chance to run on <|(),
      # this seems like the only obvious way to give them access to filters.
      if file_list:
        processed_variables = copy.deepcopy(variables)
        ProcessListFiltersInDict(contents, processed_variables)
        # Recurse to expand variables in the contents
        contents = ExpandVariables(contents, is_late,
                                   processed_variables, build_file)
      else:
        # Recurse to expand variables in the contents
        contents = ExpandVariables(contents, is_late, variables, build_file)

      # Strip off leading/trailing whitespace so that variable matches are
      # simpler below (and because they are rarely needed).
      contents = contents.strip()

      # expand_to_list is true if an @ variant is used.  In that case,
      # the expansion should result in a list.  Note that the caller
      # is to be expecting a list in return, and not all callers do
      # because not all are working in list context.  Also, for list
      # expansions, there can be no other text besides the variable
      # expansion in the input string.
      expand_to_list = '@' in match['type'] and input_str == replacement

      if run_command or file_list:
        # Find the build file's directory, so commands can be run or file lists
        # generated relative to it.
        build_file_dir = os.path.dirname(build_file)
        if build_file_dir == '':
          # If build_file is just a leaf filename indicating a file in the
          # current directory, build_file_dir might be an empty string.  Set
          # it to None to signal to subprocess.Popen that it should run the
          # command in the current directory.
          build_file_dir = None

      # Support <|(listfile.txt ...) which generates a file
      # containing items from a gyp list, generated at gyp time.
      # This works around actions/rules which have more inputs than will
      # fit on the command line.
      if file_list:
        if type(contents) == list:
          contents_list = contents
        else:
          contents_list = contents.split(' ')
        replacement = contents_list[0]
        path = replacement
        if not os.path.isabs(path):
          path = os.path.join(build_file_dir, path)
        f = gyp.common.WriteOnDiff(path)
        for i in contents_list[1:]:
          f.write('%s\n' % i)
        f.close()

      elif run_command:
        use_shell = True
        if match['is_array']:
          # A '[' after the paren means the command is a Python argv list;
          # evaluate it and run without a shell.
          contents = eval(contents)
          use_shell = False

        # Check for a cached value to avoid executing commands, or generating
        # file lists more than once.
        # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is
        # possible that the command being invoked depends on the current
        # directory. For that case the syntax needs to be extended so that the
        # directory is also used in cache_key (it becomes a tuple).
        # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
        # someone could author a set of GYP files where each time the command
        # is invoked it produces different output by design.  When the need
        # arises, the syntax should be extended to support no caching off a
        # command's output so it is run every time.
        cache_key = str(contents)
        cached_value = cached_command_results.get(cache_key, None)
        if cached_value is None:
          gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                          "Executing command '%s' in directory '%s'" %
                          (contents,build_file_dir))

          # Fix up command with platform specific workarounds.
          contents = FixupPlatformCommand(contents)
          p = subprocess.Popen(contents, shell=use_shell,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               stdin=subprocess.PIPE,
                               cwd=build_file_dir)

          (p_stdout, p_stderr) = p.communicate('')

          if p.wait() != 0 or p_stderr:
            sys.stderr.write(p_stderr)
            # Simulate check_call behavior, since check_call only exists
            # in python 2.5 and later.
            raise Exception("Call to '%s' returned exit status %d." %
                            (contents, p.returncode))
          replacement = p_stdout.rstrip()

          cached_command_results[cache_key] = replacement
        else:
          gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                          "Had cache value for command '%s' in directory '%s'" %
                          (contents,build_file_dir))
          replacement = cached_value

      else:
        # Plain variable reference: must already be defined.
        if not contents in variables:
          raise KeyError, 'Undefined variable ' + contents + \
                ' in ' + build_file
        replacement = variables[contents]

      if isinstance(replacement, list):
        for item in replacement:
          if not isinstance(item, str) and not isinstance(item, int):
            raise TypeError, 'Variable ' + contents + \
                  ' must expand to a string or list of strings; ' + \
                  'list contains a ' + \
                  item.__class__.__name__
        # Run through the list and handle variable expansions in it.  Since
        # the list is guaranteed not to contain dicts, this won't do anything
        # with conditions sections.
        ProcessVariablesAndConditionsInList(replacement, is_late, variables,
                                            build_file)
      elif not isinstance(replacement, str) and \
           not isinstance(replacement, int):
        raise TypeError, 'Variable ' + contents + \
              ' must expand to a string or list of strings; ' + \
              'found a ' + replacement.__class__.__name__

      if expand_to_list:
        # Expanding in list context.  It's guaranteed that there's only one
        # replacement to do in |input_str| and that it's this replacement.  See
        # above.
        if isinstance(replacement, list):
          # If it's already a list, make a copy.
          output = replacement[:]
        else:
          # Split it the same way sh would split arguments.
          output = shlex.split(str(replacement))
      else:
        # Expanding in string context.
        encoded_replacement = ''
        if isinstance(replacement, list):
          # When expanding a list into string context, turn the list items
          # into a string in a way that will work with a subprocess call.
          #
          # TODO(mark): This isn't completely correct.  This should
          # call a generator-provided function that observes the
          # proper list-to-argument quoting rules on a specific
          # platform instead of just calling the POSIX encoding
          # routine.
          encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
        else:
          encoded_replacement = replacement

        output = output[:replace_start] + str(encoded_replacement) + \
                 output[replace_end:]
      # Prepare for the next match iteration.
      input_str = output

    # Look for more matches now that we've replaced some, to deal with
    # expanding local variables (variables defined in the same
    # variables block as this one).
    gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                    "Found output %s, recursing." % repr(output))
    if isinstance(output, list):
      new_output = []
      for item in output:
        new_output.append(ExpandVariables(item, is_late, variables, build_file))
      output = new_output
    else:
      output = ExpandVariables(output, is_late, variables, build_file)

  # Convert all strings that are canonically-represented integers into integers.
  if isinstance(output, list):
    for index in xrange(0, len(output)):
      if IsStrCanonicalInt(output[index]):
        output[index] = int(output[index])
  elif IsStrCanonicalInt(output):
    output = int(output)

  gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                  "Expanding %s to %s" % (repr(input), repr(output)))
  return output
741
742
743 def ProcessConditionsInDict(the_dict, is_late, variables, build_file):
744 # Process a 'conditions' or 'target_conditions' section in the_dict,
745 # depending on is_late. If is_late is False, 'conditions' is used.
746 #
747 # Each item in a conditions list consists of cond_expr, a string expression
748 # evaluated as the condition, and true_dict, a dict that will be merged into
749 # the_dict if cond_expr evaluates to true. Optionally, a third item,
750 # false_dict, may be present. false_dict is merged into the_dict if
751 # cond_expr evaluates to false.
752 #
753 # Any dict merged into the_dict will be recursively processed for nested
754 # conditionals and other expansions, also according to is_late, immediately
755 # prior to being merged.
756
757 if not is_late:
758 conditions_key = 'conditions'
759 else:
760 conditions_key = 'target_conditions'
761
762 if not conditions_key in the_dict:
763 return
764
765 conditions_list = the_dict[conditions_key]
766 # Unhook the conditions list, it's no longer needed.
767 del the_dict[conditions_key]
768
769 for condition in conditions_list:
770 if not isinstance(condition, list):
771 raise TypeError, conditions_key + ' must be a list'
772 if len(condition) != 2 and len(condition) != 3:
773 # It's possible that condition[0] won't work in which case this
774 # attempt will raise its own IndexError. That's probably fine.
775 raise IndexError, conditions_key + ' ' + condition[0] + \
776 ' must be length 2 or 3, not ' + len(condition)
777
778 [cond_expr, true_dict] = condition[0:2]
779 false_dict = None
780 if len(condition) == 3:
781 false_dict = condition[2]
782
783 # Do expansions on the condition itself. Since the conditon can naturally
784 # contain variable references without needing to resort to GYP expansion
785 # syntax, this is of dubious value for variables, but someone might want to
786 # use a command expansion directly inside a condition.
787 cond_expr_expanded = ExpandVariables(cond_expr, is_late, variables,
788 build_file)
789 if not isinstance(cond_expr_expanded, str) and \
790 not isinstance(cond_expr_expanded, int):
791 raise ValueError, \
792 'Variable expansion in this context permits str and int ' + \
793 'only, found ' + expanded.__class__.__name__
794
795 try:
796 ast_code = compile(cond_expr_expanded, '<string>', 'eval')
797
798 if eval(ast_code, {'__builtins__': None}, variables):
799 merge_dict = true_dict
800 else:
801 merge_dict = false_dict
802 except SyntaxError, e:
803 syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
804 'at character %d.' %
805 (str(e.args[0]), e.text, build_file, e.offset),
806 e.filename, e.lineno, e.offset, e.text)
807 raise syntax_error
808 except NameError, e:
809 gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
810 (cond_expr_expanded, build_file))
811 raise
812
813 if merge_dict != None:
814 # Expand variables and nested conditinals in the merge_dict before
815 # merging it.
816 ProcessVariablesAndConditionsInDict(merge_dict, is_late,
817 variables, build_file)
818
819 MergeDicts(the_dict, merge_dict, build_file, build_file)
820
821
def LoadAutomaticVariablesFromDict(variables, the_dict):
  """Creates an automatic variable for every simple value in the_dict.

  Each key whose value is a plain string, int, or list is exposed as a
  variable named '_' + key.  Nested dicts are skipped.
  """
  for key, value in the_dict.items():
    if isinstance(value, (str, int, list)):
      variables['_' + key] = value
829
830
def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
  """Loads the_dict's "variables" sub-dict into |variables|.

  Any keys in the_dict's "variables" dict, if it has one, becomes a
  variable.  The variable name is the key name in the "variables" dict.
  Variables that end with the % character are set only if they are unset in
  the variables dict.  the_dict_key is the name of the key that accesses
  the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
  (it could be a list or it could be parentless because it is a root dict),
  the_dict_key will be None.
  """
  for key, value in the_dict.get('variables', {}).items():
    # Only plain strings, ints, and lists can become variables.
    if not isinstance(value, (str, int, list)):
      continue

    if key.endswith('%'):
      variable_name = key[:-1]
      if variable_name in variables:
        # If the variable is already set, don't set it.
        continue
      # BUG FIX: the original compared "the_dict_key is 'variables'", an
      # identity test that only works via CPython string interning; use a
      # proper equality comparison.
      if the_dict_key == 'variables' and variable_name in the_dict:
        # If the variable is set without a % in the_dict, and the_dict is a
        # variables dict (making |variables| a variables sub-dict of a
        # variables dict), use the_dict's definition.
        value = the_dict[variable_name]
    else:
      variable_name = key

    variables[variable_name] = value
858
859
def ProcessVariablesAndConditionsInDict(the_dict, is_late, variables_in,
                                        build_file, the_dict_key=None):
  """Handle all variable and command expansion and conditional evaluation.

  This function is the public entry point for all variable expansions and
  conditional evaluations.  The variables_in dictionary will not be modified
  by this function.

  Args:
    the_dict: The dict to process; modified in place.
    is_late: Passed through to ExpandVariables and ProcessConditionsInDict;
        selects the expansion phase.
    variables_in: The inherited variable scope.  Copied, never mutated.
    build_file: Path of the build file the_dict came from, used for
        expansion and error reporting.
    the_dict_key: The key that accesses the_dict in its parent dict, or
        None (see LoadVariablesFromVariablesDict).
  """

  # Make a copy of the variables_in dict that can be modified during the
  # loading of automatics and the loading of the variables dict.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)

  if 'variables' in the_dict:
    # Make sure all the local variables are added to the variables
    # list before we process them so that you can reference one
    # variable from another.  They will be fully expanded by recursion
    # in ExpandVariables.
    for key, value in the_dict['variables'].iteritems():
      variables[key] = value

    # Handle the associated variables dict first, so that any variable
    # references within can be resolved prior to using them as variables.
    # Pass a copy of the variables dict to avoid having it be tainted.
    # Otherwise, it would have extra automatics added for everything that
    # should just be an ordinary variable in this scope.
    ProcessVariablesAndConditionsInDict(the_dict['variables'], is_late,
                                        variables, build_file, 'variables')

  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  # Expand every string value of the_dict in place ("variables" excepted).
  for key, value in the_dict.iteritems():
    # Skip "variables", which was already processed if present.
    if key != 'variables' and isinstance(value, str):
      expanded = ExpandVariables(value, is_late, variables, build_file)
      if not isinstance(expanded, str) and not isinstance(expanded, int):
        raise ValueError, \
              'Variable expansion in this context permits str and int ' + \
              'only, found ' + expanded.__class__.__name__ + ' for ' + key
      the_dict[key] = expanded

  # Variable expansion may have resulted in changes to automatics.  Reload.
  # TODO(mark): Optimization: only reload if no changes were made.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  # Process conditions in this dict.  This is done after variable expansion
  # so that conditions may take advantage of expanded variables.  For example,
  # if the_dict contains:
  #   {'type':       '<(library_type)',
  #    'conditions': [['_type=="static_library"', { ... }]]},
  # _type, as used in the condition, will only be set to the value of
  # library_type if variable expansion is performed before condition
  # processing.  However, condition processing should occur prior to recursion
  # so that variables (both automatic and "variables" dict type) may be
  # adjusted by conditions sections, merged into the_dict, and have the
  # intended impact on contained dicts.
  #
  # This arrangement means that a "conditions" section containing a "variables"
  # section will only have those variables effective in subdicts, not in
  # the_dict.  The workaround is to put a "conditions" section within a
  # "variables" section.  For example:
  #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
  #    'defines':    ['<(define)'],
  #    'my_subdict': {'defines': ['<(define)']}},
  # will not result in "IS_MAC" being appended to the "defines" list in the
  # current scope but would result in it being appended to the "defines" list
  # within "my_subdict".  By comparison:
  #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
  #    'defines':    ['<(define)'],
  #    'my_subdict': {'defines': ['<(define)']}},
  # will append "IS_MAC" to both "defines" lists.

  # Evaluate conditions sections, allowing variable expansions within them
  # as well as nested conditionals.  This will process a 'conditions' or
  # 'target_conditions' section, perform appropriate merging and recursive
  # conditional and variable processing, and then remove the conditions section
  # from the_dict if it is present.
  ProcessConditionsInDict(the_dict, is_late, variables, build_file)

  # Conditional processing may have resulted in changes to automatics or the
  # variables dict.  Reload.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  # Recurse into child dicts, or process child lists which may result in
  # further recursion into descendant dicts.
  for key, value in the_dict.iteritems():
    # Skip "variables" and string values, which were already processed if
    # present.
    if key == 'variables' or isinstance(value, str):
      continue
    if isinstance(value, dict):
      # Pass a copy of the variables dict so that subdicts can't influence
      # parents.
      ProcessVariablesAndConditionsInDict(value, is_late, variables,
                                          build_file, key)
    elif isinstance(value, list):
      # The list itself can't influence the variables dict, and
      # ProcessVariablesAndConditionsInList will make copies of the variables
      # dict if it needs to pass it to something that can influence it.  No
      # copy is necessary here.
      ProcessVariablesAndConditionsInList(value, is_late, variables,
                                          build_file)
    elif not isinstance(value, int):
      raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
                       ' for ' + key
970
971
def ProcessVariablesAndConditionsInList(the_list, is_late, variables,
                                        build_file):
  """Expands variables and processes conditions in each item of |the_list|.

  Dict items recurse into ProcessVariablesAndConditionsInDict, list items
  recurse into this function, and str items are expanded in place.  A string
  that expands to a list is spliced into the_list at its position.

  Raises:
    ValueError: if a string item expands to something other than a str, int,
        or list.
    TypeError: if an item is not a dict, list, str, or int.
  """
  # Iterate using an index so that new values can be assigned into the_list.
  index = 0
  while index < len(the_list):
    item = the_list[index]
    if isinstance(item, dict):
      # Make a copy of the variables dict so that it won't influence anything
      # outside of its own scope.
      ProcessVariablesAndConditionsInDict(item, is_late, variables, build_file)
    elif isinstance(item, list):
      ProcessVariablesAndConditionsInList(item, is_late, variables, build_file)
    elif isinstance(item, str):
      expanded = ExpandVariables(item, is_late, variables, build_file)
      if isinstance(expanded, str) or isinstance(expanded, int):
        the_list[index] = expanded
      elif isinstance(expanded, list):
        # Splice the expansion's items into the_list in place of the string.
        del the_list[index]
        for expanded_item in expanded:
          the_list.insert(index, expanded_item)
          index = index + 1

        # index now identifies the next item to examine.  Continue right now
        # without falling into the index increment below.
        continue
      else:
        # str(index) is required: concatenating the bare int would raise a
        # TypeError and mask the intended ValueError message.
        raise ValueError(
            'Variable expansion in this context permits strings and ' +
            'lists only, found ' + expanded.__class__.__name__ + ' at ' +
            str(index))
    elif not isinstance(item, int):
      raise TypeError('Unknown type ' + item.__class__.__name__ +
                      ' at index ' + str(index))
    index = index + 1
1006
1007
def BuildTargetsDict(data):
  """Builds a dict mapping fully-qualified target names to their target dicts.

  |data| is a dict mapping loaded build files by pathname relative to the
  current directory.  Values in |data| are build file contents.  For each
  |data| value with a "targets" key, the value of the "targets" key is taken
  as a list containing target dicts.  Each target's fully-qualified name is
  constructed from the pathname of the build file (|data| key) and its
  "target_name" property.  These fully-qualified names are used as the keys
  in the returned dict.  These keys provide access to the target dicts,
  the dicts in the "targets" lists.
  """
  targets_by_name = {}
  for build_file in data['target_build_files']:
    for target_dict in data[build_file].get('targets', []):
      qualified_name = gyp.common.QualifiedTarget(build_file,
                                                  target_dict['target_name'],
                                                  target_dict['toolset'])
      if qualified_name in targets_by_name:
        raise KeyError('Duplicate target definitions for ' + qualified_name)
      targets_by_name[qualified_name] = target_dict

  return targets_by_name
1032
1033
def QualifyDependencies(targets):
  """Make dependency links fully-qualified relative to the current directory.

  |targets| is a dict mapping fully-qualified target names to their target
  dicts.  For each target in this dict, keys known to contain dependency
  links are examined, and any dependencies referenced will be rewritten
  so that they are fully-qualified and relative to the current directory.
  All rewritten dependencies are suitable for use as keys to |targets| or a
  similar dict.
  """

  for target, target_dict in targets.iteritems():
    target_build_file = gyp.common.BuildFile(target)
    toolset = target_dict['toolset']
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      for index in xrange(0, len(dependencies)):
        dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
            target_build_file, dependencies[index], toolset)
        # NOTE(review): the global statement is not needed for a read-only
        # access of multiple_toolsets, but it is harmless.
        global multiple_toolsets
        if not multiple_toolsets:
          # Ignore toolset specification in the dependency if it is specified.
          dep_toolset = toolset
        # Rewrite the entry in place with its fully-qualified form.
        dependency = gyp.common.QualifiedTarget(dep_file,
                                                dep_target,
                                                dep_toolset)
        dependencies[index] = dependency

        # Make sure anything appearing in a list other than "dependencies" also
        # appears in the "dependencies" list.
        if dependency_key != 'dependencies' and \
           dependency not in target_dict['dependencies']:
          raise KeyError, 'Found ' + dependency + ' in ' + dependency_key + \
                          ' of ' + target + ', but not in dependencies'
1068
1069
def ExpandWildcardDependencies(targets, data):
  """Expands dependencies specified as build_file:*.

  For each target in |targets|, examines sections containing links to other
  targets.  If any such section contains a link of the form build_file:*, it
  is taken as a wildcard link, and is expanded to list each target in
  build_file.  The |data| dict provides access to build file dicts.

  Any target that does not wish to be included by wildcard can provide an
  optional "suppress_wildcard" key in its target dict.  When present and
  true, a wildcard dependency link will not include such targets.

  All dependency names, including the keys to |targets| and the values in each
  dependency list, must be qualified when this function is called.
  """

  for target, target_dict in targets.iteritems():
    toolset = target_dict['toolset']
    target_build_file = gyp.common.BuildFile(target)
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])

      # Loop this way instead of "for dependency in" or "for index in xrange"
      # because the dependencies list will be modified within the loop body.
      index = 0
      while index < len(dependencies):
        (dependency_build_file, dependency_target, dependency_toolset) = \
            gyp.common.ParseQualifiedTarget(dependencies[index])
        if dependency_target != '*' and dependency_toolset != '*':
          # Not a wildcard.  Keep it moving.
          index = index + 1
          continue

        if dependency_build_file == target_build_file:
          # It's an error for a target to depend on all other targets in
          # the same file, because a target cannot depend on itself.
          raise KeyError, 'Found wildcard in ' + dependency_key + ' of ' + \
                          target + ' referring to same build file'

        # Take the wildcard out and adjust the index so that the next
        # dependency in the list will be processed the next time through the
        # loop.
        del dependencies[index]
        index = index - 1

        # Loop through the targets in the other build file, adding them to
        # this target's list of dependencies in place of the removed
        # wildcard.
        dependency_target_dicts = data[dependency_build_file]['targets']
        for dependency_target_dict in dependency_target_dicts:
          # int() coerces bools and numeric values; presumably
          # suppress_wildcard is a 0/1 or boolean flag — not verifiable here.
          if int(dependency_target_dict.get('suppress_wildcard', False)):
            continue
          dependency_target_name = dependency_target_dict['target_name']
          if (dependency_target != '*' and
              dependency_target != dependency_target_name):
            continue
          dependency_target_toolset = dependency_target_dict['toolset']
          if (dependency_toolset != '*' and
              dependency_toolset != dependency_target_toolset):
            continue
          dependency = gyp.common.QualifiedTarget(dependency_build_file,
                                                  dependency_target_name,
                                                  dependency_target_toolset)
          # Insert after the current position so expansions keep build-file
          # order; index tracks the last inserted element.
          index = index + 1
          dependencies.insert(index, dependency)

        index = index + 1
1138
class DependencyGraphNode(object):
  """A node in a dependency graph; FlattenToList produces a build order.

  Attributes:
    ref: A reference to an object that this DependencyGraphNode represents.
    dependencies: List of DependencyGraphNodes on which this one depends.
    dependents: List of DependencyGraphNodes that depend on this one.
  """

  class CircularException(Exception):
    # Raised by callers (e.g. BuildDependencyList) when FlattenToList cannot
    # reach every node, which indicates a cycle in the graph.
    pass

  def __init__(self, ref):
    # ref is opaque to the graph itself; None is used for the synthetic root
    # node (see the ref checks in the methods below).
    self.ref = ref
    self.dependencies = []
    self.dependents = []

  def FlattenToList(self):
    """Topologically sorts the graph rooted at self; returns a list of refs.

    Every ref appears after the refs of all of its dependencies and before
    those of its dependents.  Nodes involved in a cycle are silently left
    out; callers compare the result's length to the node count to detect
    cycles.
    """
    # flat_list is the sorted list of dependencies - actually, the list items
    # are the "ref" attributes of DependencyGraphNodes.  Every target will
    # appear in flat_list after all of its dependencies, and before all of its
    # dependents.
    flat_list = []

    # in_degree_zeros is the list of DependencyGraphNodes that have no
    # dependencies not in flat_list.  Initially, it is a copy of the children
    # of this node, because when the graph was built, nodes with no
    # dependencies were made implicit dependents of the root node.
    in_degree_zeros = self.dependents[:]

    while in_degree_zeros:
      # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
      # can be appended to flat_list.  Take these nodes out of in_degree_zeros
      # as work progresses, so that the next node to process from the list can
      # always be accessed at a consistent position.
      node = in_degree_zeros.pop(0)
      flat_list.append(node.ref)

      # Look at dependents of the node just added to flat_list.  Some of them
      # may now belong in in_degree_zeros.
      for node_dependent in node.dependents:
        is_in_degree_zero = True
        for node_dependent_dependency in node_dependent.dependencies:
          if not node_dependent_dependency.ref in flat_list:
            # The dependent has one or more dependencies not yet in flat_list.
            # There will be more chances to add it to flat_list when examining
            # it again as a dependent of those other dependencies, provided
            # that there are no cycles.
            is_in_degree_zero = False
            break

        if is_in_degree_zero:
          # All of the dependent's dependencies are already in flat_list.  Add
          # it to in_degree_zeros where it will be processed in a future
          # iteration of the outer loop.
          in_degree_zeros.append(node_dependent)

    return flat_list

  def DirectDependencies(self, dependencies=None):
    """Returns a list of just direct dependencies."""
    if dependencies == None:
      dependencies = []

    for dependency in self.dependencies:
      # Check for None, corresponding to the root node.
      if dependency.ref != None and dependency.ref not in dependencies:
        dependencies.append(dependency.ref)

    return dependencies

  def _AddImportedDependencies(self, targets, dependencies=None):
    """Given a list of direct dependencies, adds indirect dependencies that
    other dependencies have declared to export their settings.

    This method does not operate on self.  Rather, it operates on the list
    of dependencies in the |dependencies| argument.  For each dependency in
    that list, if any declares that it exports the settings of one of its
    own dependencies, those dependencies whose settings are "passed through"
    are added to the list.  As new items are added to the list, they too will
    be processed, so it is possible to import settings through multiple levels
    of dependencies.

    This method is not terribly useful on its own, it depends on being
    "primed" with a list of direct dependencies such as one provided by
    DirectDependencies.  DirectAndImportedDependencies is intended to be the
    public entry point.
    """

    if dependencies == None:
      dependencies = []

    index = 0
    while index < len(dependencies):
      dependency = dependencies[index]
      dependency_dict = targets[dependency]
      # Add any dependencies whose settings should be imported to the list
      # if not already present.  Newly-added items will be checked for
      # their own imports when the list iteration reaches them.
      # Rather than simply appending new items, insert them after the
      # dependency that exported them.  This is done to more closely match
      # the depth-first method used by DeepDependencies.
      add_index = 1
      for imported_dependency in \
          dependency_dict.get('export_dependent_settings', []):
        if imported_dependency not in dependencies:
          dependencies.insert(index + add_index, imported_dependency)
          add_index = add_index + 1
      index = index + 1

    return dependencies

  def DirectAndImportedDependencies(self, targets, dependencies=None):
    """Returns a list of a target's direct dependencies and all indirect
    dependencies that a dependency has advertised settings should be exported
    through the dependency for.
    """

    dependencies = self.DirectDependencies(dependencies)
    return self._AddImportedDependencies(targets, dependencies)

  def DeepDependencies(self, dependencies=None):
    """Returns a list of all of a target's dependencies, recursively."""
    if dependencies == None:
      dependencies = []

    for dependency in self.dependencies:
      # Check for None, corresponding to the root node.
      if dependency.ref != None and dependency.ref not in dependencies:
        dependencies.append(dependency.ref)
        # Only recurse on first sight; dependencies already in the list have
        # had their own dependencies collected already.
        dependency.DeepDependencies(dependencies)

    return dependencies

  def LinkDependencies(self, targets, dependencies=None, initial=True):
    """Returns a list of dependency targets that are linked into this target.

    This function has a split personality, depending on the setting of
    |initial|.  Outside callers should always leave |initial| at its default
    setting.

    When adding a target to the list of dependencies, this function will
    recurse into itself with |initial| set to False, to collect dependencies
    that are linked into the linkable target for which the list is being built.
    """
    if dependencies == None:
      dependencies = []

    # Check for None, corresponding to the root node.
    if self.ref == None:
      return dependencies

    # It's kind of sucky that |targets| has to be passed into this function,
    # but that's presently the easiest way to access the target dicts so that
    # this function can find target types.

    if not 'target_name' in targets[self.ref]:
      raise Exception("Missing 'target_name' field in target.")

    try:
      target_type = targets[self.ref]['type']
    except KeyError, e:
      raise Exception("Missing 'type' field in target %s" %
                      targets[self.ref]['target_name'])

    is_linkable = target_type in linkable_types

    if initial and not is_linkable:
      # If this is the first target being examined and it's not linkable,
      # return an empty list of link dependencies, because the link
      # dependencies are intended to apply to the target itself (initial is
      # True) and this target won't be linked.
      return dependencies

    # Executables and loadable modules are already fully and finally linked.
    # Nothing else can be a link dependency of them, there can only be
    # dependencies in the sense that a dependent target might run an
    # executable or load the loadable_module.
    if not initial and target_type in ('executable', 'loadable_module'):
      return dependencies

    # The target is linkable, add it to the list of link dependencies.
    if self.ref not in dependencies:
      if target_type != 'none':
        # Special case: "none" type targets don't produce any linkable products
        # and shouldn't be exposed as link dependencies, although dependencies
        # of "none" type targets may still be link dependencies.
        dependencies.append(self.ref)
      if initial or not is_linkable:
        # If this is a subsequent target and it's linkable, don't look any
        # further for linkable dependencies, as they'll already be linked into
        # this target linkable.  Always look at dependencies of the initial
        # target, and always look at dependencies of non-linkables.
        for dependency in self.dependencies:
          dependency.LinkDependencies(targets, dependencies, False)

    return dependencies
1336
1337
1338 def BuildDependencyList(targets):
1339 # Create a DependencyGraphNode for each target. Put it into a dict for easy
1340 # access.
1341 dependency_nodes = {}
1342 for target, spec in targets.iteritems():
1343 if not target in dependency_nodes:
1344 dependency_nodes[target] = DependencyGraphNode(target)
1345
1346 # Set up the dependency links. Targets that have no dependencies are treated
1347 # as dependent on root_node.
1348 root_node = DependencyGraphNode(None)
1349 for target, spec in targets.iteritems():
1350 target_node = dependency_nodes[target]
1351 target_build_file = gyp.common.BuildFile(target)
1352 if not 'dependencies' in spec or len(spec['dependencies']) == 0:
1353 target_node.dependencies = [root_node]
1354 root_node.dependents.append(target_node)
1355 else:
1356 dependencies = spec['dependencies']
1357 for index in xrange(0, len(dependencies)):
1358 try:
1359 dependency = dependencies[index]
1360 dependency_node = dependency_nodes[dependency]
1361 target_node.dependencies.append(dependency_node)
1362 dependency_node.dependents.append(target_node)
1363 except KeyError, e:
1364 gyp.common.ExceptionAppend(e,
1365 'while trying to load target %s' % target)
1366 raise
1367
1368 flat_list = root_node.FlattenToList()
1369
1370 # If there's anything left unvisited, there must be a circular dependency
1371 # (cycle). If you need to figure out what's wrong, look for elements of
1372 # targets that are not in flat_list.
1373 if len(flat_list) != len(targets):
1374 raise DependencyGraphNode.CircularException, \
1375 'Some targets not reachable, cycle in dependency graph detected'
1376
1377 return [dependency_nodes, flat_list]
1378
1379
1380 def VerifyNoGYPFileCircularDependencies(targets):
1381 # Create a DependencyGraphNode for each gyp file containing a target. Put
1382 # it into a dict for easy access.
1383 dependency_nodes = {}
1384 for target in targets.iterkeys():
1385 build_file = gyp.common.BuildFile(target)
1386 if not build_file in dependency_nodes:
1387 dependency_nodes[build_file] = DependencyGraphNode(build_file)
1388
1389 # Set up the dependency links.
1390 for target, spec in targets.iteritems():
1391 build_file = gyp.common.BuildFile(target)
1392 build_file_node = dependency_nodes[build_file]
1393 target_dependencies = spec.get('dependencies', [])
1394 for dependency in target_dependencies:
1395 try:
1396 dependency_build_file = gyp.common.BuildFile(dependency)
1397 if dependency_build_file == build_file:
1398 # A .gyp file is allowed to refer back to itself.
1399 continue
1400 dependency_node = dependency_nodes[dependency_build_file]
1401 if dependency_node not in build_file_node.dependencies:
1402 build_file_node.dependencies.append(dependency_node)
1403 dependency_node.dependents.append(build_file_node)
1404 except KeyError, e:
1405 gyp.common.ExceptionAppend(
1406 e, 'while computing dependencies of .gyp file %s' % build_file)
1407 raise
1408
1409 # Files that have no dependencies are treated as dependent on root_node.
1410 root_node = DependencyGraphNode(None)
1411 for build_file_node in dependency_nodes.itervalues():
1412 if len(build_file_node.dependencies) == 0:
1413 build_file_node.dependencies.append(root_node)
1414 root_node.dependents.append(build_file_node)
1415
1416 flat_list = root_node.FlattenToList()
1417
1418 # If there's anything left unvisited, there must be a circular dependency
1419 # (cycle).
1420 if len(flat_list) != len(dependency_nodes):
1421 bad_files = []
1422 for file in dependency_nodes.iterkeys():
1423 if not file in flat_list:
1424 bad_files.append(file)
1425 raise DependencyGraphNode.CircularException, \
1426 'Some files not reachable, cycle in .gyp file dependency graph ' + \
1427 'detected involving some or all of: ' + \
1428 ' '.join(bad_files)
1429
1430
1431 def DoDependentSettings(key, flat_list, targets, dependency_nodes):
1432 # key should be one of all_dependent_settings, direct_dependent_settings,
1433 # or link_settings.
1434
1435 for target in flat_list:
1436 target_dict = targets[target]
1437 build_file = gyp.common.BuildFile(target)
1438
1439 if key == 'all_dependent_settings':
1440 dependencies = dependency_nodes[target].DeepDependencies()
1441 elif key == 'direct_dependent_settings':
1442 dependencies = \
1443 dependency_nodes[target].DirectAndImportedDependencies(targets)
1444 elif key == 'link_settings':
1445 dependencies = dependency_nodes[target].LinkDependencies(targets)
1446 else:
1447 raise KeyError, "DoDependentSettings doesn't know how to determine " + \
1448 'dependencies for ' + key
1449
1450 for dependency in dependencies:
1451 dependency_dict = targets[dependency]
1452 if not key in dependency_dict:
1453 continue
1454 dependency_build_file = gyp.common.BuildFile(dependency)
1455 MergeDicts(target_dict, dependency_dict[key],
1456 build_file, dependency_build_file)
1457
1458
def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes):
  # Recompute target "dependencies" properties.  For each static library
  # target, remove "dependencies" entries referring to other static libraries,
  # unless the dependency has the "hard_dependency" attribute set.  For each
  # linkable target, add a "dependencies" entry referring to all of the
  # target's computed list of link dependencies (including static libraries)
  # if no such entry is already present.
  for target in flat_list:
    target_dict = targets[target]
    target_type = target_dict['type']

    if target_type == 'static_library':
      if not 'dependencies' in target_dict:
        continue

      # Keep a pristine copy of the pre-filtered list under
      # "dependencies_original"; presumably consumed downstream — not
      # visible from here.
      target_dict['dependencies_original'] = target_dict.get(
          'dependencies', [])[:]

      index = 0
      while index < len(target_dict['dependencies']):
        dependency = target_dict['dependencies'][index]
        dependency_dict = targets[dependency]
        if dependency_dict['type'] == 'static_library' and \
           (not 'hard_dependency' in dependency_dict or \
            not dependency_dict['hard_dependency']):
          # A static library should not depend on another static library unless
          # the dependency relationship is "hard," which should only be done
          # when a dependent relies on some side effect other than just the
          # build product, like a rule or action output.  Take the dependency
          # out of the list, and don't increment index because the next
          # dependency to analyze will shift into the index formerly occupied
          # by the one being removed.
          del target_dict['dependencies'][index]
        else:
          index = index + 1

      # If the dependencies list is empty, it's not needed, so unhook it.
      if len(target_dict['dependencies']) == 0:
        del target_dict['dependencies']

    elif target_type in linkable_types:
      # Get a list of dependency targets that should be linked into this
      # target.  Add them to the dependencies list if they're not already
      # present.

      link_dependencies = dependency_nodes[target].LinkDependencies(targets)
      for dependency in link_dependencies:
        if dependency == target:
          continue
        if not 'dependencies' in target_dict:
          target_dict['dependencies'] = []
        if not dependency in target_dict['dependencies']:
          target_dict['dependencies'].append(dependency)
1512
# Initialize this here to speed up MakePathRelative.
# Matches items that MakePathRelative must return unmodified: strings whose
# first character (or second, when the first is a quote) is one of - / $ < >.
exception_re = re.compile(r'''["']?[-/$<>]''')
1515
1516
def MakePathRelative(to_file, fro_file, item):
  """Rebases relative path |item| from fro_file's directory to to_file's.

  If item is a relative path, it's relative to the build file dict that it's
  coming from.  Fix it up to make it relative to the build file dict that
  it's going into.

  Exception: any |item| that begins with these special characters is
  returned without modification.
    /   Used when a path is already absolute (shortcut optimization;
        such paths would be returned as absolute anyway)
    $   Used for build environment variables
    -   Used for some build environment flags (such as -lapr-1 in a
        "libraries" section)
    <   Used for our own variable and command expansions (see ExpandVariables)
    >   Used for our own variable and command expansions (see ExpandVariables)

    "/' Used when a value is quoted.  If these are present, then we
        check the second character instead.
  """
  if to_file == fro_file or exception_re.match(item):
    return item
  else:
    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
    # temporary measure. This should really be addressed by keeping all paths
    # in POSIX until actual project generation.
    ret = os.path.normpath(os.path.join(
        gyp.common.RelativePath(os.path.dirname(fro_file),
                                os.path.dirname(to_file)),
        item)).replace('\\', '/')
    # normpath strips any trailing slash; restore it.  endswith is used
    # instead of item[-1] so that an empty item can't raise IndexError.
    if item.endswith('/'):
      ret += '/'
    return ret
1547
def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
  """Merges list |fro| into list |to|.

  Items are appended to |to| (or prepended, when |append| is False).  When
  |is_paths| is true, str/int items are rebased from fro_file's directory to
  to_file's via MakePathRelative.  "Singleton" items (ints, and strings not
  beginning with "-") appear at most once in |to|.  Dict and list items are
  merged into fresh copies so |to| never aliases |fro|'s data.
  """
  # Insertion point used only in prepend mode, so prepended items keep their
  # original relative order.
  prepend_index = 0

  for item in fro:
    singleton = False
    if isinstance(item, str) or isinstance(item, int):
      # The cheap and easy case.
      if is_paths:
        to_item = MakePathRelative(to_file, fro_file, item)
      else:
        to_item = item

      if not isinstance(item, str) or not item.startswith('-'):
        # Any string that doesn't begin with a "-" is a singleton - it can
        # only appear once in a list, to be enforced by the list merge append
        # or prepend.
        singleton = True
    elif isinstance(item, dict):
      # Make a copy of the dictionary, continuing to look for paths to fix.
      # The other intelligent aspects of merge processing won't apply because
      # item is being merged into an empty dict.
      to_item = {}
      MergeDicts(to_item, item, to_file, fro_file)
    elif isinstance(item, list):
      # Recurse, making a copy of the list.  If the list contains any
      # descendant dicts, path fixing will occur.  Note that here, custom
      # values for is_paths and append are dropped; those are only to be
      # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
      # matter anyway because the new |to_item| list is empty.
      to_item = []
      MergeLists(to_item, item, to_file, fro_file)
    else:
      raise TypeError, \
          'Attempt to merge list item of unsupported type ' + \
          item.__class__.__name__

    if append:
      # If appending a singleton that's already in the list, don't append.
      # This ensures that the earliest occurrence of the item will stay put.
      if not singleton or not to_item in to:
        to.append(to_item)
    else:
      # If prepending a singleton that's already in the list, remove the
      # existing instance and proceed with the prepend.  This ensures that the
      # item appears at the earliest possible position in the list.
      while singleton and to_item in to:
        to.remove(to_item)

      # Don't just insert everything at index 0.  That would prepend the new
      # items to the list in reverse order, which would be an unwelcome
      # surprise.
      to.insert(prepend_index, to_item)
      prepend_index = prepend_index + 1
1601
1602
def MergeDicts(to, fro, to_file, fro_file):
  """Merges dict |fro| (read from |fro_file|) into dict |to| (from |to_file|).

  Scalars overwrite, dicts merge recursively, and lists merge according to
  the policy suffix on the from-key ('=', '+', '?', or none).  Path-section
  values are rewritten relative to |to_file| via MakePathRelative.  Raises
  TypeError on type-incompatible merges and KeyError on conflicting list
  policies.
  """
  # I wanted to name the parameter "from" but it's a Python keyword...
  for k, v in fro.iteritems():
    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
    # copy semantics. Something else may want to merge from the |fro| dict
    # later, and having the same dict ref pointed to twice in the tree isn't
    # what anyone wants considering that the dicts may subsequently be
    # modified.
    if k in to:
      bad_merge = False
      if isinstance(v, str) or isinstance(v, int):
        # str and int are interchangeable for merging purposes; anything else
        # must match the existing value's class exactly.
        if not (isinstance(to[k], str) or isinstance(to[k], int)):
          bad_merge = True
      elif v.__class__ != to[k].__class__:
        bad_merge = True

      if bad_merge:
        raise TypeError, \
            'Attempt to merge dict value of type ' + v.__class__.__name__ + \
            ' into incompatible type ' + to[k].__class__.__name__ + \
            ' for key ' + k
    if isinstance(v, str) or isinstance(v, int):
      # Overwrite the existing value, if any.  Cheap and easy.
      is_path = IsPathSection(k)
      if is_path:
        to[k] = MakePathRelative(to_file, fro_file, v)
      else:
        to[k] = v
    elif isinstance(v, dict):
      # Recurse, guaranteeing copies will be made of objects that require it.
      if not k in to:
        to[k] = {}
      MergeDicts(to[k], v, to_file, fro_file)
    elif isinstance(v, list):
      # Lists in dicts can be merged with different policies, depending on
      # how the key in the "from" dict (k, the from-key) is written.
      #
      # If the from-key has  ...the to-list will have this action
      # this character appended:...  applied when receiving the from-list:
      #                           =  replace
      #                           +  prepend
      #                           ?  set, only if to-list does not yet exist
      #                      (none)  append
      #
      # This logic is list-specific, but since it relies on the associated
      # dict key, it's checked in this dict-oriented function.
      ext = k[-1]
      append = True
      if ext == '=':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '?']
        to[list_base] = []
      elif ext == '+':
        list_base = k[:-1]
        lists_incompatible = [list_base + '=', list_base + '?']
        append = False
      elif ext == '?':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '=', list_base + '+']
      else:
        list_base = k
        lists_incompatible = [list_base + '=', list_base + '?']

      # Some combinations of merge policies appearing together are meaningless.
      # It's stupid to replace and append simultaneously, for example.  Append
      # and prepend are the only policies that can coexist.
      for list_incompatible in lists_incompatible:
        if list_incompatible in fro:
          raise KeyError, 'Incompatible list policies ' + k + ' and ' + \
                          list_incompatible

      if list_base in to:
        if ext == '?':
          # If the key ends in "?", the list will only be merged if it doesn't
          # already exist.
          continue
        if not isinstance(to[list_base], list):
          # This may not have been checked above if merging in a list with an
          # extension character.
          raise TypeError, \
              'Attempt to merge dict value of type ' + v.__class__.__name__ + \
              ' into incompatible type ' + to[list_base].__class__.__name__ + \
              ' for key ' + list_base + '(' + k + ')'
      else:
        to[list_base] = []

      # Call MergeLists, which will make copies of objects that require it.
      # MergeLists can recurse back into MergeDicts, although this will be
      # to make copies of dicts (with paths fixed), there will be no
      # subsequent dict "merging" once entering a list because lists are
      # always replaced, appended to, or prepended to.
      is_paths = IsPathSection(list_base)
      MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
    else:
      raise TypeError, \
          'Attempt to merge dict value of unsupported type ' + \
          v.__class__.__name__ + ' for key ' + k
1700
1701
def MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, visited):
  """Merges |configuration| of |target_dict|, and everything it inherits
  from, into |new_configuration_dict|.

  |visited| lists configurations already merged along the current inheritance
  chain; it prevents duplicate merging and breaks inheritance cycles.
  """
  # Skip if previously visited.
  if configuration in visited:
    return

  # The configuration as spelled in the target.
  config = target_dict['configurations'][configuration]

  # Merge parents first, so that this configuration's own settings override
  # anything inherited.
  for parent in config.get('inherit_from', []):
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, parent, visited + [configuration])

  # Merge this configuration itself into the result.
  MergeDicts(new_configuration_dict, config, build_file, build_file)

  # 'abstract' only marks inheritance-only configurations; it must not
  # survive in the concrete, merged result.
  new_configuration_dict.pop('abstract', None)
1723
1724
def SetUpConfigurations(target, target_dict):
  """Expands target-level settings into each concrete configuration.

  Every non-abstract configuration in target_dict['configurations'] becomes a
  deep copy of the target dict (minus non-configuration keys) with the
  configuration itself, and anything it inherits from, merged in.  Abstract
  configurations are dropped afterwards, and keys that moved into the
  configurations are removed from the target's top level.  Raises KeyError if
  a configuration contains a key from invalid_configuration_keys.
  """
  global non_configuration_keys
  # key_suffixes is a list of key suffixes that might appear on key names.
  # These suffixes are handled in conditional evaluations (for =, +, and ?)
  # and rules/exclude processing (for ! and /).  Keys with these suffixes
  # should be treated the same as keys without.
  key_suffixes = ['=', '+', '?', '!', '/']

  build_file = gyp.common.BuildFile(target)

  # Provide a single configuration by default if none exists.
  # TODO(mark): Signal an error if default_configurations exists but
  # configurations does not.
  if not 'configurations' in target_dict:
    target_dict['configurations'] = {'Default': {}}
  if not 'default_configuration' in target_dict:
    concrete = [i for i in target_dict['configurations'].keys()
                if not target_dict['configurations'][i].get('abstract')]
    # NOTE(review): assumes at least one concrete configuration exists;
    # sorted(concrete)[0] raises IndexError otherwise - confirm callers
    # guarantee this.
    target_dict['default_configuration'] = sorted(concrete)[0]

  for configuration in target_dict['configurations'].keys():
    old_configuration_dict = target_dict['configurations'][configuration]
    # Skip abstract configurations (saves work only).
    if old_configuration_dict.get('abstract'):
      continue
    # Configurations inherit (most) settings from the enclosing target scope.
    # Get the inheritance relationship right by making a copy of the target
    # dict.
    new_configuration_dict = copy.deepcopy(target_dict)

    # Take out the bits that don't belong in a "configurations" section.
    # Since configuration setup is done before conditional, exclude, and rules
    # processing, be careful with handling of the suffix characters used in
    # those phases.
    delete_keys = []
    for key in new_configuration_dict:
      key_ext = key[-1:]
      if key_ext in key_suffixes:
        key_base = key[:-1]
      else:
        key_base = key
      if key_base in non_configuration_keys:
        delete_keys.append(key)

    for key in delete_keys:
      del new_configuration_dict[key]

    # Merge in configuration (with all its parents first).
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, [])

    # Put the new result back into the target dict as a configuration.
    target_dict['configurations'][configuration] = new_configuration_dict

  # Now drop all the abstract ones.  keys() snapshots the names (Python 2
  # list), so deleting while iterating is safe here.
  for configuration in target_dict['configurations'].keys():
    old_configuration_dict = target_dict['configurations'][configuration]
    if old_configuration_dict.get('abstract'):
      del target_dict['configurations'][configuration]

  # Now that all of the target's configurations have been built, go through
  # the target dict's keys and remove everything that's been moved into a
  # "configurations" section.
  delete_keys = []
  for key in target_dict:
    key_ext = key[-1:]
    if key_ext in key_suffixes:
      key_base = key[:-1]
    else:
      key_base = key
    if not key_base in non_configuration_keys:
      delete_keys.append(key)
  for key in delete_keys:
    del target_dict[key]

  # Check the configurations to see if they contain invalid keys.
  for configuration in target_dict['configurations'].keys():
    configuration_dict = target_dict['configurations'][configuration]
    for key in configuration_dict.keys():
      if key in invalid_configuration_keys:
        raise KeyError, ('%s not allowed in the %s configuration, found in '
                         'target %s' % (key, configuration, target))
1807
1808
1809
def ProcessListFiltersInDict(name, the_dict):
  """Process regular expression and exclusion-based filters on lists.

  An exclusion list is in a dict key named with a trailing "!", like
  "sources!".  Every item in such a list is removed from the associated
  main list, which in this example, would be "sources".  Removed items are
  placed into a "sources_excluded" list in the dict.

  Regular expression (regex) filters are contained in dict keys named with a
  trailing "/", such as "sources/" to operate on the "sources" list.  Regex
  filters in a dict take the form:
    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
                  ['include', '_mac\\.cc$'] ],
  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
  _win.cc.  The second filter then includes all files ending in _mac.cc that
  are now or were once in the "sources" list.  Items matching an "exclude"
  filter are subject to the same processing as would occur if they were listed
  by name in an exclusion list (ending in "!").  Items matching an "include"
  filter are brought back into the main list if previously excluded by an
  exclusion list or exclusion regex filter.  Subsequent matching "exclude"
  patterns can still cause items to be excluded after matching an "include".

  Raises:
    ValueError: if a filter key's value is not a list, if the target of a
        filter is not a list, or if a regex filter's action is neither
        'exclude' nor 'include'.
    KeyError: if a "<list>_excluded" output key is already present.
  """

  # Look through the dictionary for any lists whose keys end in "!" or "/".
  # These are lists that will be treated as exclude lists and regular
  # expression-based exclude/include lists.  Collect the lists that are
  # needed first, looking for the lists that they operate on, and assemble
  # then into |lists|.  This is done in a separate loop up front, because
  # the _included and _excluded keys need to be added to the_dict, and that
  # can't be done while iterating through it.

  lists = []
  del_lists = []
  for key, value in the_dict.items():
    operation = key[-1]
    if operation != '!' and operation != '/':
      continue

    if not isinstance(value, list):
      raise ValueError(name + ' key ' + key + ' must be list, not ' +
                       value.__class__.__name__)

    list_key = key[:-1]
    if list_key not in the_dict:
      # This happens when there's a list like "sources!" but no corresponding
      # "sources" list.  Since there's nothing for it to operate on, queue up
      # the "sources!" list for deletion now.
      del_lists.append(key)
      continue

    if not isinstance(the_dict[list_key], list):
      raise ValueError(name + ' key ' + list_key +
                       ' must be list, not ' +
                       value.__class__.__name__ + ' when applying ' +
                       {'!': 'exclusion', '/': 'regex'}[operation])

    if not list_key in lists:
      lists.append(list_key)

  # Delete the lists that are known to be unneeded at this point.
  for del_list in del_lists:
    del the_dict[del_list]

  for list_key in lists:
    the_list = the_dict[list_key]

    # Initialize the list_actions list, which is parallel to the_list.  Each
    # item in list_actions identifies whether the corresponding item in
    # the_list should be excluded, unconditionally preserved (included), or
    # whether no exclusion or inclusion has been applied.  Items for which
    # no exclusion or inclusion has been applied (yet) have value -1, items
    # excluded have value 0, and items included have value 1.  Includes and
    # excludes override previous actions.  All items in list_actions are
    # initialized to -1 because no excludes or includes have been processed
    # yet.
    list_actions = list((-1,) * len(the_list))

    exclude_key = list_key + '!'
    if exclude_key in the_dict:
      for exclude_item in the_dict[exclude_key]:
        for index in range(0, len(the_list)):
          if exclude_item == the_list[index]:
            # This item matches the exclude_item, so set its action to 0
            # (exclude).
            list_actions[index] = 0

      # The "whatever!" list is no longer needed, dump it.
      del the_dict[exclude_key]

    regex_key = list_key + '/'
    if regex_key in the_dict:
      for regex_item in the_dict[regex_key]:
        [action, pattern] = regex_item
        pattern_re = re.compile(pattern)

        for index in range(0, len(the_list)):
          list_item = the_list[index]
          if pattern_re.search(list_item):
            # Regular expression match.

            if action == 'exclude':
              # This item matches an exclude regex, so set its value to 0
              # (exclude).
              list_actions[index] = 0
            elif action == 'include':
              # This item matches an include regex, so set its value to 1
              # (include).
              list_actions[index] = 1
            else:
              # This is an action that doesn't make any sense.
              # Bug fix: report |regex_key|, the key actually being
              # processed, instead of the stale |key| loop variable left
              # over from the scan at the top of this function.
              raise ValueError('Unrecognized action ' + action + ' in ' +
                               name + ' key ' + regex_key)

      # The "whatever/" list is no longer needed, dump it.
      del the_dict[regex_key]

    # Add excluded items to the excluded list.
    #
    # Note that exclude_key ("sources!") is different from excluded_key
    # ("sources_excluded").  The exclude_key list is input and it was already
    # processed and deleted; the excluded_key list is output and it's about
    # to be created.
    excluded_key = list_key + '_excluded'
    if excluded_key in the_dict:
      raise KeyError(name + ' key ' + excluded_key +
                     ' must not be present prior ' +
                     ' to applying exclusion/regex filters for ' + list_key)

    excluded_list = []

    # Go backwards through the list_actions list so that as items are deleted,
    # the indices of items that haven't been seen yet don't shift.  That means
    # that things need to be prepended to excluded_list to maintain them in the
    # same order that they existed in the_list.
    for index in range(len(list_actions) - 1, -1, -1):
      if list_actions[index] == 0:
        # Dump anything with action 0 (exclude).  Keep anything with action 1
        # (include) or -1 (no include or exclude seen for the item).
        excluded_list.insert(0, the_list[index])
        del the_list[index]

    # If anything was excluded, put the excluded list into the_dict at
    # excluded_key.
    if len(excluded_list) > 0:
      the_dict[excluded_key] = excluded_list

  # Now recurse into subdicts and lists that may contain dicts.
  for key, value in the_dict.items():
    if isinstance(value, dict):
      ProcessListFiltersInDict(key, value)
    elif isinstance(value, list):
      ProcessListFiltersInList(key, value)
1962
1963
def ProcessListFiltersInList(name, the_list):
  """Recurses into |the_list| so that any dicts it contains (at any depth)
  receive ProcessListFiltersInDict treatment.  Lists themselves carry no
  filter keys, so nothing else is done here."""
  for element in the_list:
    if isinstance(element, list):
      ProcessListFiltersInList(name, element)
    elif isinstance(element, dict):
      ProcessListFiltersInDict(name, element)
1970
1971
def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
  """Ensures that the rules sections in target_dict are valid and consistent,
  and determines which sources they apply to.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec containing "rules" and "sources" lists.
    extra_sources_for_rules: a list of keys to scan for rule matches in
        addition to 'sources'.

  Raises:
    KeyError: on a duplicate rule name, an extension claimed by more than
        one rule, or a rule that already carries a 'rule_sources' key.
  """

  # Rules seen so far, keyed by name and by extension, for conflict checks.
  seen_names = {}
  seen_extensions = {}

  for rule in target_dict.get('rules', []):
    # Make sure that there's no conflict among rule names and extensions.
    rule_name = rule['rule_name']
    if rule_name in seen_names:
      raise KeyError('rule %s exists in duplicate, target %s' %
                     (rule_name, target))
    seen_names[rule_name] = rule

    extension = rule['extension']
    if extension in seen_extensions:
      raise KeyError(('extension %s associated with multiple rules, ' +
                      'target %s rules %s and %s') %
                     (extension, target,
                      seen_extensions[extension]['rule_name'],
                      rule_name))
    seen_extensions[extension] = rule

    # 'rule_sources' is an output of this function; finding it in the input
    # indicates a malformed target.
    if 'rule_sources' in rule:
      raise KeyError('rule_sources must not exist in input, target %s rule %s' %
                     (target, rule_name))

    # Gather every source whose extension (with the leading dot stripped)
    # matches this rule's extension.
    matches = []
    for source_key in ['sources'] + list(extra_sources_for_rules):
      for source in target_dict.get(source_key, []):
        source_extension = os.path.splitext(source)[1]
        if source_extension.startswith('.'):
          source_extension = source_extension[1:]
        if source_extension == extension:
          matches.append(source)

    if matches:
      rule['rule_sources'] = matches
2027
2028
def ValidateActionsInTarget(target, target_dict, build_file):
  """Validates the actions in a target.

  Every action must carry a non-empty 'action_name' field.

  Arguments:
    target: string, qualified name of the target (unused here; kept for
        signature parity with the other Validate* functions).
    target_dict: dict, target spec that may contain an 'actions' list.
    build_file: string, path of the build file (unused here).

  Raises:
    Exception: if any action lacks an 'action_name'.
  """
  target_name = target_dict.get('target_name')
  for action in target_dict.get('actions', []):
    # Removed the dead "inputs = action.get('inputs', [])" assignment from
    # the original: the value was never used.
    if not action.get('action_name'):
      raise Exception("Anonymous action in target %s. "
                      "An action must have an 'action_name' field." %
                      target_name)
2040
2041
def ValidateRunAsInTarget(target, target_dict, build_file):
  """Checks a target's optional 'run_as' section for well-formedness.

  When present, 'run_as' must be a dict containing a list-valued 'action',
  and may contain a string 'working_directory' and a dict 'environment'.
  A target without a 'run_as' section is always valid.

  Raises:
    Exception: describing the first malformed field encountered.
  """
  target_name = target_dict.get('target_name')
  run_as = target_dict.get('run_as')
  if not run_as:
    # Nothing to validate.
    return

  if not isinstance(run_as, dict):
    raise Exception("The 'run_as' in target %s from file %s should be a "
                    "dictionary." %
                    (target_name, build_file))

  action = run_as.get('action')
  if not action:
    raise Exception("The 'run_as' in target %s from file %s must have an "
                    "'action' section." %
                    (target_name, build_file))
  if not isinstance(action, list):
    raise Exception("The 'action' for 'run_as' in target %s from file %s "
                    "must be a list." %
                    (target_name, build_file))

  # The remaining fields are optional; only their types are checked, and
  # only when they hold a truthy value.
  working_directory = run_as.get('working_directory')
  if working_directory and not isinstance(working_directory, str):
    raise Exception("The 'working_directory' for 'run_as' in target %s "
                    "in file %s should be a string." %
                    (target_name, build_file))

  environment = run_as.get('environment')
  if environment and not isinstance(environment, dict):
    raise Exception("The 'environment' for 'run_as' in target %s "
                    "in file %s should be a dictionary." %
                    (target_name, build_file))
2070
2071
def TurnIntIntoStrInDict(the_dict):
  """Given dict the_dict, recursively converts all integers into strings.

  Both integer values and integer keys are converted; containers nested as
  values are handled recursively.  the_dict is modified in place.
  """
  # items() (rather than iteritems()) snapshots the entries, so keys
  # reinserted under their string form below are not revisited.
  for key, value in the_dict.items():
    if isinstance(value, int):
      value = str(value)
      the_dict[key] = value
    elif isinstance(value, dict):
      TurnIntIntoStrInDict(value)
    elif isinstance(value, list):
      TurnIntIntoStrInList(value)

    # An integer key is re-keyed under its string representation, carrying
    # the (possibly converted) value with it.
    if isinstance(key, int):
      the_dict[str(key)] = value
      del the_dict[key]
2089
2090
def TurnIntIntoStrInList(the_list):
  """Given list the_list, recursively converts all integers into strings.

  the_list is modified in place; nested dicts and lists are handled
  recursively.
  """
  for position, element in enumerate(the_list):
    if isinstance(element, int):
      the_list[position] = str(element)
    elif isinstance(element, dict):
      TurnIntIntoStrInDict(element)
    elif isinstance(element, list):
      TurnIntIntoStrInList(element)
2102
2103
def Load(build_files, variables, includes, depth, generator_input_info, check,
         circular_check):
  """Loads every build file in |build_files| and fully processes its targets.

  Drives the whole input pipeline: loads and includes build files, qualifies
  and expands dependencies, applies dependent settings, expands "late"
  variables/conditions, sets up configurations, applies list filters, and
  validates rules/actions/run_as sections.

  Returns:
    [flat_list, targets, data]: the target list as produced by
    BuildDependencyList, the dict of target specs keyed by qualified name,
    and the raw per-build-file data dict.
  """
  # Set up path_sections and non_configuration_keys with the default data plus
  # the generator-specific data.
  global path_sections
  path_sections = base_path_sections[:]
  path_sections.extend(generator_input_info['path_sections'])

  global non_configuration_keys
  non_configuration_keys = base_non_configuration_keys[:]
  non_configuration_keys.extend(generator_input_info['non_configuration_keys'])

  # TODO(mark) handle variants if the generator doesn't want them directly.
  # (Assigned but currently unused in this function.)
  generator_handles_variants = \
      generator_input_info['generator_handles_variants']

  global absolute_build_file_paths
  absolute_build_file_paths = \
      generator_input_info['generator_wants_absolute_build_file_paths']

  global multiple_toolsets
  multiple_toolsets = generator_input_info[
      'generator_supports_multiple_toolsets']

  # A generator can have other lists (in addition to sources) be processed
  # for rules.
  extra_sources_for_rules = generator_input_info['extra_sources_for_rules']

  # Load build files.  This loads every target-containing build file into
  # the |data| dictionary such that the keys to |data| are build file names,
  # and the values are the entire build file contents after "early" or "pre"
  # processing has been done and includes have been resolved.
  # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
  # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
  # track of the keys corresponding to "target" files.
  data = {'target_build_files': set()}
  aux_data = {}
  for build_file in build_files:
    # Normalize paths everywhere.  This is important because paths will be
    # used as keys to the data dict and for references between input files.
    build_file = os.path.normpath(build_file)
    try:
      LoadTargetBuildFile(build_file, data, aux_data, variables, includes,
                          depth, check)
    except Exception, e:
      gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
      raise

  # Build a dict to access each target's subdict by qualified name.
  targets = BuildTargetsDict(data)

  # Fully qualify all dependency links.
  QualifyDependencies(targets)

  # Expand dependencies specified as build_file:*.
  ExpandWildcardDependencies(targets, data)

  if circular_check:
    # Make sure that any targets in a.gyp don't contain dependencies in other
    # .gyp files that further depend on a.gyp.
    VerifyNoGYPFileCircularDependencies(targets)

  [dependency_nodes, flat_list] = BuildDependencyList(targets)

  # Handle dependent settings of various types.
  for settings_type in ['all_dependent_settings',
                        'direct_dependent_settings',
                        'link_settings']:
    DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)

    # Take out the dependent settings now that they've been published to all
    # of the targets that require them.
    for target in flat_list:
      if settings_type in targets[target]:
        del targets[target][settings_type]

  # Make sure static libraries don't declare dependencies on other static
  # libraries, but that linkables depend on all unlinked static libraries
  # that they need so that their link steps will be correct.
  AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes)

  # Apply "post"/"late"/"target" variable expansions and condition evaluations.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ProcessVariablesAndConditionsInDict(target_dict, True, variables,
                                        build_file)

  # Move everything that can go into a "configurations" section into one.
  for target in flat_list:
    target_dict = targets[target]
    SetUpConfigurations(target, target_dict)

  # Apply exclude (!) and regex (/) list filters.
  for target in flat_list:
    target_dict = targets[target]
    ProcessListFiltersInDict(target, target_dict)

  # Make sure that the rules make sense, and build up rule_sources lists as
  # needed.  Not all generators will need to use the rule_sources lists, but
  # some may, and it seems best to build the list in a common spot.
  # Also validate actions and run_as elements in targets.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
    ValidateRunAsInTarget(target, target_dict, build_file)
    ValidateActionsInTarget(target, target_dict, build_file)

  # Generators might not expect ints.  Turn them into strs.
  TurnIntIntoStrInDict(data)

  # TODO(mark): Return |data| for now because the generator needs a list of
  # build files that came in.  In the future, maybe it should just accept
  # a list, and not the whole data dict.
  return [flat_list, targets, data]
+0
-2736
third_party/gyp/pylib/gyp/xcodeproj_file.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Xcode project file generator.
7
8 This module is both an Xcode project file generator and a documentation of the
9 Xcode project file format. Knowledge of the project file format was gained
10 based on extensive experience with Xcode, and by making changes to projects in
11 Xcode.app and observing the resultant changes in the associated project files.
12
13 XCODE PROJECT FILES
14
15 The generator targets the file format as written by Xcode 3.1 (specifically,
16 3.1.2), but past experience has taught that the format has not changed
17 significantly in the past several years, and future versions of Xcode are able
18 to read older project files.
19
20 Xcode project files are "bundled": the project "file" from an end-user's
21 perspective is actually a directory with an ".xcodeproj" extension. The
22 project file from this module's perspective is actually a file inside this
23 directory, always named "project.pbxproj". This file contains a complete
24 description of the project and is all that is needed to use the xcodeproj.
25 Other files contained in the xcodeproj directory are simply used to store
26 per-user settings, such as the state of various UI elements in the Xcode
27 application.
28
29 The project.pbxproj file is a property list, stored in a format almost
30 identical to the NeXTstep property list format. The file is able to carry
31 Unicode data, and is encoded in UTF-8. The root element in the property list
32 is a dictionary that contains several properties of minimal interest, and two
33 properties of immense interest. The most important property is a dictionary
34 named "objects". The entire structure of the project is represented by the
35 children of this property. The objects dictionary is keyed by unique 96-bit
36 values represented by 24 uppercase hexadecimal characters. Each value in the
37 objects dictionary is itself a dictionary, describing an individual object.
38
39 Each object in the dictionary is a member of a class, which is identified by
40 the "isa" property of each object. A variety of classes are represented in a
41 project file. Objects can refer to other objects by ID, using the 24-character
42 hexadecimal object key. A project's objects form a tree, with a root object
43 of class PBXProject at the root. As an example, the PBXProject object serves
44 as parent to an XCConfigurationList object defining the build configurations
45 used in the project, a PBXGroup object serving as a container for all files
46 referenced in the project, and a list of target objects, each of which defines
47 a target in the project. There are several different types of target object,
48 such as PBXNativeTarget and PBXAggregateTarget. In this module, this
49 relationship is expressed by having each target type derive from an abstract
50 base named XCTarget.
51
52 The project.pbxproj file's root dictionary also contains a property, sibling to
53 the "objects" dictionary, named "rootObject". The value of rootObject is a
54 24-character object key referring to the root PBXProject object in the
55 objects dictionary.
56
57 In Xcode, every file used as input to a target or produced as a final product
58 of a target must appear somewhere in the hierarchy rooted at the PBXGroup
59 object referenced by the PBXProject's mainGroup property. A PBXGroup is
60 generally represented as a folder in the Xcode application. PBXGroups can
61 contain other PBXGroups as well as PBXFileReferences, which are pointers to
62 actual files.
63
64 Each XCTarget contains a list of build phases, represented in this module by
65 the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations
66 are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the
67 "Compile Sources" and "Link Binary With Libraries" phases displayed in the
68 Xcode application. Files used as input to these phases (for example, source
69 files in the former case and libraries and frameworks in the latter) are
70 represented by PBXBuildFile objects, referenced by elements of "files" lists
71 in XCTarget objects. Each PBXBuildFile object refers to a PBXFileReference
72 object as a "weak" reference: it does not "own" the PBXFileReference, which is
73 owned by the root object's mainGroup or a descendant group. In most cases, the
74 layer of indirection between an XCBuildPhase and a PBXFileReference via a
75 PBXBuildFile appears extraneous, but there's actually one reason for this:
76 file-specific compiler flags are added to the PBXBuildFile object so as to
77 allow a single file to be a member of multiple targets while having distinct
78 compiler flags for each. These flags can be modified in the Xcode application
79 in the "Build" tab of a File Info window.
80
81 When a project is open in the Xcode application, Xcode will rewrite it. As
82 such, this module is careful to adhere to the formatting used by Xcode, to
83 avoid insignificant changes appearing in the file when it is used in the
84 Xcode application. This will keep version control repositories happy, and
85 makes it possible to compare a project file used in Xcode to one generated by
86 this module to determine if any significant changes were made in the
87 application.
88
89 Xcode has its own way of assigning 24-character identifiers to each object,
90 which is not duplicated here. Because the identifier is only generated
91 once, when an object is created, and is then left unchanged, there is no need
92 to attempt to duplicate Xcode's behavior in this area. The generator is free
93 to select any identifier, even at random, to refer to the objects it creates,
94 and Xcode will retain those identifiers and use them when subsequently
95 rewriting the project file. However, the generator would choose new random
96 identifiers each time the project files are generated, leading to difficulties
97 comparing "used" project files to "pristine" ones produced by this module,
98 and causing the appearance of changes as every object identifier is changed
99 when updated projects are checked in to a version control repository. To
100 mitigate this problem, this module chooses identifiers in a more deterministic
101 way, by hashing a description of each object as well as its parent and ancestor
102 objects. This strategy should result in minimal "shift" in IDs as successive
103 generations of project files are produced.
104
105 THIS MODULE
106
107 This module introduces several classes, all derived from the XCObject class.
108 Nearly all of the "brains" are built into the XCObject class, which understands
109 how to create and modify objects, maintain the proper tree structure, compute
110 identifiers, and print objects. For the most part, classes derived from
111 XCObject need only provide a _schema class object, a dictionary that
112 expresses what properties objects of the class may contain.
113
114 Given this structure, it's possible to build a minimal project file by creating
115 objects of the appropriate types and making the proper connections:
116
117 config_list = XCConfigurationList()
118 group = PBXGroup()
119 project = PBXProject({'buildConfigurationList': config_list,
120 'mainGroup': group})
121
122 With the project object set up, it can be added to an XCProjectFile object.
123 XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject
124 subclass that does not actually correspond to a class type found in a project
125 file. Rather, it is used to represent the project file's root dictionary.
126 Printing an XCProjectFile will print the entire project file, including the
127 full "objects" dictionary.
128
129 project_file = XCProjectFile({'rootObject': project})
130 project_file.ComputeIDs()
131 project_file.Print()
132
133 Xcode project files are always encoded in UTF-8. This module will accept
134 strings of either the str class or the unicode class. Strings of class str
135 are assumed to already be encoded in UTF-8. Obviously, if you're just using
136 ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset.
137 Strings of class unicode are handled properly and encoded in UTF-8 when
138 a project file is output.
139 """
140
141 import gyp.common
142 import posixpath
143 import re
144 import struct
145 import sys
146
# hashlib is supplied as of Python 2.5 as the replacement interface for sha
# and other secure hashes.  In 2.6, sha is deprecated.  Import hashlib if
# available, avoiding a deprecation warning under 2.6.  Import sha otherwise,
# preserving 2.4 compatibility.
# _new_sha1 is a factory returning a fresh SHA-1 hash object; it is used by
# XCObject.ComputeIDs to derive deterministic object identifiers.
try:
  import hashlib
  _new_sha1 = hashlib.sha1
except ImportError:
  import sha
  _new_sha1 = sha.new
157
158
# See XCObject._EncodeString.  This pattern is used to determine when a string
# can be printed unquoted.  Strings that match this pattern may be printed
# unquoted.  Strings that do not match must be quoted and may be further
# transformed to be properly encoded.  Note that this expression matches the
# characters listed with "+", for 1 or more occurrences: if a string is empty,
# it must not match this pattern, because it needs to be encoded as "".
_unquoted = re.compile('^[A-Za-z0-9$./_]+$')

# Strings that match this pattern are quoted regardless of what _unquoted says.
# Oddly, Xcode will quote any string with a run of three or more underscores.
_quoted = re.compile('___')

# This pattern should match any character that needs to be escaped by
# XCObject._EncodeString: backslash, double-quote, and anything outside the
# printable ASCII range (space through tilde).  See that function.
_escaped = re.compile('[\\\\"]|[^ -~]')
174
175
# Used by SourceTreeAndPathFromPath
_path_leading_variable = re.compile('^\$\((.*?)\)(/(.*))?$')

def SourceTreeAndPathFromPath(input_path):
  """Splits input_path into a (source_tree, output_path) tuple.

  Examples:
    input_path     (source_tree, output_path)
    '$(VAR)/path'  ('VAR', 'path')
    '$(VAR)'       ('VAR', None)
    'path'         (None, 'path')
  """

  variable_match = _path_leading_variable.match(input_path)
  if not variable_match:
    # No leading $(VAR) component: the entire input is the path, and there is
    # no source tree.
    return (None, input_path)

  # group(1) is the variable name; group(3) is the trailing path component,
  # which is None when the input was just '$(VAR)' with nothing after it.
  return (variable_match.group(1), variable_match.group(3))
198
def ConvertVariablesToShellSyntax(input_string):
  """Rewrites Xcode-style $(VAR) references as shell-style ${VAR} references."""
  xcode_variable = '\$\((.*?)\)'
  shell_replacement = '${\\1}'
  return re.sub(xcode_variable, shell_replacement, input_string)
201
class XCObject(object):
  """The abstract base of all class types used in Xcode project files.

  Class variables:
    _schema: A dictionary defining the properties of this class.  The keys to
             _schema are string property keys as used in project files.  Values
             are a list of four or five elements:
             [ is_list, property_type, is_strong, is_required, default ]
             is_list: True if the property described is a list, as opposed
                      to a single element.
             property_type: The type to use as the value of the property,
                            or if is_list is True, the type to use for each
                            element of the value's list.  property_type must
                            be an XCObject subclass, or one of the built-in
                            types str, int, or dict.
             is_strong: If property_type is an XCObject subclass, is_strong
                        is True to assert that this class "owns," or serves
                        as parent, to the property value (or, if is_list is
                        True, values).  is_strong must be False if
                        property_type is not an XCObject subclass.
             is_required: True if the property is required for the class.
                          Note that is_required being True does not preclude
                          an empty string ("", in the case of property_type
                          str) or list ([], in the case of is_list True) from
                          being set for the property.
             default: Optional.  If is_required is True, default may be set
                      to provide a default value for objects that do not supply
                      their own value.  If is_required is True and default
                      is not provided, users of the class must supply their own
                      value for the property.
             Note that although the values of the array are expressed in
             boolean terms, subclasses provide values as integers to conserve
             horizontal space.
    _should_print_single_line: False in XCObject.  Subclasses whose objects
                               should be written to the project file in the
                               alternate single-line format, such as
                               PBXFileReference and PBXBuildFile, should
                               set this to True.
    _encode_transforms: Used by _EncodeString to encode unprintable characters.
                        The index into this list is the ordinal of the
                        character to transform; each value is a string
                        used to represent the character in the output.  XCObject
                        provides an _encode_transforms list suitable for most
                        XCObject subclasses.
    _alternate_encode_transforms: Provided for subclasses that wish to use
                                  the alternate encoding rules.  Xcode seems
                                  to use these rules when printing objects in
                                  single-line format.  Subclasses that desire
                                  this behavior should set _encode_transforms
                                  to _alternate_encode_transforms.
    _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs
                to construct this object's ID.  Most classes that need custom
                hashing behavior should do it by overriding Hashables,
                but in some cases an object's parent may wish to push a
                hashable value into its child, and it can do so by appending
                to _hashables.
  Attributes:
    id: The object's identifier, a 24-character uppercase hexadecimal string.
        Usually, objects being created should not set id until the entire
        project file structure is built.  At that point, UpdateIDs() should
        be called on the root object to assign deterministic values for id to
        each object in the tree.
    parent: The object's parent.  This is set by a parent XCObject when a child
            object is added to it.
    _properties: The object's property dictionary.  An object's properties are
                 described by its class' _schema variable.
  """

  _schema = {}
  _should_print_single_line = False

  # See _EncodeString.  Build the default transform table: every control
  # character (ordinal below that of the space character) initially maps to a
  # "\Unnnn"-style escape, then the characters with dedicated C-style escapes
  # are overridden below.
  _encode_transforms = []
  i = 0
  while i < ord(' '):
    _encode_transforms.append('\\U%04x' % i)
    i = i + 1
  _encode_transforms[7] = '\\a'
  _encode_transforms[8] = '\\b'
  _encode_transforms[9] = '\\t'
  _encode_transforms[10] = '\\n'
  _encode_transforms[11] = '\\v'
  _encode_transforms[12] = '\\f'
  # Carriage return (13) deliberately encodes as "\n", collapsing CR into NL;
  # see the encoding table in _EncodeString.
  _encode_transforms[13] = '\\n'

  # The alternate table, used when printing single-line objects, passes tab,
  # newline, and vertical tab through unescaped.
  _alternate_encode_transforms = list(_encode_transforms)
  _alternate_encode_transforms[9] = chr(9)
  _alternate_encode_transforms[10] = chr(10)
  _alternate_encode_transforms[11] = chr(11)

  def __init__(self, properties=None, id=None, parent=None):
    """Initializes the object.

    Schema defaults are applied first so that any values supplied in
    |properties| override them.
    """
    self.id = id
    self.parent = parent
    self._properties = {}
    self._hashables = []
    self._SetDefaultsFromSchema()
    self.UpdateProperties(properties)

  def __repr__(self):
    # Objects without a Name (see Name below) fall back to a bare
    # class-and-address representation.
    try:
      name = self.Name()
    except NotImplementedError:
      return '<%s at 0x%x>' % (self.__class__.__name__, id(self))
    return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))

  def Copy(self):
    """Make a copy of this object.

    The new object will have its own copy of lists and dicts.  Any XCObject
    objects owned by this object (marked "strong") will be copied in the
    new object, even those found in lists.  If this object has any weak
    references to other XCObjects, the same references are added to the new
    object without making a copy.
    """

    that = self.__class__(id=self.id, parent=self.parent)
    for key, value in self._properties.iteritems():
      is_strong = self._schema[key][2]

      if isinstance(value, XCObject):
        if is_strong:
          # Strong (owned) child: deep-copy and reparent to the new object.
          new_value = value.Copy()
          new_value.parent = that
          that._properties[key] = new_value
        else:
          # Weak reference: share the same object.
          that._properties[key] = value
      elif isinstance(value, str) or isinstance(value, unicode) or \
           isinstance(value, int):
        that._properties[key] = value
      elif isinstance(value, list):
        if is_strong:
          # If is_strong is True, each element is an XCObject, so it's safe to
          # call Copy.
          that._properties[key] = []
          for item in value:
            new_item = item.Copy()
            new_item.parent = that
            that._properties[key].append(new_item)
        else:
          # Weak list: shallow-copy the list itself, sharing the elements.
          that._properties[key] = value[:]
      elif isinstance(value, dict):
        # dicts are never strong.
        if is_strong:
          raise TypeError, 'Strong dict for key ' + key + ' in ' + \
                           self.__class__.__name__
        else:
          that._properties[key] = value.copy()
      else:
        raise TypeError, 'Unexpected type ' + value.__class__.__name__ + \
                         ' for key ' + key + ' in ' + self.__class__.__name__

    return that

  def Name(self):
    """Return the name corresponding to an object.

    Not all objects necessarily need to be nameable, and not all that do have
    a "name" property.  Override as needed.
    """

    # If the schema indicates that "name" is required, try to access the
    # property even if it doesn't exist.  This will result in a KeyError
    # being raised for the property that should be present, which seems more
    # appropriate than NotImplementedError in this case.
    if 'name' in self._properties or \
        ('name' in self._schema and self._schema['name'][3]):
      return self._properties['name']

    raise NotImplementedError, \
          self.__class__.__name__ + ' must implement Name'

  def Comment(self):
    """Return a comment string for the object.

    Most objects just use their name as the comment, but PBXProject uses
    different values.

    The returned comment is not escaped and does not have any comment marker
    strings applied to it.
    """

    return self.Name()

  def Hashables(self):
    """Return the list of values hashed by ComputeIDs to form this object's
    ID: the class name, the object's Name (if any), and anything pushed into
    _hashables."""
    hashables = [self.__class__.__name__]

    name = self.Name()
    if name != None:
      hashables.append(name)

    hashables.extend(self._hashables)

    return hashables

  def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
    """Set "id" properties deterministically.

    An object's "id" property is set based on a hash of its class type and
    name, as well as the class type and name of all ancestor objects.  As
    such, it is only advisable to call ComputeIDs once an entire project file
    tree is built.

    If recursive is True, recurse into all descendant objects and update their
    hashes.

    If overwrite is True, any existing value set in the "id" property will be
    replaced.
    """

    def _HashUpdate(hash, data):
      """Update hash with data's length and contents.

      If the hash were updated only with the value of data, it would be
      possible for clowns to induce collisions by manipulating the names of
      their objects.  By adding the length, it's exceedingly less likely that
      ID collisions will be encountered, intentionally or not.
      """

      hash.update(struct.pack('>i', len(data)))
      hash.update(data)

    if hash == None:
      hash = _new_sha1()

    hashables = self.Hashables()
    assert len(hashables) > 0
    for hashable in hashables:
      _HashUpdate(hash, hashable)

    if recursive:
      # Each child continues from a copy of this object's hash state, so an
      # object's ID depends on its ancestors but not on its siblings.
      for child in self.Children():
        child.ComputeIDs(recursive, overwrite, hash.copy())

    if overwrite or self.id == None:
      # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest
      # is 160 bits.  Instead of throwing out 64 bits of the digest, xor them
      # into the portion that gets used.
      assert hash.digest_size % 4 == 0
      digest_int_count = hash.digest_size / 4
      digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest())
      id_ints = [0, 0, 0]
      for index in xrange(0, digest_int_count):
        id_ints[index % 3] ^= digest_ints[index]
      self.id = '%08X%08X%08X' % tuple(id_ints)

  def EnsureNoIDCollisions(self):
    """Verifies that no two objects have the same ID.  Checks all descendants.

    Raises KeyError on the first duplicate ID encountered.
    """

    ids = {}
    descendants = self.Descendants()
    for descendant in descendants:
      if descendant.id in ids:
        other = ids[descendant.id]
        raise KeyError, \
              'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \
              (descendant.id, str(descendant._properties),
               str(other._properties), self._properties['rootObject'].Name())
      ids[descendant.id] = descendant

  def Children(self):
    """Returns a list of all of this object's owned (strong) children."""

    children = []
    for property, attributes in self._schema.iteritems():
      (is_list, property_type, is_strong) = attributes[0:3]
      if is_strong and property in self._properties:
        if not is_list:
          children.append(self._properties[property])
        else:
          children.extend(self._properties[property])
    return children

  def Descendants(self):
    """Returns a list of all of this object's descendants, including this
    object.
    """

    children = self.Children()
    descendants = [self]
    for child in children:
      descendants.extend(child.Descendants())
    return descendants

  def PBXProjectAncestor(self):
    # The base case for recursion is defined at PBXProject.PBXProjectAncestor.
    if self.parent:
      return self.parent.PBXProjectAncestor()
    return None

  def _EncodeComment(self, comment):
    """Encodes a comment to be placed in the project file output, mimicking
    Xcode behavior.
    """

    # This mimics Xcode behavior by wrapping the comment in "/*" and "*/".  If
    # the string already contains a "*/", it is turned into "(*)/".  This keeps
    # the file writer from outputting something that would be treated as the
    # end of a comment in the middle of something intended to be entirely a
    # comment.

    return '/* ' + comment.replace('*/', '(*)/') + ' */'

  def _EncodeTransform(self, match):
    # This function works closely with _EncodeString.  It will only be called
    # by re.sub with match.group(0) containing a character matched by the
    # _escaped expression.
    char = match.group(0)

    # Backslashes (\) and quotation marks (") are always replaced with a
    # backslash-escaped version of the same.  Everything else gets its
    # replacement from the class' _encode_transforms array.
    if char == '\\':
      return '\\\\'
    if char == '"':
      return '\\"'
    return self._encode_transforms[ord(char)]

  def _EncodeString(self, value):
    """Encodes a string to be placed in the project file output, mimicking
    Xcode behavior.
    """

    # Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
    # $ (dollar sign), . (period), and _ (underscore) is present.  Also use
    # quotation marks to represent empty strings.
    #
    # Escape " (double-quote) and \ (backslash) by preceding them with a
    # backslash.
    #
    # Some characters below the printable ASCII range are encoded specially:
    #      7 ^G BEL is encoded as "\a"
    #      8 ^H BS  is encoded as "\b"
    #     11 ^K VT  is encoded as "\v"
    #     12 ^L NP  is encoded as "\f"
    #    127 ^? DEL is passed through as-is without escaping
    #  - In PBXFileReference and PBXBuildFile objects:
    #      9 ^I HT  is passed through as-is without escaping
    #     10 ^J NL  is passed through as-is without escaping
    #     13 ^M CR  is passed through as-is without escaping
    #  - In other objects:
    #      9 ^I HT  is encoded as "\t"
    #     10 ^J NL  is encoded as "\n"
    #     13 ^M CR  is encoded as "\n" rendering it indistinguishable from
    #               10 ^J NL
    # All other nonprintable characters within the ASCII range (0 through 127
    # inclusive) are encoded as "\U001f" referring to the Unicode code point in
    # hexadecimal.  For example, character 14 (^N SO) is encoded as "\U000e".
    # Characters above the ASCII range are passed through to the output encoded
    # as UTF-8 without any escaping.  These mappings are contained in the
    # class' _encode_transforms list.

    if _unquoted.search(value) and not _quoted.search(value):
      return value

    return '"' + _escaped.sub(self._EncodeTransform, value) + '"'

  def _XCPrint(self, file, tabs, line):
    # Writes line to file, indented by tabs tab characters.
    file.write('\t' * tabs + line)

  def _XCPrintableValue(self, tabs, value, flatten_list=False):
    """Returns a representation of value that may be printed in a project file,
    mimicking Xcode's behavior.

    _XCPrintableValue can handle str and int values, XCObjects (which are
    made printable by returning their id property), and list and dict objects
    composed of any of the above types.  When printing a list or dict, and
    _should_print_single_line is False, the tabs parameter is used to determine
    how much to indent the lines corresponding to the items in the list or
    dict.

    If flatten_list is True, single-element lists will be transformed into
    strings.
    """

    printable = ''
    comment = None

    if self._should_print_single_line:
      sep = ' '
      element_tabs = ''
      end_tabs = ''
    else:
      sep = '\n'
      element_tabs = '\t' * (tabs + 1)
      end_tabs = '\t' * tabs

    if isinstance(value, XCObject):
      printable += value.id
      comment = value.Comment()
    elif isinstance(value, str):
      printable += self._EncodeString(value)
    elif isinstance(value, unicode):
      printable += self._EncodeString(value.encode('utf-8'))
    elif isinstance(value, int):
      printable += str(value)
    elif isinstance(value, list):
      if flatten_list and len(value) <= 1:
        if len(value) == 0:
          printable += self._EncodeString('')
        else:
          printable += self._EncodeString(value[0])
      else:
        printable = '(' + sep
        for item in value:
          printable += element_tabs + \
                       self._XCPrintableValue(tabs + 1, item, flatten_list) + \
                       ',' + sep
        printable += end_tabs + ')'
    elif isinstance(value, dict):
      printable = '{' + sep
      for item_key, item_value in sorted(value.iteritems()):
        printable += element_tabs + \
            self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \
            self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \
            sep
      printable += end_tabs + '}'
    else:
      raise TypeError, "Can't make " + value.__class__.__name__ + ' printable'

    if comment != None:
      printable += ' ' + self._EncodeComment(comment)

    return printable

  def _XCKVPrint(self, file, tabs, key, value):
    """Prints a key and value, members of an XCObject's _properties dictionary,
    to file.

    tabs is an int identifying the indentation level.  If the class'
    _should_print_single_line variable is True, tabs is ignored and the
    key-value pair will be followed by a space instead of a newline.
    """

    if self._should_print_single_line:
      printable = ''
      after_kv = ' '
    else:
      printable = '\t' * tabs
      after_kv = '\n'

    # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy
    # objects without comments.  Sometimes it prints them with comments, but
    # the majority of the time, it doesn't.  To avoid unnecessary changes to
    # the project file after Xcode opens it, don't write comments for
    # remoteGlobalIDString.  This is a sucky hack and it would certainly be
    # cleaner to extend the schema to indicate whether or not a comment should
    # be printed, but since this is the only case where the problem occurs and
    # Xcode itself can't seem to make up its mind, the hack will suffice.
    #
    # Also see PBXContainerItemProxy._schema['remoteGlobalIDString'].
    if key == 'remoteGlobalIDString' and isinstance(self,
                                                    PBXContainerItemProxy):
      value_to_print = value.id
    else:
      value_to_print = value

    # In another one-off, let's set flatten_list on buildSettings properties
    # of XCBuildConfiguration objects, because that's how Xcode treats them.
    if key == 'buildSettings' and isinstance(self, XCBuildConfiguration):
      flatten_list = True
    else:
      flatten_list = False

    try:
      printable += self._XCPrintableValue(tabs, key, flatten_list) + ' = ' + \
                   self._XCPrintableValue(tabs, value_to_print, flatten_list) + \
                   ';' + after_kv
    except TypeError, e:
      gyp.common.ExceptionAppend(e,
                                 'while printing key "%s"' % key)
      raise

    self._XCPrint(file, 0, printable)

  def Print(self, file=sys.stdout):
    """Prints a representation of this object to file, adhering to Xcode output
    formatting.
    """

    self.VerifyHasRequiredProperties()

    if self._should_print_single_line:
      # When printing an object in a single line, Xcode doesn't put any space
      # between the beginning of a dictionary (or presumably a list) and the
      # first contained item, so you wind up with snippets like
      #   ...CDEF = {isa = PBXFileReference; fileRef = 0123...
      # If it were me, I would have put a space in there after the opening
      # curly, but I guess this is just another one of those inconsistencies
      # between how Xcode prints PBXFileReference and PBXBuildFile objects as
      # compared to other objects.  Mimic Xcode's behavior here by using an
      # empty string for sep.
      sep = ''
      end_tabs = 0
    else:
      sep = '\n'
      end_tabs = 2

    # Start the object.  For example, '\t\tPBXProject = {\n'.
    self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep)

    # "isa" isn't in the _properties dictionary, it's an intrinsic property
    # of the class which the object belongs to.  Xcode always outputs "isa"
    # as the first element of an object dictionary.
    self._XCKVPrint(file, 3, 'isa', self.__class__.__name__)

    # The remaining elements of an object dictionary are sorted alphabetically.
    for property, value in sorted(self._properties.iteritems()):
      self._XCKVPrint(file, 3, property, value)

    # End the object.
    self._XCPrint(file, end_tabs, '};\n')

  def UpdateProperties(self, properties, do_copy=False):
    """Merge the supplied properties into the _properties dictionary.

    The input properties must adhere to the class schema or a KeyError or
    TypeError exception will be raised.  If adding an object of an XCObject
    subclass and the schema indicates a strong relationship, the object's
    parent will be set to this object.

    If do_copy is True, then lists, dicts, strong-owned XCObjects, and
    strong-owned XCObjects in lists will be copied instead of having their
    references added.
    """

    if properties == None:
      return

    for property, value in properties.iteritems():
      # Make sure the property is in the schema.
      if not property in self._schema:
        raise KeyError, property + ' not in ' + self.__class__.__name__

      # Make sure the property conforms to the schema.
      (is_list, property_type, is_strong) = self._schema[property][0:3]
      if is_list:
        if value.__class__ != list:
          raise TypeError, \
                property + ' of ' + self.__class__.__name__ + \
                ' must be list, not ' + value.__class__.__name__
        for item in value:
          if not isinstance(item, property_type) and \
             not (item.__class__ == unicode and property_type == str):
            # Accept unicode where str is specified.  str is treated as
            # UTF-8-encoded.
            raise TypeError, \
                  'item of ' + property + ' of ' + self.__class__.__name__ + \
                  ' must be ' + property_type.__name__ + ', not ' + \
                  item.__class__.__name__
      elif not isinstance(value, property_type) and \
           not (value.__class__ == unicode and property_type == str):
        # Accept unicode where str is specified.  str is treated as
        # UTF-8-encoded.
        raise TypeError, \
              property + ' of ' + self.__class__.__name__ + ' must be ' + \
              property_type.__name__ + ', not ' + value.__class__.__name__

      # Checks passed, perform the assignment.
      if do_copy:
        if isinstance(value, XCObject):
          if is_strong:
            self._properties[property] = value.Copy()
          else:
            self._properties[property] = value
        elif isinstance(value, str) or isinstance(value, unicode) or \
             isinstance(value, int):
          self._properties[property] = value
        elif isinstance(value, list):
          if is_strong:
            # If is_strong is True, each element is an XCObject, so it's safe
            # to call Copy.
            self._properties[property] = []
            for item in value:
              self._properties[property].append(item.Copy())
          else:
            self._properties[property] = value[:]
        elif isinstance(value, dict):
          self._properties[property] = value.copy()
        else:
          raise TypeError, "Don't know how to copy a " + \
                           value.__class__.__name__ + ' object for ' + \
                           property + ' in ' + self.__class__.__name__
      else:
        self._properties[property] = value

      # Set up the child's back-reference to this object.  Don't use |value|
      # any more because it may not be right if do_copy is true.
      if is_strong:
        if not is_list:
          self._properties[property].parent = self
        else:
          for item in self._properties[property]:
            item.parent = self

  def HasProperty(self, key):
    # True if key has been set in this object's property dictionary.
    return key in self._properties

  def GetProperty(self, key):
    # Raises KeyError if key is not set.
    return self._properties[key]

  def SetProperty(self, key, value):
    # Routed through UpdateProperties so the schema is enforced.
    self.UpdateProperties({key: value})

  def DelProperty(self, key):
    # Removing an absent key is a no-op.
    if key in self._properties:
      del self._properties[key]

  def AppendProperty(self, key, value):
    """Appends value to the list property key, validating it against the
    schema and establishing the parent link for strong properties."""
    # TODO(mark): Support ExtendProperty too (and make this call that)?

    # Schema validation.
    if not key in self._schema:
      raise KeyError, key + ' not in ' + self.__class__.__name__

    (is_list, property_type, is_strong) = self._schema[key][0:3]
    if not is_list:
      raise TypeError, key + ' of ' + self.__class__.__name__ + ' must be list'
    if not isinstance(value, property_type):
      raise TypeError, 'item of ' + key + ' of ' + self.__class__.__name__ + \
                       ' must be ' + property_type.__name__ + ', not ' + \
                       value.__class__.__name__

    # If the property doesn't exist yet, create a new empty list to receive the
    # item.
    if not key in self._properties:
      self._properties[key] = []

    # Set up the ownership link.
    if is_strong:
      value.parent = self

    # Store the item.
    self._properties[key].append(value)

  def VerifyHasRequiredProperties(self):
    """Ensure that all properties identified as required by the schema are
    set.
    """

    # TODO(mark): A stronger verification mechanism is needed.  Some
    # subclasses need to perform validation beyond what the schema can enforce.
    for property, attributes in self._schema.iteritems():
      (is_list, property_type, is_strong, is_required) = attributes[0:4]
      if is_required and not property in self._properties:
        raise KeyError, self.__class__.__name__ + ' requires ' + property

  def _SetDefaultsFromSchema(self):
    """Assign object default values according to the schema.  This will not
    overwrite properties that have already been set."""

    defaults = {}
    for property, attributes in self._schema.iteritems():
      (is_list, property_type, is_strong, is_required) = attributes[0:4]
      # Only required properties with a fifth (default) schema element and no
      # existing value receive a default.
      if is_required and len(attributes) >= 5 and \
          not property in self._properties:
        default = attributes[4]

        defaults[property] = default

    if len(defaults) > 0:
      # Use do_copy=True so that each new object gets its own copy of strong
      # objects, lists, and dicts.
      self.UpdateProperties(defaults, do_copy=True)
866
867
868 class XCHierarchicalElement(XCObject):
869 """Abstract base for PBXGroup and PBXFileReference. Not represented in a
870 project file."""
871
872 # TODO(mark): Do name and path belong here? Probably so.
873 # If path is set and name is not, name may have a default value. Name will
874 # be set to the basename of path, if the basename of path is different from
875 # the full value of path. If path is already just a leaf name, name will
876 # not be set.
877 _schema = XCObject._schema.copy()
878 _schema.update({
879 'comments': [0, str, 0, 0],
880 'fileEncoding': [0, str, 0, 0],
881 'includeInIndex': [0, int, 0, 0],
882 'indentWidth': [0, int, 0, 0],
883 'lineEnding': [0, int, 0, 0],
884 'sourceTree': [0, str, 0, 1, '<group>'],
885 'tabWidth': [0, int, 0, 0],
886 'usesTabs': [0, int, 0, 0],
887 'wrapsLines': [0, int, 0, 0],
888 })
889
890 def __init__(self, properties=None, id=None, parent=None):
891 # super
892 XCObject.__init__(self, properties, id, parent)
893 if 'path' in self._properties and not 'name' in self._properties:
894 path = self._properties['path']
895 name = posixpath.basename(path)
896 if name != '' and path != name:
897 self.SetProperty('name', name)
898
899 if 'path' in self._properties and \
900 (not 'sourceTree' in self._properties or \
901 self._properties['sourceTree'] == '<group>'):
902 # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take
903 # the variable out and make the path be relative to that variable by
904 # assigning the variable name as the sourceTree.
905 (source_tree, path) = SourceTreeAndPathFromPath(self._properties['path'])
906 if source_tree != None:
907 self._properties['sourceTree'] = source_tree
908 if path != None:
909 self._properties['path'] = path
910 if source_tree != None and path == None and \
911 not 'name' in self._properties:
912 # The path was of the form "$(SDKROOT)" with no path following it.
913 # This object is now relative to that variable, so it has no path
914 # attribute of its own. It does, however, keep a name.
915 del self._properties['path']
916 self._properties['name'] = source_tree
917
918 def Name(self):
919 if 'name' in self._properties:
920 return self._properties['name']
921 elif 'path' in self._properties:
922 return self._properties['path']
923 else:
924 # This happens in the case of the root PBXGroup.
925 return None
926
927 def Hashables(self):
928 """Custom hashables for XCHierarchicalElements.
929
930 XCHierarchicalElements are special. Generally, their hashes shouldn't
931 change if the paths don't change. The normal XCObject implementation of
932 Hashables adds a hashable for each object, which means that if
933 the hierarchical structure changes (possibly due to changes caused when
934 TakeOverOnlyChild runs and encounters slight changes in the hierarchy),
935 the hashes will change. For example, if a project file initially contains
936 a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent
937 a/b. If someone later adds a/f2 to the project file, a/b can no longer be
938 collapsed, and f1 winds up with parent b and grandparent a. That would
939 be sufficient to change f1's hash.
940
941 To counteract this problem, hashables for all XCHierarchicalElements except
942 for the main group (which has neither a name nor a path) are taken to be
943 just the set of path components. Because hashables are inherited from
944 parents, this provides assurance that a/b/f1 has the same set of hashables
945 whether its parent is b or a/b.
946
947 The main group is a special case. As it is permitted to have no name or
948 path, it is permitted to use the standard XCObject hash mechanism. This
949 is not considered a problem because there can be only one main group.
950 """
951
952 if self == self.PBXProjectAncestor()._properties['mainGroup']:
953 # super
954 return XCObject.Hashables(self)
955
956 hashables = []
957
958 # Put the name in first, ensuring that if TakeOverOnlyChild collapses
959 # children into a top-level group like "Source", the name always goes
960 # into the list of hashables without interfering with path components.
961 if 'name' in self._properties:
962 # Make it less likely for people to manipulate hashes by following the
963 # pattern of always pushing an object type value onto the list first.
964 hashables.append(self.__class__.__name__ + '.name')
965 hashables.append(self._properties['name'])
966
967 # NOTE: This still has the problem that if an absolute path is encountered,
968 # including paths with a sourceTree, they'll still inherit their parents'
969 # hashables, even though the paths aren't relative to their parents. This
970 # is not expected to be much of a problem in practice.
971 path = self.PathFromSourceTreeAndPath()
972 if path != None:
973 components = path.split(posixpath.sep)
974 for component in components:
975 hashables.append(self.__class__.__name__ + '.path')
976 hashables.append(component)
977
978 hashables.extend(self._hashables)
979
980 return hashables
981
982 def Compare(self, other):
983 # Allow comparison of these types. PBXGroup has the highest sort rank;
984 # PBXVariantGroup is treated as equal to PBXFileReference.
985 valid_class_types = {
986 PBXFileReference: 'file',
987 PBXGroup: 'group',
988 PBXVariantGroup: 'file',
989 }
990 self_type = valid_class_types[self.__class__]
991 other_type = valid_class_types[other.__class__]
992
993 if self_type == other_type:
994 # If the two objects are of the same sort rank, compare their names.
995 return cmp(self.Name(), other.Name())
996
997 # Otherwise, sort groups before everything else.
998 if self_type == 'group':
999 return -1
1000 return 1
1001
1002 def CompareRootGroup(self, other):
1003 # This function should be used only to compare direct children of the
1004 # containing PBXProject's mainGroup. These groups should appear in the
1005 # listed order.
1006 # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the
1007 # generator should have a way of influencing this list rather than having
1008 # to hardcode for the generator here.
1009 order = ['Source', 'Intermediates', 'Projects', 'Frameworks', 'Products',
1010 'Build']
1011
1012 # If the groups aren't in the listed order, do a name comparison.
1013 # Otherwise, groups in the listed order should come before those that
1014 # aren't.
1015 self_name = self.Name()
1016 other_name = other.Name()
1017 self_in = isinstance(self, PBXGroup) and self_name in order
1018 other_in = isinstance(self, PBXGroup) and other_name in order
1019 if not self_in and not other_in:
1020 return self.Compare(other)
1021 if self_name in order and not other_name in order:
1022 return -1
1023 if other_name in order and not self_name in order:
1024 return 1
1025
1026 # If both groups are in the listed order, go by the defined order.
1027 self_index = order.index(self_name)
1028 other_index = order.index(other_name)
1029 if self_index < other_index:
1030 return -1
1031 if self_index > other_index:
1032 return 1
1033 return 0
1034
1035 def PathFromSourceTreeAndPath(self):
1036 # Turn the object's sourceTree and path properties into a single flat
1037 # string of a form comparable to the path parameter. If there's a
1038 # sourceTree property other than "<group>", wrap it in $(...) for the
1039 # comparison.
1040 components = []
1041 if self._properties['sourceTree'] != '<group>':
1042 components.append('$(' + self._properties['sourceTree'] + ')')
1043 if 'path' in self._properties:
1044 components.append(self._properties['path'])
1045
1046 if len(components) > 0:
1047 return posixpath.join(*components)
1048
1049 return None
1050
1051 def FullPath(self):
1052 # Returns a full path to self relative to the project file, or relative
1053 # to some other source tree. Start with self, and walk up the chain of
1054 # parents prepending their paths, if any, until no more parents are
1055 # available (project-relative path) or until a path relative to some
1056 # source tree is found.
1057 xche = self
1058 path = None
1059 while isinstance(xche, XCHierarchicalElement) and \
1060 (path == None or \
1061 (not path.startswith('/') and not path.startswith('$'))):
1062 this_path = xche.PathFromSourceTreeAndPath()
1063 if this_path != None and path != None:
1064 path = posixpath.join(this_path, path)
1065 elif this_path != None:
1066 path = this_path
1067 xche = xche.parent
1068
1069 return path
1070
1071
class PBXGroup(XCHierarchicalElement):
  """A group ("folder") node in the Xcode project hierarchy.

  Attributes:
    _children_by_path: Maps pathnames of children of this PBXGroup to the
      actual child XCHierarchicalElement objects.
    _variant_children_by_name_and_path: Maps (name, path) tuples of
      PBXVariantGroup children to the actual child PBXVariantGroup objects.
  """

  _schema = XCHierarchicalElement._schema.copy()
  _schema.update({
    'children': [1, XCHierarchicalElement, 1, 1, []],
    'name': [0, str, 0, 0],
    'path': [0, str, 0, 0],
  })

  def __init__(self, properties=None, id=None, parent=None):
    # super
    XCHierarchicalElement.__init__(self, properties, id, parent)
    # Index any children already supplied via properties for fast lookup.
    self._children_by_path = {}
    self._variant_children_by_name_and_path = {}
    for child in self._properties.get('children', []):
      self._AddChildToDicts(child)

  def _AddChildToDicts(self, child):
    """Registers child in this group's lookup dicts.

    Raises ValueError when a child with the same flattened path (or, for
    PBXVariantGroups, the same (name, path) key) is already registered.
    """
    # Sets up this PBXGroup object's dicts to reference the child properly.
    child_path = child.PathFromSourceTreeAndPath()
    if child_path:
      if child_path in self._children_by_path:
        raise ValueError, 'Found multiple children with path ' + child_path
      self._children_by_path[child_path] = child

      if isinstance(child, PBXVariantGroup):
        child_name = child._properties.get('name', None)
        key = (child_name, child_path)
        if key in self._variant_children_by_name_and_path:
          raise ValueError, 'Found multiple PBXVariantGroup children with ' + \
                            'name ' + str(child_name) + ' and path ' + \
                            str(child_path)
        self._variant_children_by_name_and_path[key] = child

  def AppendChild(self, child):
    """Adds child to this group's children list and lookup dicts."""
    # Callers should use this instead of calling
    # AppendProperty('children', child) directly because this function
    # maintains the group's dicts.
    self.AppendProperty('children', child)
    self._AddChildToDicts(child)

  def GetChildByName(self, name):
    """Returns the first child whose Name() equals name, or None."""
    # This is not currently optimized with a dict as GetChildByPath is because
    # it has few callers.  Most callers probably want GetChildByPath.  This
    # function is only useful to get children that have names but no paths,
    # which is rare.  The children of the main group ("Source", "Products",
    # etc.) is pretty much the only case where this likely to come up.
    #
    # TODO(mark): Maybe this should raise an error if more than one child is
    # present with the same name.
    if not 'children' in self._properties:
      return None

    for child in self._properties['children']:
      if child.Name() == name:
        return child

    return None

  def GetChildByPath(self, path):
    """Returns the child registered under path, or None."""
    if not path:
      return None

    if path in self._children_by_path:
      return self._children_by_path[path]

    return None

  def GetChildByRemoteObject(self, remote_object):
    """Returns this group's PBXReferenceProxy child whose remoteRef points
    at remote_object, or None.
    """
    # This method is a little bit esoteric.  Given a remote_object, which
    # should be a PBXFileReference in another project file, this method will
    # return this group's PBXReferenceProxy object serving as a local proxy
    # for the remote PBXFileReference.
    #
    # This function might benefit from a dict optimization as GetChildByPath
    # for some workloads, but profiling shows that it's not currently a
    # problem.
    if not 'children' in self._properties:
      return None

    for child in self._properties['children']:
      if not isinstance(child, PBXReferenceProxy):
        continue

      container_proxy = child._properties['remoteRef']
      if container_proxy._properties['remoteGlobalIDString'] == remote_object:
        return child

    return None

  def AddOrGetFileByPath(self, path, hierarchical):
    """Returns an existing or new file reference corresponding to path.

    If hierarchical is True, this method will create or use the necessary
    hierarchical group structure corresponding to path.  Otherwise, it will
    look in and create an item in the current group only.

    If an existing matching reference is found, it is returned, otherwise, a
    new one will be created, added to the correct group, and returned.

    If path identifies a directory by virtue of carrying a trailing slash,
    this method returns a PBXFileReference of "folder" type.  If path
    identifies a variant, by virtue of it identifying a file inside a directory
    with an ".lproj" extension, this method returns a PBXVariantGroup
    containing the variant named by path, and possibly other variants.  For
    all other paths, a "normal" PBXFileReference will be returned.
    """

    # Adding or getting a directory?  Directories end with a trailing slash.
    is_dir = False
    if path.endswith('/'):
      is_dir = True
    # NOTE(review): the normpath computed here is immediately overwritten in
    # both branches below, so the posixpath.normpath() result is never used —
    # looks like dead code; confirm whether normalization was intended.
    normpath = posixpath.normpath(path)
    if is_dir:
      normpath = path + '/'
    else:
      normpath = path

    # Adding or getting a variant?  Variants are files inside directories
    # with an ".lproj" extension.  Xcode uses variants for localization.  For
    # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named
    # MainMenu.nib inside path/to, and give it a variant named Language.  In
    # this example, grandparent would be set to path/to and parent_root would
    # be set to Language.
    variant_name = None
    parent = posixpath.dirname(path)
    grandparent = posixpath.dirname(parent)
    parent_basename = posixpath.basename(parent)
    (parent_root, parent_ext) = posixpath.splitext(parent_basename)
    if parent_ext == '.lproj':
      variant_name = parent_root
    if grandparent == '':
      grandparent = None

    # Putting a directory inside a variant group is not currently supported.
    assert not is_dir or variant_name == None

    path_split = path.split(posixpath.sep)
    if len(path_split) == 1 or \
       ((is_dir or variant_name != None) and len(path_split) == 2) or \
       not hierarchical:
      # The PBXFileReference or PBXVariantGroup will be added to or gotten from
      # this PBXGroup, no recursion necessary.
      if variant_name == None:
        # Add or get a PBXFileReference.
        file_ref = self.GetChildByPath(normpath)
        if file_ref != None:
          assert file_ref.__class__ == PBXFileReference
        else:
          file_ref = PBXFileReference({'path': path})
          self.AppendChild(file_ref)
      else:
        # Add or get a PBXVariantGroup.  The variant group name is the same
        # as the basename (MainMenu.nib in the example above).  grandparent
        # specifies the path to the variant group itself, and path_split[-2:]
        # is the path of the specific variant relative to its group.
        variant_group_name = posixpath.basename(path)
        variant_group_ref = self.AddOrGetVariantGroupByNameAndPath(
            variant_group_name, grandparent)
        variant_path = posixpath.sep.join(path_split[-2:])
        variant_ref = variant_group_ref.GetChildByPath(variant_path)
        if variant_ref != None:
          assert variant_ref.__class__ == PBXFileReference
        else:
          variant_ref = PBXFileReference({'name': variant_name,
                                          'path': variant_path})
          variant_group_ref.AppendChild(variant_ref)
        # The caller is interested in the variant group, not the specific
        # variant file.
        file_ref = variant_group_ref
      return file_ref
    else:
      # Hierarchical recursion.  Add or get a PBXGroup corresponding to the
      # outermost path component, and then recurse into it, chopping off that
      # path component.
      next_dir = path_split[0]
      group_ref = self.GetChildByPath(next_dir)
      if group_ref != None:
        assert group_ref.__class__ == PBXGroup
      else:
        group_ref = PBXGroup({'path': next_dir})
        self.AppendChild(group_ref)
      return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]),
                                          hierarchical)

  def AddOrGetVariantGroupByNameAndPath(self, name, path):
    """Returns an existing or new PBXVariantGroup for name and path.

    If a PBXVariantGroup identified by the name and path arguments is already
    present as a child of this object, it is returned.  Otherwise, a new
    PBXVariantGroup with the correct properties is created, added as a child,
    and returned.

    This method will generally be called by AddOrGetFileByPath, which knows
    when to create a variant group based on the structure of the pathnames
    passed to it.
    """

    key = (name, path)
    if key in self._variant_children_by_name_and_path:
      variant_group_ref = self._variant_children_by_name_and_path[key]
      assert variant_group_ref.__class__ == PBXVariantGroup
      return variant_group_ref

    variant_group_properties = {'name': name}
    if path != None:
      variant_group_properties['path'] = path
    variant_group_ref = PBXVariantGroup(variant_group_properties)
    self.AppendChild(variant_group_ref)

    return variant_group_ref

  def TakeOverOnlyChild(self, recurse=False):
    """If this PBXGroup has only one child and it's also a PBXGroup, take
    it over by making all of its children this object's children.

    This function will continue to take over only children when those children
    are groups.  If there are three PBXGroups representing a, b, and c, with
    c inside b and b inside a, and a and b have no other children, this will
    result in a taking over both b and c, forming a PBXGroup for a/b/c.

    If recurse is True, this function will recurse into children and ask them
    to collapse themselves by taking over only children as well.  Assuming
    an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
    (d1, d2, and f are files, the rest are groups), recursion will result in
    a group for a/b/c containing a group for d3/e.
    """

    # At this stage, check that child class types are PBXGroup exactly,
    # instead of using isinstance.  The only subclass of PBXGroup,
    # PBXVariantGroup, should not participate in reparenting in the same way:
    # reparenting by merging different object types would be wrong.
    while len(self._properties['children']) == 1 and \
          self._properties['children'][0].__class__ == PBXGroup:
      # Loop to take over the innermost only-child group possible.

      child = self._properties['children'][0]

      # Assume the child's properties, including its children.  Save a copy
      # of this object's old properties, because they'll still be needed.
      # This object retains its existing id and parent attributes.
      old_properties = self._properties
      self._properties = child._properties
      # NOTE(review): _variant_children_by_name_and_path is not taken over
      # here, only _children_by_path — confirm that variant groups cannot
      # occur in a position where this matters.
      self._children_by_path = child._children_by_path

      if not 'sourceTree' in self._properties or \
         self._properties['sourceTree'] == '<group>':
        # The child was relative to its parent.  Fix up the path.  Note that
        # children with a sourceTree other than "<group>" are not relative to
        # their parents, so no path fix-up is needed in that case.
        if 'path' in old_properties:
          if 'path' in self._properties:
            # Both the original parent and child have paths set.
            self._properties['path'] = posixpath.join(old_properties['path'],
                                                      self._properties['path'])
          else:
            # Only the original parent has a path, use it.
            self._properties['path'] = old_properties['path']
        if 'sourceTree' in old_properties:
          # The original parent had a sourceTree set, use it.
          self._properties['sourceTree'] = old_properties['sourceTree']

      # If the original parent had a name set, keep using it.  If the original
      # parent didn't have a name but the child did, let the child's name
      # live on.  If the name attribute seems unnecessary now, get rid of it.
      if 'name' in old_properties and old_properties['name'] != None and \
         old_properties['name'] != self.Name():
        self._properties['name'] = old_properties['name']
      if 'name' in self._properties and 'path' in self._properties and \
         self._properties['name'] == self._properties['path']:
        del self._properties['name']

      # Notify all children of their new parent.
      for child in self._properties['children']:
        child.parent = self

    # If asked to recurse, recurse.
    if recurse:
      for child in self._properties['children']:
        if child.__class__ == PBXGroup:
          child.TakeOverOnlyChild(recurse)

  def SortGroup(self):
    """Sorts this group's children in place via Compare, then recurses
    into child PBXGroups.
    """
    self._properties['children'] = \
        sorted(self._properties['children'], cmp=lambda x,y: x.Compare(y))

    # Recurse.
    for child in self._properties['children']:
      if isinstance(child, PBXGroup):
        child.SortGroup()
1369
1370
class XCFileLikeElement(XCHierarchicalElement):
  # Abstract base for objects that may serve as the fileRef property of a
  # PBXBuildFile.

  def PathHashables(self):
    """Returns hashables identifying this element by its full path.

    A PBXBuildFile that refers to this object calls this to obtain hashables
    beyond the object's own: an element's own hashables aren't specific and
    unique enough without its ancestors' contributions, so the hashables of
    every XCHierarchicalElement ancestor are prepended as well.
    """
    hashables = []
    node = self
    while isinstance(node, XCHierarchicalElement):
      # Prepend, preserving the ancestor's own hashable ordering.
      hashables = node.Hashables() + hashables
      node = node.parent
    return hashables
1391
1392
class XCContainerPortal(XCObject):
  # Abstract base for objects that can be used as the containerPortal property
  # of PBXContainerItemProxy.  Adds nothing of its own; exists purely as a
  # marker type for schema validation.
  pass
1397
1398
class XCRemoteObject(XCObject):
  # Abstract base for objects that can be used as the remoteGlobalIDString
  # property of PBXContainerItemProxy.  Adds nothing of its own; exists purely
  # as a marker type for schema validation.
  pass
1403
1404
class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
  _schema = XCFileLikeElement._schema.copy()
  _schema.update({
    'explicitFileType': [0, str, 0, 0],
    'lastKnownFileType': [0, str, 0, 0],
    'name': [0, str, 0, 0],
    'path': [0, str, 0, 1],
  })

  # Weird output rules for PBXFileReference.
  _should_print_single_line = True
  # super
  _encode_transforms = XCFileLikeElement._alternate_encode_transforms

  def __init__(self, properties=None, id=None, parent=None):
    # super
    XCFileLikeElement.__init__(self, properties, id, parent)

    # A trailing slash marks a directory reference; strip it and remember.
    is_dir = False
    if 'path' in self._properties and self._properties['path'].endswith('/'):
      self._properties['path'] = self._properties['path'][:-1]
      is_dir = True

    needs_file_type = ('path' in self._properties and
                       'lastKnownFileType' not in self._properties and
                       'explicitFileType' not in self._properties)
    if needs_file_type:
      # Guess lastKnownFileType from the file's extension.
      # TODO(mark): This is the replacement for a replacement for a quick hack.
      # It is no longer incredibly sucky, but this list needs to be extended.
      extension_map = {
        'a': 'archive.ar',
        'app': 'wrapper.application',
        'bdic': 'file',
        'bundle': 'wrapper.cfbundle',
        'c': 'sourcecode.c.c',
        'cc': 'sourcecode.cpp.cpp',
        'cpp': 'sourcecode.cpp.cpp',
        'css': 'text.css',
        'cxx': 'sourcecode.cpp.cpp',
        'dylib': 'compiled.mach-o.dylib',
        'framework': 'wrapper.framework',
        'h': 'sourcecode.c.h',
        'hxx': 'sourcecode.cpp.h',
        'icns': 'image.icns',
        'java': 'sourcecode.java',
        'js': 'sourcecode.javascript',
        'm': 'sourcecode.c.objc',
        'mm': 'sourcecode.cpp.objcpp',
        'nib': 'wrapper.nib',
        'pdf': 'image.pdf',
        'pl': 'text.script.perl',
        'plist': 'text.plist.xml',
        'pm': 'text.script.perl',
        'png': 'image.png',
        'py': 'text.script.python',
        'r': 'sourcecode.rez',
        'rez': 'sourcecode.rez',
        's': 'sourcecode.asm',
        'strings': 'text.plist.strings',
        'ttf': 'file',
        'xcconfig': 'text.xcconfig',
        'xib': 'file.xib',
        'y': 'sourcecode.yacc',
      }

      if is_dir:
        file_type = 'folder'
      else:
        basename = posixpath.basename(self._properties['path'])
        (root, ext) = posixpath.splitext(basename)
        # Check the map using a lowercase extension.
        # TODO(mark): Maybe it should try with the original case first and
        # fall back to lowercase, in case there are any instances where case
        # matters.  There currently aren't.
        if ext != '':
          ext = ext[1:].lower()

        # TODO(mark): "text" is the default value, but "file" is appropriate
        # for unrecognized files not containing text.  Xcode seems to choose
        # based on content.
        file_type = extension_map.get(ext, 'text')

      self._properties['lastKnownFileType'] = file_type
1487
1488
class PBXVariantGroup(PBXGroup, XCFileLikeElement):
  """PBXVariantGroup is used by Xcode to represent localizations."""
  # No additions to the schema relative to PBXGroup; the XCFileLikeElement
  # base allows a variant group to serve as a PBXBuildFile's fileRef.
  pass
1493
1494
1495 # PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined below
1496 # because it uses PBXContainerItemProxy, defined below.
1497
1498
class XCBuildConfiguration(XCObject):
  _schema = XCObject._schema.copy()
  _schema.update({
    'baseConfigurationReference': [0, PBXFileReference, 0, 0],
    'buildSettings': [0, dict, 0, 1, {}],
    'name': [0, str, 0, 1],
  })

  def HasBuildSetting(self, key):
    """Returns whether key is present in this configuration's settings."""
    return key in self._properties['buildSettings']

  def GetBuildSetting(self, key):
    """Returns the value stored for key; raises KeyError if absent."""
    return self._properties['buildSettings'][key]

  def SetBuildSetting(self, key, value):
    """Stores value under key, replacing any previous value."""
    # TODO(mark): If a list, copy?
    self._properties['buildSettings'][key] = value

  def AppendBuildSetting(self, key, value):
    """Appends value to the list stored under key, creating the list first
    when key was absent."""
    settings = self._properties['buildSettings']
    if key not in settings:
      settings[key] = []
    settings[key].append(value)

  def DelBuildSetting(self, key):
    """Removes key from the settings; a no-op when key is absent."""
    settings = self._properties['buildSettings']
    if key in settings:
      del settings[key]
1525
1526
class XCConfigurationList(XCObject):
  # _configs is the default list of configurations.
  # NOTE(review): these two XCBuildConfiguration instances are class-level and
  # become the schema default for 'buildConfigurations' — presumably the
  # schema machinery copies defaults per instance; confirm they aren't shared
  # across XCConfigurationList objects.
  _configs = [ XCBuildConfiguration({'name': 'Debug'}),
               XCBuildConfiguration({'name': 'Release'}) ]

  _schema = XCObject._schema.copy()
  _schema.update({
    'buildConfigurations': [1, XCBuildConfiguration, 1, 1, _configs],
    'defaultConfigurationIsVisible': [0, int, 0, 1, 1],
    'defaultConfigurationName': [0, str, 0, 1, 'Release'],
  })

  def Name(self):
    # Synthesized display name, e.g.
    # 'Build configuration list for PBXProject "name"'.
    return 'Build configuration list for ' + \
           self.parent.__class__.__name__ + ' "' + self.parent.Name() + '"'

  def ConfigurationNamed(self, name):
    """Convenience accessor to obtain an XCBuildConfiguration by name."""
    # Linear scan; the configuration list is expected to be tiny.
    for configuration in self._properties['buildConfigurations']:
      if configuration._properties['name'] == name:
        return configuration

    raise KeyError, name

  def DefaultConfiguration(self):
    """Convenience accessor to obtain the default XCBuildConfiguration."""
    return self.ConfigurationNamed(self._properties['defaultConfigurationName'])

  def HasBuildSetting(self, key):
    """Determines the state of a build setting in all XCBuildConfiguration
    child objects.

    If all child objects have key in their build settings, and the value is the
    same in all child objects, returns 1.

    If no child objects have the key in their build settings, returns 0.

    If some, but not all, child objects have the key in their build settings,
    or if any children have different values for the key, returns -1.
    """

    # has: whether the first configuration examined had the key.
    # value: the first value observed, used to detect disagreement.
    has = None
    value = None
    for configuration in self._properties['buildConfigurations']:
      configuration_has = configuration.HasBuildSetting(key)
      if has == None:
        has = configuration_has
      elif has != configuration_has:
        # Mixed presence across configurations.
        return -1

      if configuration_has:
        configuration_value = configuration.GetBuildSetting(key)
        if value == None:
          value = configuration_value
        elif value != configuration_value:
          # Present everywhere so far, but with differing values.
          return -1

    if not has:
      return 0

    return 1

  def GetBuildSetting(self, key):
    """Gets the build setting for key.

    All child XCConfiguration objects must have the same value set for the
    setting, or a ValueError will be raised.
    """

    # TODO(mark): This is wrong for build settings that are lists.  The list
    # contents should be compared (and a list copy returned?)

    value = None
    for configuration in self._properties['buildConfigurations']:
      configuration_value = configuration.GetBuildSetting(key)
      if value == None:
        value = configuration_value
      else:
        if value != configuration_value:
          raise ValueError, 'Variant values for ' + key

    return value

  def SetBuildSetting(self, key, value):
    """Sets the build setting for key to value in all child
    XCBuildConfiguration objects.
    """

    for configuration in self._properties['buildConfigurations']:
      configuration.SetBuildSetting(key, value)

  def AppendBuildSetting(self, key, value):
    """Appends value to the build setting for key, which is treated as a list,
    in all child XCBuildConfiguration objects.
    """

    for configuration in self._properties['buildConfigurations']:
      configuration.AppendBuildSetting(key, value)

  def DelBuildSetting(self, key):
    """Deletes the build setting key from all child XCBuildConfiguration
    objects.
    """

    for configuration in self._properties['buildConfigurations']:
      configuration.DelBuildSetting(key)
1633
1634
class PBXBuildFile(XCObject):
  _schema = XCObject._schema.copy()
  _schema.update({
    'fileRef': [0, XCFileLikeElement, 0, 1],
  })

  # Weird output rules for PBXBuildFile.
  _should_print_single_line = True
  _encode_transforms = XCObject._alternate_encode_transforms

  def Name(self):
    """Synthesized display name; example: "main.cc in Sources"."""
    return self._properties['fileRef'].Name() + ' in ' + self.parent.Name()

  def Hashables(self):
    """Hashables: the standard XCObject hashables plus the referenced
    element's path-based hashables."""
    # super
    base = XCObject.Hashables(self)

    # Relying on Name() alone would be insufficient because it is not a
    # complete pathname.  PathHashables returns hashables unique enough that
    # no two PBXBuildFiles should wind up with the same set, unless someone
    # adds the same file multiple times to the same target -- which would be
    # considered invalid anyway.
    return base + self._properties['fileRef'].PathHashables()
1662
1663
class XCBuildPhase(XCObject):
  """Abstract base for build phase classes.  Not represented in a project
  file.

  Attributes:
    _files_by_path: A dict mapping each path of a child in the files list by
      path (keys) to the corresponding PBXBuildFile children (values).
    _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
      to the corresponding PBXBuildFile children (values).
  """

  # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
  # actually have a "files" list.  XCBuildPhase should not have "files" but
  # another abstract subclass of it should provide this, and concrete build
  # phase types that do have "files" lists should be derived from that new
  # abstract subclass.  XCBuildPhase should only provide buildActionMask and
  # runOnlyForDeploymentPostprocessing, and not files or the various
  # file-related methods and attributes.

  _schema = XCObject._schema.copy()
  _schema.update({
    # 0x7fffffff mirrors the mask Xcode itself writes for ordinary phases.
    'buildActionMask': [0, int, 0, 1, 0x7fffffff],
    'files': [1, PBXBuildFile, 1, 1, []],
    'runOnlyForDeploymentPostprocessing': [0, int, 0, 1, 0],
  })
1689
1690 def __init__(self, properties=None, id=None, parent=None):
1691 # super
1692 XCObject.__init__(self, properties, id, parent)
1693
1694 self._files_by_path = {}
1695 self._files_by_xcfilelikeelement = {}
1696 for pbxbuildfile in self._properties.get('files', []):
1697 self._AddBuildFileToDicts(pbxbuildfile)
1698
  def FileGroup(self, path):
    """Abstract: maps path to the (PBXGroup, hierarchical) pair that should
    receive it."""
    # Subclasses must override this by returning a two-element tuple.  The
    # first item in the tuple should be the PBXGroup to which "path" should be
    # added, either as a child or deeper descendant.  The second item should
    # be a boolean indicating whether files should be added into hierarchical
    # groups or one single flat group.
    raise NotImplementedError, \
          self.__class__.__name__ + ' must implement FileGroup'
1707
  def _AddPathToDict(self, pbxbuildfile, path):
    """Adds path to the dict tracking paths belonging to this build phase.

    If the path is already a member of this build phase, raises an exception.
    """

    if path in self._files_by_path:
      raise ValueError, 'Found multiple build files with path ' + path
    # Map the path back to the PBXBuildFile that introduced it.
    self._files_by_path[path] = pbxbuildfile
1717
  def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
    """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.

    If path is specified, then it is the path that is being added to the
    phase, and pbxbuildfile must contain either a PBXFileReference directly
    referencing that path, or it must contain a PBXVariantGroup that itself
    contains a PBXFileReference referencing the path.

    If path is not specified, either the PBXFileReference's path or the paths
    of all children of the PBXVariantGroup are taken as being added to the
    phase.

    If the path is already present in the phase, raises an exception.

    If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
    are already present in the phase, referenced by a different PBXBuildFile
    object, raises an exception.  This does not raise an exception when
    a PBXFileReference or PBXVariantGroup reappear and are referenced by the
    same PBXBuildFile that has already introduced them, because in the case
    of PBXVariantGroup objects, they may correspond to multiple paths that are
    not all added simultaneously.  When this situation occurs, the path needs
    to be added to _files_by_path, but nothing needs to change in
    _files_by_xcfilelikeelement, and the caller should have avoided adding
    the PBXBuildFile if it is already present in the list of children.
    """

    xcfilelikeelement = pbxbuildfile._properties['fileRef']

    paths = []
    if path != None:
      # It's best when the caller provides the path.
      # NOTE(review): when path is supplied but fileRef is a plain
      # PBXFileReference (not a PBXVariantGroup), nothing is appended to
      # paths here, so the path is never registered in _files_by_path —
      # confirm this asymmetry is intended.
      if isinstance(xcfilelikeelement, PBXVariantGroup):
        paths.append(path)
    else:
      # If the caller didn't provide a path, there can be either multiple
      # paths (PBXVariantGroup) or one.
      if isinstance(xcfilelikeelement, PBXVariantGroup):
        for variant in xcfilelikeelement._properties['children']:
          paths.append(variant.FullPath())
      else:
        paths.append(xcfilelikeelement.FullPath())

    # Add the paths first, because if something's going to raise, the
    # messages provided by _AddPathToDict are more useful owing to its
    # having access to a real pathname and not just an object's Name().
    for a_path in paths:
      self._AddPathToDict(pbxbuildfile, a_path)

    # If another PBXBuildFile references this XCFileLikeElement, there's a
    # problem.
    if xcfilelikeelement in self._files_by_xcfilelikeelement and \
       self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile:
      raise ValueError, 'Found multiple build files for ' + \
                        xcfilelikeelement.Name()
    self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
1773
1774 def AppendBuildFile(self, pbxbuildfile, path=None):
1775 # Callers should use this instead of calling
1776 # AppendProperty('files', pbxbuildfile) directly because this function
1777 # maintains the object's dicts. Better yet, callers can just call AddFile
1778 # with a pathname and not worry about building their own PBXBuildFile
1779 # objects.
1780 self.AppendProperty('files', pbxbuildfile)
1781 self._AddBuildFileToDicts(pbxbuildfile, path)
1782
1783 def AddFile(self, path):
1784 (file_group, hierarchical) = self.FileGroup(path)
1785 file_ref = file_group.AddOrGetFileByPath(path, hierarchical)
1786
1787 if file_ref in self._files_by_xcfilelikeelement and \
1788 isinstance(file_ref, PBXVariantGroup):
1789 # There's already a PBXBuildFile in this phase corresponding to the
1790 # PBXVariantGroup. path just provides a new variant that belongs to
1791 # the group. Add the path to the dict.
1792 pbxbuildfile = self._files_by_xcfilelikeelement[file_ref]
1793 self._AddBuildFileToDicts(pbxbuildfile, path)
1794 else:
1795 # Add a new PBXBuildFile to get file_ref into the phase.
1796 pbxbuildfile = PBXBuildFile({'fileRef': file_ref})
1797 self.AppendBuildFile(pbxbuildfile, path)
1798
1799
class PBXHeadersBuildPhase(XCBuildPhase):
  """The "Headers" build phase.  No additions to the XCBuildPhase schema."""

  def Name(self):
    return 'Headers'

  def FileGroup(self, path):
    return self.PBXProjectAncestor().RootGroupForPath(path)
1808
1809
class PBXResourcesBuildPhase(XCBuildPhase):
  """The "Resources" build phase.  No additions to the XCBuildPhase schema."""

  def Name(self):
    return 'Resources'

  def FileGroup(self, path):
    return self.PBXProjectAncestor().RootGroupForPath(path)
1818
1819
class PBXSourcesBuildPhase(XCBuildPhase):
  """The "Sources" build phase.  No additions to the XCBuildPhase schema."""

  def Name(self):
    return 'Sources'

  def FileGroup(self, path):
    return self.PBXProjectAncestor().RootGroupForPath(path)
1828
1829
class PBXFrameworksBuildPhase(XCBuildPhase):
  """The "Frameworks" build phase.  No additions to the XCBuildPhase schema."""

  def Name(self):
    return 'Frameworks'

  def FileGroup(self, path):
    # Frameworks are collected in the project's single flat Frameworks group.
    return (self.PBXProjectAncestor().FrameworksGroup(), False)
1838
1839
class PBXShellScriptBuildPhase(XCBuildPhase):
  """A build phase that runs a shell script during the build."""

  _schema = XCBuildPhase._schema.copy()
  _schema.update({
    'inputPaths':       [1, str, 0, 1, []],
    'name':             [0, str, 0, 0],
    'outputPaths':      [1, str, 0, 1, []],
    'shellPath':        [0, str, 0, 1, '/bin/sh'],
    'shellScript':      [0, str, 0, 1],
    'showEnvVarsInLog': [0, int, 0, 0],
  })

  def Name(self):
    # The optional "name" property, when present, overrides the generic name.
    return self._properties.get('name', 'ShellScript')
1856
1857
class PBXCopyFilesBuildPhase(XCBuildPhase):
  """A build phase that copies files to a destination directory."""

  _schema = XCBuildPhase._schema.copy()
  _schema.update({
    'dstPath':          [0, str, 0, 1],
    'dstSubfolderSpec': [0, int, 0, 1],
    'name':             [0, str, 0, 0],
  })

  # path_tree_re matches "$(DIR)/path" or just "$(DIR)".  Match group 1 is
  # "DIR", match group 3 is "path" or None.
  path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$')

  # path_tree_to_subfolder maps names of Xcode variables to the associated
  # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object.
  path_tree_to_subfolder = {
    'BUILT_PRODUCTS_DIR': 16,  # Products Directory
    # Other types that can be chosen via the Xcode UI.
    # TODO(mark): Map Xcode variable names to these.
    # : 1,   # Wrapper
    # : 6,   # Executables: 6
    # : 7,   # Resources
    # : 15,  # Java Resources
    # : 10,  # Frameworks
    # : 11,  # Shared Frameworks
    # : 12,  # Shared Support
    # : 13,  # PlugIns
  }

  def Name(self):
    # The optional "name" property, when present, overrides the generic name.
    return self._properties.get('name', 'CopyFiles')

  def FileGroup(self, path):
    return self.PBXProjectAncestor().RootGroupForPath(path)

  def SetDestination(self, path):
    """Sets the dstSubfolderSpec and dstPath properties from path.

    path may be specified in the same notation used for
    XCHierarchicalElements, specifically, "$(DIR)/path".
    """
    variable_match = self.path_tree_re.search(path)
    if variable_match:
      # The destination is expressed relative to an Xcode variable.
      path_tree = variable_match.group(1)
      relative_path = variable_match.group(3)

      if path_tree in self.path_tree_to_subfolder:
        subfolder = self.path_tree_to_subfolder[path_tree]
        if relative_path is None:
          relative_path = ''
      else:
        # The path starts with an unrecognized Xcode variable
        # name like $(SRCROOT).  Xcode will still handle this
        # as an "absolute path" that starts with the variable.
        subfolder = 0
        relative_path = path
    elif path.startswith('/'):
      # Special case.  Absolute paths are in dstSubfolderSpec 0.
      subfolder = 0
      relative_path = path[1:]
    else:
      raise ValueError('Can\'t use path %s in a %s' %
                       (path, self.__class__.__name__))

    self._properties['dstPath'] = relative_path
    self._properties['dstSubfolderSpec'] = subfolder
1928
1929
class PBXBuildRule(XCObject):
  """Describes how files of a given type are processed during the build."""

  _schema = XCObject._schema.copy()
  _schema.update({
    'compilerSpec': [0, str, 0, 1],
    'filePatterns': [0, str, 0, 0],
    'fileType':     [0, str, 0, 1],
    'isEditable':   [0, int, 0, 1, 1],
    'outputFiles':  [1, str, 0, 1, []],
    'script':       [0, str, 0, 0],
  })

  def Name(self):
    # Not very inspired, but it's what Xcode uses.
    return self.__class__.__name__

  def Hashables(self):
    # super
    hashables = XCObject.Hashables(self)

    # fileType (and filePatterns, when present) distinguish build rules.
    hashables.append(self._properties['fileType'])
    if 'filePatterns' in self._properties:
      hashables.append(self._properties['filePatterns'])
    return hashables
1954
1955
class PBXContainerItemProxy(XCObject):
  # containerPortal is the PBXProject root object of this project file when
  # the proxied item lives in this project file.  When the item lives in
  # another project file, containerPortal is a PBXFileReference identifying
  # that other project file.
  #
  # proxyType is 1 when serving as a proxy to an XCTarget (in this project
  # file or another), and 2 when serving as a proxy to a PBXFileReference
  # (in another project file).  Type 2 is used for references to the
  # products of the other project file's targets.
  #
  # Xcode is weird about remoteGlobalIDString.  Usually, it's printed without
  # a comment, indicating that it's tracked internally simply as a string, but
  # sometimes it's printed with a comment (usually when the object is
  # initially created), indicating that it's tracked as a project file object
  # at least sometimes.  This module always tracks it as an object, but
  # contains a hack to prevent it from printing the comment in the project
  # file output.  See _XCKVPrint.
  _schema = XCObject._schema.copy()
  _schema.update({
    'containerPortal':      [0, XCContainerPortal, 0, 1],
    'proxyType':            [0, int, 0, 1],
    'remoteGlobalIDString': [0, XCRemoteObject, 0, 1],
    'remoteInfo':           [0, str, 0, 1],
  })

  def __repr__(self):
    portal_name = self._properties['containerPortal'].Name()
    name = '%s.gyp:%s' % (portal_name, self._properties['remoteInfo'])
    return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))

  def Name(self):
    # Admittedly not the best name, but it's what Xcode uses.
    return self.__class__.__name__

  def Hashables(self):
    # super
    hashables = XCObject.Hashables(self)

    # Mix in the hashables of the weak objects that this proxy refers to.
    hashables.extend(self._properties['containerPortal'].Hashables())
    hashables.extend(self._properties['remoteGlobalIDString'].Hashables())
    return hashables
1999
2000
class PBXTargetDependency(XCObject):
  # The "target" property accepts an XCTarget object, and obviously not
  # NoneType.  But XCTarget is defined below, so it can't be put into the
  # schema yet.  The definition of PBXTargetDependency can't be moved below
  # XCTarget because XCTarget's own schema references PBXTargetDependency.
  # Python doesn't deal well with this circular relationship, and doesn't
  # have a real way to do forward declarations.  To work around, the type of
  # the "target" property is reset below, after XCTarget is defined.
  #
  # At least one of "name" and "target" is required.
  _schema = XCObject._schema.copy()
  _schema.update({
    'name':        [0, str, 0, 0],
    'target':      [0, None.__class__, 0, 0],
    'targetProxy': [0, PBXContainerItemProxy, 1, 1],
  })

  def __repr__(self):
    # Prefer the explicit "name"; fall back to the target's own name.
    name = self._properties.get('name')
    if not name:
      name = self._properties['target'].Name()
    return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))

  def Name(self):
    # Admittedly not the best name, but it's what Xcode uses.
    return self.__class__.__name__

  def Hashables(self):
    # super
    hashables = XCObject.Hashables(self)

    # Mix in the hashables of the weak targetProxy this object refers to.
    hashables.extend(self._properties['targetProxy'].Hashables())
    return hashables
2033
2034
class PBXReferenceProxy(XCFileLikeElement):
  """A file-like element that refers, via a PBXContainerItemProxy in
  remoteRef, to an item in another project file."""

  _schema = XCFileLikeElement._schema.copy()
  _schema.update({
    'fileType':  [0, str, 0, 1],
    'path':      [0, str, 0, 1],
    'remoteRef': [0, PBXContainerItemProxy, 1, 1],
  })
2042
2043
class XCTarget(XCRemoteObject):
  # An XCTarget is really just an XCObject; deriving from XCRemoteObject only
  # allows a target to be used in the remoteGlobalIDString property of
  # PBXContainerItemProxy.
  #
  # Setting a "name" property at instantiation may also affect "productName",
  # which may in turn affect the "PRODUCT_NAME" build setting in children of
  # "buildConfigurationList".  See __init__ below.
  _schema = XCRemoteObject._schema.copy()
  _schema.update({
    'buildConfigurationList': [0, XCConfigurationList, 1, 1,
                               XCConfigurationList()],
    'buildPhases':            [1, XCBuildPhase, 1, 1, []],
    'dependencies':           [1, PBXTargetDependency, 1, 1, []],
    'name':                   [0, str, 0, 1],
    'productName':            [0, str, 0, 1],
  })

  def __init__(self, properties=None, id=None, parent=None,
               force_outdir=None, force_prefix=None, force_extension=None):
    # super
    XCRemoteObject.__init__(self, properties, id, parent)

    # Set up additional defaults not expressed in the schema.  If a "name"
    # property was supplied, default "productName" to it when absent.  Then
    # mirror "productName" into the "PRODUCT_NAME" build setting, but only if
    # no build configuration defines that setting already.
    if 'name' in self._properties and \
       not 'productName' in self._properties:
      self.SetProperty('productName', self._properties['name'])

    if 'productName' in self._properties and \
       'buildConfigurationList' in self._properties:
      configs = self._properties['buildConfigurationList']
      if configs.HasBuildSetting('PRODUCT_NAME') == 0:
        configs.SetBuildSetting('PRODUCT_NAME',
                                self._properties['productName'])

  def AddDependency(self, other):
    """Adds a dependency on other, an XCTarget in this or another project."""
    pbxproject = self.PBXProjectAncestor()
    other_pbxproject = other.PBXProjectAncestor()
    if pbxproject == other_pbxproject:
      # The easy case.  Add a dependency to another target in the same
      # project file.
      proxy = PBXContainerItemProxy({'containerPortal': pbxproject,
                                     'proxyType': 1,
                                     'remoteGlobalIDString': other,
                                     'remoteInfo': other.Name()})
      self.AppendProperty('dependencies',
                          PBXTargetDependency({'target': other,
                                               'targetProxy': proxy}))
    else:
      # The hard case.  Add a dependency to a target in a different project
      # file.  Actually, this case isn't really so hard.
      other_project_ref = \
          pbxproject.AddOrGetProjectReference(other_pbxproject)[1]
      proxy = PBXContainerItemProxy({
        'containerPortal':      other_project_ref,
        'proxyType':            1,
        'remoteGlobalIDString': other,
        'remoteInfo':           other.Name(),
      })
      self.AppendProperty('dependencies',
                          PBXTargetDependency({'name': other.Name(),
                                               'targetProxy': proxy}))

  # Proxy all of these through to the build configuration list.

  def ConfigurationNamed(self, name):
    return self._properties['buildConfigurationList'].ConfigurationNamed(name)

  def DefaultConfiguration(self):
    return self._properties['buildConfigurationList'].DefaultConfiguration()

  def HasBuildSetting(self, key):
    return self._properties['buildConfigurationList'].HasBuildSetting(key)

  def GetBuildSetting(self, key):
    return self._properties['buildConfigurationList'].GetBuildSetting(key)

  def SetBuildSetting(self, key, value):
    configs = self._properties['buildConfigurationList']
    return configs.SetBuildSetting(key, value)

  def AppendBuildSetting(self, key, value):
    configs = self._properties['buildConfigurationList']
    return configs.AppendBuildSetting(key, value)

  def DelBuildSetting(self, key):
    return self._properties['buildConfigurationList'].DelBuildSetting(key)
2134
2135
# Redefine the type of the "target" property.  See PBXTargetDependency._schema
# above.  This patches up the forward reference that couldn't be expressed
# when PBXTargetDependency was defined, because XCTarget didn't exist yet.
PBXTargetDependency._schema['target'][1] = XCTarget
2139
2140
class PBXNativeTarget(XCTarget):
  # buildPhases is overridden in the schema to be able to set defaults.
  #
  # NOTE: Contrary to most objects, it is advisable to set parent when
  # constructing PBXNativeTarget.  A parent of an XCTarget must be a PBXProject
  # object.  A parent reference is required for a PBXNativeTarget during
  # construction to be able to set up the target defaults for productReference,
  # because a PBXBuildFile object must be created for the target and it must
  # be added to the PBXProject's mainGroup hierarchy.
  _schema = XCTarget._schema.copy()
  _schema.update({
    'buildPhases':      [1, XCBuildPhase, 1, 1,
                         [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()]],
    'buildRules':       [1, PBXBuildRule, 1, 1, []],
    'productReference': [0, PBXFileReference, 0, 1],
    'productType':      [0, str, 0, 1],
  })

  # Mapping from Xcode product-types to settings.  The settings are:
  #  filetype : used for explicitFileType in the project file
  #  prefix : the prefix for the file name
  #  suffix : the suffix for the filen ame
  #  set_xc_exe_prefix : bool to say if EXECUTABLE_PREFIX should be set to the
  #                      prefix value.
  _product_filetypes = {
    'com.apple.product-type.application':     ['wrapper.application',
                                               '', '.app', False],
    'com.apple.product-type.bundle':          ['wrapper.cfbundle',
                                               '', '.bundle', False],
    'com.apple.product-type.framework':       ['wrapper.framework',
                                               '', '.framework', False],
    'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib',
                                               'lib', '.dylib', True],
    'com.apple.product-type.library.static':  ['archive.ar',
                                               'lib', '.a', False],
    'com.apple.product-type.tool':            ['compiled.mach-o.executable',
                                               '', '', False],
  }

  def __init__(self, properties=None, id=None, parent=None,
               force_outdir=None, force_prefix=None, force_extension=None):
    # super
    XCTarget.__init__(self, properties, id, parent)

    # When enough information is available (productName, a recognized
    # productType, no explicit productReference yet, and a PBXProject
    # ancestor supplying a Products group), synthesize the product's
    # PBXFileReference and the related build settings.
    if 'productName' in self._properties and \
       'productType' in self._properties and \
       not 'productReference' in self._properties and \
       self._properties['productType'] in self._product_filetypes:
      products_group = None
      pbxproject = self.PBXProjectAncestor()
      if pbxproject != None:
        products_group = pbxproject.ProductsGroup()

      if products_group != None:
        (filetype, prefix, suffix, set_xc_exe_prefix) = \
            self._product_filetypes[self._properties['productType']]

        if force_extension is not None:
          # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
          if filetype.startswith('wrapper.'):
            self.SetBuildSetting('WRAPPER_EXTENSION', force_extension)
          else:
            # Extension override.
            suffix = '.' + force_extension
            self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension)

        # For plain executables, any suffix is folded into productName
        # itself (and PRODUCT_NAME) rather than kept as a separate suffix.
        if filetype.startswith('compiled.mach-o.executable'):
          product_name = self._properties['productName']
          product_name += suffix
          suffix = ''
          self.SetProperty('productName', product_name)
          self.SetBuildSetting('PRODUCT_NAME', product_name)

        # Xcode handles most prefixes based on the target type, however there
        # are exceptions.  If a "BSD Dynamic Library" target is added in the
        # Xcode UI, Xcode sets EXECUTABLE_PREFIX.  This check duplicates that
        # behavior.
        if force_prefix is not None:
          prefix = force_prefix
        if filetype.startswith('wrapper.'):
          self.SetBuildSetting('WRAPPER_PREFIX', prefix)
        else:
          self.SetBuildSetting('EXECUTABLE_PREFIX', prefix)

        if force_outdir is not None:
          self.SetBuildSetting('TARGET_BUILD_DIR', force_outdir)

        # TODO(tvl): Remove the below hack.
        #    http://code.google.com/p/gyp/issues/detail?id=122

        # Some targets include the prefix in the target_name.  These targets
        # really should just add a product_name setting that doesn't include
        # the prefix.  For example:
        #  target_name = 'libevent', product_name = 'event'
        # This check cleans up for them.
        product_name = self._properties['productName']
        prefix_len = len(prefix)
        if prefix_len and (product_name[:prefix_len] == prefix):
          product_name = product_name[prefix_len:]
          self.SetProperty('productName', product_name)
          self.SetBuildSetting('PRODUCT_NAME', product_name)

        ref_props = {
          'explicitFileType': filetype,
          'includeInIndex': 0,
          'path': prefix + product_name + suffix,
          'sourceTree': 'BUILT_PRODUCTS_DIR',
        }
        file_ref = PBXFileReference(ref_props)
        products_group.AppendChild(file_ref)
        self.SetProperty('productReference', file_ref)

  def GetBuildPhaseByType(self, type):
    """Returns the sole build phase of the given type, or None if absent."""
    if not 'buildPhases' in self._properties:
      return None

    the_phase = None
    for phase in self._properties['buildPhases']:
      if isinstance(phase, type):
        # Some phases may be present in multiples in a well-formed project file,
        # but phases like PBXSourcesBuildPhase may only be present singly, and
        # this function is intended as an aid to GetBuildPhaseByType.  Loop
        # over the entire list of phases and assert if more than one of the
        # desired type is found.
        assert the_phase == None
        the_phase = phase

    return the_phase

  def ResourcesPhase(self):
    """Returns the PBXResourcesBuildPhase, creating and inserting it first."""
    resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase)
    if resources_phase == None:
      resources_phase = PBXResourcesBuildPhase()

      # The resources phase should come before the sources and frameworks
      # phases, if any.
      insert_at = len(self._properties['buildPhases'])
      for index in xrange(0, len(self._properties['buildPhases'])):
        phase = self._properties['buildPhases'][index]
        if isinstance(phase, PBXSourcesBuildPhase) or \
           isinstance(phase, PBXFrameworksBuildPhase):
          insert_at = index
          break

      self._properties['buildPhases'].insert(insert_at, resources_phase)
      resources_phase.parent = self

    return resources_phase

  def SourcesPhase(self):
    """Returns the PBXSourcesBuildPhase, creating and appending it if needed."""
    sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase)
    if sources_phase == None:
      sources_phase = PBXSourcesBuildPhase()
      self.AppendProperty('buildPhases', sources_phase)

    return sources_phase

  def FrameworksPhase(self):
    """Returns the PBXFrameworksBuildPhase, creating and appending if needed."""
    frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase)
    if frameworks_phase == None:
      frameworks_phase = PBXFrameworksBuildPhase()
      self.AppendProperty('buildPhases', frameworks_phase)

    return frameworks_phase

  def AddDependency(self, other):
    # super
    XCTarget.AddDependency(self, other)

    # Beyond recording the dependency, link against other's product when it
    # is a linkable library: a static library, or a dynamic library/framework
    # that is not an mh_bundle.
    static_library_type = 'com.apple.product-type.library.static'
    shared_library_type = 'com.apple.product-type.library.dynamic'
    framework_type = 'com.apple.product-type.framework'
    if isinstance(other, PBXNativeTarget) and \
       'productType' in self._properties and \
       self._properties['productType'] != static_library_type and \
       'productType' in other._properties and \
       (other._properties['productType'] == static_library_type or \
        ((other._properties['productType'] == shared_library_type or \
          other._properties['productType'] == framework_type) and \
         ((not other.HasBuildSetting('MACH_O_TYPE')) or
          other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))):

      file_ref = other.GetProperty('productReference')

      # For a cross-project dependency, the product must be referenced via
      # the proxy group maintained by AddOrGetProjectReference.
      pbxproject = self.PBXProjectAncestor()
      other_pbxproject = other.PBXProjectAncestor()
      if pbxproject != other_pbxproject:
        other_project_product_group = \
            pbxproject.AddOrGetProjectReference(other_pbxproject)[0]
        file_ref = other_project_product_group.GetChildByRemoteObject(file_ref)

      self.FrameworksPhase().AppendProperty('files',
                                            PBXBuildFile({'fileRef': file_ref}))
2334
2335
class PBXAggregateTarget(XCTarget):
  """An aggregate target.  No additions to the XCTarget schema."""
2338
2339
2340 class PBXProject(XCContainerPortal):
2341 # A PBXProject is really just an XCObject, the XCContainerPortal thing is
2342 # just to allow PBXProject to be used in the containerPortal property of
2343 # PBXContainerItemProxy.
2344 """
2345
2346 Attributes:
2347 path: "sample.xcodeproj". TODO(mark) Document me!
2348 _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each
2349 value is a reference to the dict in the
2350 projectReferences list associated with the keyed
2351 PBXProject.
2352 """
2353
2354 _schema = XCContainerPortal._schema.copy()
2355 _schema.update({
2356 'attributes': [0, dict, 0, 0],
2357 'buildConfigurationList': [0, XCConfigurationList, 1, 1,
2358 XCConfigurationList()],
2359 'compatibilityVersion': [0, str, 0, 1, 'Xcode 3.1'],
2360 'hasScannedForEncodings': [0, int, 0, 1, 1],
2361 'mainGroup': [0, PBXGroup, 1, 1, PBXGroup()],
2362 'projectDirPath': [0, str, 0, 1, ''],
2363 'projectReferences': [1, dict, 0, 0],
2364 'projectRoot': [0, str, 0, 1, ''],
2365 'targets': [1, XCTarget, 1, 1, []],
2366 })
2367
2368 def __init__(self, properties=None, id=None, parent=None, path=None):
2369 self.path = path
2370 self._other_pbxprojects = {}
2371 # super
2372 return XCContainerPortal.__init__(self, properties, id, parent)
2373
2374 def Name(self):
2375 name = self.path
2376 if name[-10:] == '.xcodeproj':
2377 name = name[:-10]
2378 return posixpath.basename(name)
2379
2380 def Path(self):
2381 return self.path
2382
2383 def Comment(self):
2384 return 'Project object'
2385
2386 def Children(self):
2387 # super
2388 children = XCContainerPortal.Children(self)
2389
2390 # Add children that the schema doesn't know about. Maybe there's a more
2391 # elegant way around this, but this is the only case where we need to own
2392 # objects in a dictionary (that is itself in a list), and three lines for
2393 # a one-off isn't that big a deal.
2394 if 'projectReferences' in self._properties:
2395 for reference in self._properties['projectReferences']:
2396 children.append(reference['ProductGroup'])
2397
2398 return children
2399
2400 def PBXProjectAncestor(self):
2401 return self
2402
2403 def _GroupByName(self, name):
2404 if not 'mainGroup' in self._properties:
2405 self.SetProperty('mainGroup', PBXGroup())
2406
2407 main_group = self._properties['mainGroup']
2408 group = main_group.GetChildByName(name)
2409 if group == None:
2410 group = PBXGroup({'name': name})
2411 main_group.AppendChild(group)
2412
2413 return group
2414
2415 # SourceGroup and ProductsGroup are created by default in Xcode's own
2416 # templates.
2417 def SourceGroup(self):
2418 return self._GroupByName('Source')
2419
2420 def ProductsGroup(self):
2421 return self._GroupByName('Products')
2422
2423 # IntermediatesGroup is used to collect source-like files that are generated
2424 # by rules or script phases and are placed in intermediate directories such
2425 # as DerivedSources.
2426 def IntermediatesGroup(self):
2427 return self._GroupByName('Intermediates')
2428
2429 # FrameworksGroup and ProjectsGroup are top-level groups used to collect
2430 # frameworks and projects.
2431 def FrameworksGroup(self):
2432 return self._GroupByName('Frameworks')
2433
2434 def ProjectsGroup(self):
2435 return self._GroupByName('Projects')
2436
2437 def RootGroupForPath(self, path):
2438 """Returns a PBXGroup child of this object to which path should be added.
2439
2440 This method is intended to choose between SourceGroup and
2441 IntermediatesGroup on the basis of whether path is present in a source
2442 directory or an intermediates directory. For the purposes of this
2443 determination, any path located within a derived file directory such as
2444 PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
2445 directory.
2446
2447 The returned value is a two-element tuple. The first element is the
2448 PBXGroup, and the second element specifies whether that group should be
2449 organized hierarchically (True) or as a single flat list (False).
2450 """
2451
2452 # TODO(mark): make this a class variable and bind to self on call?
2453 # Also, this list is nowhere near exhaustive.
2454 # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by
2455 # gyp.generator.xcode. There should probably be some way for that module
2456 # to push the names in, rather than having to hard-code them here.
2457 source_tree_groups = {
2458 'DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
2459 'INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
2460 'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
2461 'SHARED_INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
2462 }
2463
2464 (source_tree, path) = SourceTreeAndPathFromPath(path)
2465 if source_tree != None and source_tree in source_tree_groups:
2466 (group_func, hierarchical) = source_tree_groups[source_tree]
2467 group = group_func()
2468 return (group, hierarchical)
2469
2470 # TODO(mark): make additional choices based on file extension.
2471
2472 return (self.SourceGroup(), True)
2473
2474 def AddOrGetFileInRootGroup(self, path):
2475 """Returns a PBXFileReference corresponding to path in the correct group
2476 according to RootGroupForPath's heuristics.
2477
2478 If an existing PBXFileReference for path exists, it will be returned.
2479 Otherwise, one will be created and returned.
2480 """
2481
2482 (group, hierarchical) = self.RootGroupForPath(path)
2483 return group.AddOrGetFileByPath(path, hierarchical)
2484
2485 def RootGroupsTakeOverOnlyChildren(self, recurse=False):
2486 """Calls TakeOverOnlyChild for all groups in the main group."""
2487
2488 for group in self._properties['mainGroup']._properties['children']:
2489 if isinstance(group, PBXGroup):
2490 group.TakeOverOnlyChild(recurse)
2491
  def SortGroups(self):
    """Sorts the mainGroup hierarchy into Xcode's conventional order."""
    # Sort the children of the mainGroup (like "Source" and "Products")
    # according to their defined order.
    # NOTE: sorted(cmp=...) is Python 2-only syntax.
    self._properties['mainGroup']._properties['children'] = \
        sorted(self._properties['mainGroup']._properties['children'],
               cmp=lambda x,y: x.CompareRootGroup(y))

    # Sort everything else by putting group before files, and going
    # alphabetically by name within sections of groups and files.  SortGroup
    # is recursive.
    for group in self._properties['mainGroup']._properties['children']:
      if not isinstance(group, PBXGroup):
        continue

      if group.Name() == 'Products':
        # The Products group is a special case.  Instead of sorting
        # alphabetically, sort things in the order of the targets that
        # produce the products.  To do this, just build up a new list of
        # products based on the targets.
        products = []
        for target in self._properties['targets']:
          if not isinstance(target, PBXNativeTarget):
            continue
          product = target._properties['productReference']
          # Make sure that the product is already in the products group.
          assert product in group._properties['children']
          products.append(product)

        # Make sure that this process doesn't miss anything that was already
        # in the products group.
        assert len(products) == len(group._properties['children'])
        group._properties['children'] = products
      else:
        group.SortGroup()
2526
2527 def AddOrGetProjectReference(self, other_pbxproject):
2528 """Add a reference to another project file (via PBXProject object) to this
2529 one.
2530
2531 Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in
2532 this project file that contains a PBXReferenceProxy object for each
2533 product of each PBXNativeTarget in the other project file. ProjectRef is
2534 a PBXFileReference to the other project file.
2535
2536 If this project file already references the other project file, the
2537 existing ProductGroup and ProjectRef are returned. The ProductGroup will
2538 still be updated if necessary.
2539 """
2540
2541 if not 'projectReferences' in self._properties:
2542 self._properties['projectReferences'] = []
2543
2544 product_group = None
2545 project_ref = None
2546
2547 if not other_pbxproject in self._other_pbxprojects:
2548 # This project file isn't yet linked to the other one. Establish the
2549 # link.
2550 product_group = PBXGroup({'name': 'Products'})
2551
2552 # ProductGroup is strong.
2553 product_group.parent = self
2554
2555 # There's nothing unique about this PBXGroup, and if left alone, it will
2556 # wind up with the same set of hashables as all other PBXGroup objects
2557 # owned by the projectReferences list. Add the hashables of the
2558 # remote PBXProject that it's related to.
2559 product_group._hashables.extend(other_pbxproject.Hashables())
2560
2561 # The other project reports its path as relative to the same directory
2562 # that this project's path is relative to. The other project's path
2563 # is not necessarily already relative to this project. Figure out the
2564 # pathname that this project needs to use to refer to the other one.
2565 this_path = posixpath.dirname(self.Path())
2566 projectDirPath = self.GetProperty('projectDirPath')
2567 if projectDirPath:
2568 if posixpath.isabs(projectDirPath[0]):
2569 this_path = projectDirPath
2570 else:
2571 this_path = posixpath.join(this_path, projectDirPath)
2572 other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
2573
2574 # ProjectRef is weak (it's owned by the mainGroup hierarchy).
2575 project_ref = PBXFileReference({
2576 'lastKnownFileType': 'wrapper.pb-project',
2577 'path': other_path,
2578 'sourceTree': 'SOURCE_ROOT',
2579 })
2580 self.ProjectsGroup().AppendChild(project_ref)
2581
2582 ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref}
2583 self._other_pbxprojects[other_pbxproject] = ref_dict
2584 self.AppendProperty('projectReferences', ref_dict)
2585
2586 # Xcode seems to sort this list case-insensitively
2587 self._properties['projectReferences'] = \
2588 sorted(self._properties['projectReferences'], cmp=lambda x,y:
2589 cmp(x['ProjectRef'].Name().lower(),
2590 y['ProjectRef'].Name().lower()))
2591 else:
2592 # The link already exists. Pull out the relevnt data.
2593 project_ref_dict = self._other_pbxprojects[other_pbxproject]
2594 product_group = project_ref_dict['ProductGroup']
2595 project_ref = project_ref_dict['ProjectRef']
2596
2597 self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
2598
2599 return [product_group, project_ref]
2600
2601 def _SetUpProductReferences(self, other_pbxproject, product_group,
2602 project_ref):
2603 # TODO(mark): This only adds references to products in other_pbxproject
2604 # when they don't exist in this pbxproject. Perhaps it should also
2605 # remove references from this pbxproject that are no longer present in
2606 # other_pbxproject. Perhaps it should update various properties if they
2607 # change.
2608 for target in other_pbxproject._properties['targets']:
2609 if not isinstance(target, PBXNativeTarget):
2610 continue
2611
2612 other_fileref = target._properties['productReference']
2613 if product_group.GetChildByRemoteObject(other_fileref) == None:
2614 # Xcode sets remoteInfo to the name of the target and not the name
2615 # of its product, despite this proxy being a reference to the product.
2616 container_item = PBXContainerItemProxy({
2617 'containerPortal': project_ref,
2618 'proxyType': 2,
2619 'remoteGlobalIDString': other_fileref,
2620 'remoteInfo': target.Name()
2621 })
2622 # TODO(mark): Does sourceTree get copied straight over from the other
2623 # project? Can the other project ever have lastKnownFileType here
2624 # instead of explicitFileType? (Use it if so?) Can path ever be
2625 # unset? (I don't think so.) Can other_fileref have name set, and
2626 # does it impact the PBXReferenceProxy if so? These are the questions
2627 # that perhaps will be answered one day.
2628 reference_proxy = PBXReferenceProxy({
2629 'fileType': other_fileref._properties['explicitFileType'],
2630 'path': other_fileref._properties['path'],
2631 'sourceTree': other_fileref._properties['sourceTree'],
2632 'remoteRef': container_item,
2633 })
2634
2635 product_group.AppendChild(reference_proxy)
2636
2637 def SortRemoteProductReferences(self):
2638 # For each remote project file, sort the associated ProductGroup in the
2639 # same order that the targets are sorted in the remote project file. This
2640 # is the sort order used by Xcode.
2641
2642 def CompareProducts(x, y, remote_products):
2643 # x and y are PBXReferenceProxy objects. Go through their associated
2644 # PBXContainerItem to get the remote PBXFileReference, which will be
2645 # present in the remote_products list.
2646 x_remote = x._properties['remoteRef']._properties['remoteGlobalIDString']
2647 y_remote = y._properties['remoteRef']._properties['remoteGlobalIDString']
2648 x_index = remote_products.index(x_remote)
2649 y_index = remote_products.index(y_remote)
2650
2651 # Use the order of each remote PBXFileReference in remote_products to
2652 # determine the sort order.
2653 return cmp(x_index, y_index)
2654
2655 for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems():
2656 # Build up a list of products in the remote project file, ordered the
2657 # same as the targets that produce them.
2658 remote_products = []
2659 for target in other_pbxproject._properties['targets']:
2660 if not isinstance(target, PBXNativeTarget):
2661 continue
2662 remote_products.append(target._properties['productReference'])
2663
2664 # Sort the PBXReferenceProxy children according to the list of remote
2665 # products.
2666 product_group = ref_dict['ProductGroup']
2667 product_group._properties['children'] = sorted(
2668 product_group._properties['children'],
2669 cmp=lambda x, y: CompareProducts(x, y, remote_products))
2670
2671
2672 class XCProjectFile(XCObject):
2673 _schema = XCObject._schema.copy()
2674 _schema.update({
2675 'archiveVersion': [0, int, 0, 1, 1],
2676 'classes': [0, dict, 0, 1, {}],
2677 'objectVersion': [0, int, 0, 1, 45],
2678 'rootObject': [0, PBXProject, 1, 1],
2679 })
2680
2681 def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
2682 # Although XCProjectFile is implemented here as an XCObject, it's not a
2683 # proper object in the Xcode sense, and it certainly doesn't have its own
2684 # ID. Pass through an attempt to update IDs to the real root object.
2685 if recursive:
2686 self._properties['rootObject'].ComputeIDs(recursive, overwrite, hash)
2687
2688 def Print(self, file=sys.stdout):
2689 self.VerifyHasRequiredProperties()
2690
2691 # Add the special "objects" property, which will be caught and handled
2692 # separately during printing. This structure allows a fairly standard
2693 # loop do the normal printing.
2694 self._properties['objects'] = {}
2695 self._XCPrint(file, 0, '// !$*UTF8*$!\n')
2696 if self._should_print_single_line:
2697 self._XCPrint(file, 0, '{ ')
2698 else:
2699 self._XCPrint(file, 0, '{\n')
2700 for property, value in sorted(self._properties.iteritems(),
2701 cmp=lambda x, y: cmp(x, y)):
2702 if property == 'objects':
2703 self._PrintObjects(file)
2704 else:
2705 self._XCKVPrint(file, 1, property, value)
2706 self._XCPrint(file, 0, '}\n')
2707 del self._properties['objects']
2708
2709 def _PrintObjects(self, file):
2710 if self._should_print_single_line:
2711 self._XCPrint(file, 0, 'objects = {')
2712 else:
2713 self._XCPrint(file, 1, 'objects = {\n')
2714
2715 objects_by_class = {}
2716 for object in self.Descendants():
2717 if object == self:
2718 continue
2719 class_name = object.__class__.__name__
2720 if not class_name in objects_by_class:
2721 objects_by_class[class_name] = []
2722 objects_by_class[class_name].append(object)
2723
2724 for class_name in sorted(objects_by_class):
2725 self._XCPrint(file, 0, '\n')
2726 self._XCPrint(file, 0, '/* Begin ' + class_name + ' section */\n')
2727 for object in sorted(objects_by_class[class_name],
2728 cmp=lambda x, y: cmp(x.id, y.id)):
2729 object.Print(file)
2730 self._XCPrint(file, 0, '/* End ' + class_name + ' section */\n')
2731
2732 if self._should_print_single_line:
2733 self._XCPrint(file, 0, '}; ')
2734 else:
2735 self._XCPrint(file, 1, '};\n')
+0
-81
third_party/gyp/samples/samples less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import os.path
7 import shutil
8 import sys
9
10
11 gyps = [
12 'app/app.gyp',
13 'base/base.gyp',
14 'build/temp_gyp/googleurl.gyp',
15 'build/all.gyp',
16 'build/common.gypi',
17 'build/external_code.gypi',
18 'chrome/test/security_tests/security_tests.gyp',
19 'chrome/third_party/hunspell/hunspell.gyp',
20 'chrome/chrome.gyp',
21 'media/media.gyp',
22 'net/net.gyp',
23 'printing/printing.gyp',
24 'sdch/sdch.gyp',
25 'skia/skia.gyp',
26 'testing/gmock.gyp',
27 'testing/gtest.gyp',
28 'third_party/bzip2/bzip2.gyp',
29 'third_party/icu38/icu38.gyp',
30 'third_party/libevent/libevent.gyp',
31 'third_party/libjpeg/libjpeg.gyp',
32 'third_party/libpng/libpng.gyp',
33 'third_party/libxml/libxml.gyp',
34 'third_party/libxslt/libxslt.gyp',
35 'third_party/lzma_sdk/lzma_sdk.gyp',
36 'third_party/modp_b64/modp_b64.gyp',
37 'third_party/npapi/npapi.gyp',
38 'third_party/sqlite/sqlite.gyp',
39 'third_party/zlib/zlib.gyp',
40 'v8/tools/gyp/v8.gyp',
41 'webkit/activex_shim/activex_shim.gyp',
42 'webkit/activex_shim_dll/activex_shim_dll.gyp',
43 'webkit/build/action_csspropertynames.py',
44 'webkit/build/action_cssvaluekeywords.py',
45 'webkit/build/action_jsconfig.py',
46 'webkit/build/action_makenames.py',
47 'webkit/build/action_maketokenizer.py',
48 'webkit/build/action_useragentstylesheets.py',
49 'webkit/build/rule_binding.py',
50 'webkit/build/rule_bison.py',
51 'webkit/build/rule_gperf.py',
52 'webkit/tools/test_shell/test_shell.gyp',
53 'webkit/webkit.gyp',
54 ]
55
56
57 def Main(argv):
58 if len(argv) != 3 or argv[1] not in ['push', 'pull']:
59 print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0]
60 return 1
61
62 path_to_chrome = argv[2]
63
64 for g in gyps:
65 chrome_file = os.path.join(path_to_chrome, g)
66 local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1])
67 if argv[1] == 'push':
68 print 'Copying %s to %s' % (local_file, chrome_file)
69 shutil.copyfile(local_file, chrome_file)
70 elif argv[1] == 'pull':
71 print 'Copying %s to %s' % (chrome_file, local_file)
72 shutil.copyfile(chrome_file, local_file)
73 else:
74 assert False
75
76 return 0
77
78
79 if __name__ == '__main__':
80 sys.exit(Main(sys.argv))
+0
-5
third_party/gyp/samples/samples.bat less more
0 @rem Copyright (c) 2009 Google Inc. All rights reserved.
1 @rem Use of this source code is governed by a BSD-style license that can be
2 @rem found in the LICENSE file.
3
4 @python %~dp0/samples %*
+0
-26
third_party/gyp/setup.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 from distutils.core import setup
7 from distutils.command.install import install
8 from distutils.command.install_lib import install_lib
9 from distutils.command.install_scripts import install_scripts
10
11 setup(
12 name='gyp',
13 version='0.1',
14 description='Generate Your Projects',
15 author='Chromium Authors',
16 author_email='chromium-dev@googlegroups.com',
17 url='http://code.google.com/p/gyp',
18 package_dir = {'': 'pylib'},
19 packages=['gyp', 'gyp.generator'],
20
21 scripts = ['gyp'],
22 cmdclass = {'install': install,
23 'install_lib': install_lib,
24 'install_scripts': install_scripts},
25 )
+0
-94
third_party/gyp/test/actions/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple actions when using an explicit build target of 'all'.
8 """
9
10 import glob
11 import os
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 test.run_gyp('actions.gyp', chdir='src')
17
18 test.relocate('src', 'relocate/src')
19
20 # Test that an "always run" action increases a counter on multiple invocations,
21 # and that a dependent action updates in step.
22 test.build('actions.gyp', test.ALL, chdir='relocate/src')
23 test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
24 test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
25 test.build('actions.gyp', test.ALL, chdir='relocate/src')
26 test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
27 test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
28
29 # The "always run" action only counts to 2, but the dependent target will count
30 # forever if it's allowed to run. This verifies that the dependent target only
31 # runs when the "always run" action generates new output, not just because the
32 # "always run" ran.
33 test.build('actions.gyp', test.ALL, chdir='relocate/src')
34 test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
35 test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
36
37 expect = """\
38 Hello from program.c
39 Hello from make-prog1.py
40 Hello from make-prog2.py
41 """
42
43 if test.format == 'xcode':
44 chdir = 'relocate/src/subdir1'
45 else:
46 chdir = 'relocate/src'
47 test.run_built_executable('program', chdir=chdir, stdout=expect)
48
49
50 test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
51
52
53 expect = "Hello from generate_main.py\n"
54
55 if test.format == 'xcode':
56 chdir = 'relocate/src/subdir3'
57 else:
58 chdir = 'relocate/src'
59 test.run_built_executable('null_input', chdir=chdir, stdout=expect)
60
61
62 # Clean out files which may have been created if test.ALL was run.
63 def clean_dep_files():
64 for file in (glob.glob('relocate/src/dep_*.txt') +
65 glob.glob('relocate/src/deps_all_done_*.txt')):
66 if os.path.exists(file):
67 os.remove(file)
68
69 # Confirm our clean.
70 clean_dep_files()
71 test.must_not_exist('relocate/src/dep_1.txt')
72 test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
73
74 # Make sure all deps finish before an action is run on a 'None' target.
75 # If using the Make builder, add -j to make things more difficult.
76 arguments = []
77 if test.format == 'make':
78 arguments = ['-j']
79 test.build('actions.gyp', 'action_with_dependencies_123', chdir='relocate/src',
80 arguments=arguments)
81 test.must_exist('relocate/src/deps_all_done_first_123.txt')
82
83 # Try again with a target that has deps in reverse. Output files from
84 # previous tests deleted. Confirm this execution did NOT run the ALL
85 # target which would mess up our dep tests.
86 clean_dep_files()
87 test.build('actions.gyp', 'action_with_dependencies_321', chdir='relocate/src',
88 arguments=arguments)
89 test.must_exist('relocate/src/deps_all_done_first_321.txt')
90 test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
91
92
93 test.pass_test()
+0
-61
third_party/gyp/test/actions/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple actions when using the default build target.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('actions.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 # Test that an "always run" action increases a counter on multiple invocations,
19 # and that a dependent action updates in step.
20 test.build('actions.gyp', chdir='relocate/src')
21 test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
22 test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
23 test.build('actions.gyp', chdir='relocate/src')
24 test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
25 test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
26
27 # The "always run" action only counts to 2, but the dependent target will count
28 # forever if it's allowed to run. This verifies that the dependent target only
29 # runs when the "always run" action generates new output, not just because the
30 # "always run" ran.
31 test.build('actions.gyp', test.ALL, chdir='relocate/src')
32 test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
33 test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
34
35 expect = """\
36 Hello from program.c
37 Hello from make-prog1.py
38 Hello from make-prog2.py
39 """
40
41 if test.format == 'xcode':
42 chdir = 'relocate/src/subdir1'
43 else:
44 chdir = 'relocate/src'
45 test.run_built_executable('program', chdir=chdir, stdout=expect)
46
47
48 test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
49
50
51 expect = "Hello from generate_main.py\n"
52
53 if test.format == 'xcode':
54 chdir = 'relocate/src/subdir3'
55 else:
56 chdir = 'relocate/src'
57 test.run_built_executable('null_input', chdir=chdir, stdout=expect)
58
59
60 test.pass_test()
+0
-24
third_party/gyp/test/actions/gyptest-errors.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies behavior for different action configuration errors:
8 exit status of 1, and the expected error message must be in stderr.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15
16 test.run_gyp('action_missing_name.gyp', chdir='src', status=1, stderr=None)
17 expect = [
18 "Anonymous action in target broken_actions2. An action must have an 'action_name' field.",
19 ]
20 test.must_contain_all_lines(test.stderr(), expect)
21
22
23 test.pass_test()
+0
-24
third_party/gyp/test/actions/src/action_missing_name.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'broken_actions2',
8 'type': 'none',
9 'actions': [
10 {
11 'inputs': [
12 'no_name.input',
13 ],
14 'action': [
15 'python',
16 '-c',
17 'print \'missing name\'',
18 ],
19 },
20 ],
21 },
22 ],
23 }
+0
-114
third_party/gyp/test/actions/src/actions.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'pull_in_all_actions',
8 'type': 'none',
9 'dependencies': [
10 'subdir1/executable.gyp:*',
11 'subdir2/none.gyp:*',
12 'subdir3/null_input.gyp:*',
13 ],
14 },
15 {
16 'target_name': 'depend_on_always_run_action',
17 'type': 'none',
18 'dependencies': [ 'subdir1/executable.gyp:counter' ],
19 'actions': [
20 {
21 'action_name': 'use_always_run_output',
22 'inputs': [
23 'subdir1/actions-out/action-counter.txt',
24 'subdir1/counter.py',
25 ],
26 'outputs': [
27 'subdir1/actions-out/action-counter_2.txt',
28 ],
29 'action': [
30 'python', 'subdir1/counter.py', '<(_outputs)',
31 ],
32 # Allows the test to run without hermetic cygwin on windows.
33 'msvs_cygwin_shell': 0,
34 },
35 ],
36 },
37
38 # Three deps which don't finish immediately.
39 # Each one has a small delay then creates a file.
40 # Delays are 1.0, 1.1, and 2.0 seconds.
41 {
42 'target_name': 'dep_1',
43 'type': 'none',
44 'actions': [{
45 'inputs': [ 'actions.gyp' ],
46 'outputs': [ 'dep_1.txt' ],
47 'action_name': 'dep_1',
48 'action': [ 'python', '-c',
49 'import time; time.sleep(1); open(\'dep_1.txt\', \'w\')' ],
50 # Allows the test to run without hermetic cygwin on windows.
51 'msvs_cygwin_shell': 0,
52 }],
53 },
54 {
55 'target_name': 'dep_2',
56 'type': 'none',
57 'actions': [{
58 'inputs': [ 'actions.gyp' ],
59 'outputs': [ 'dep_2.txt' ],
60 'action_name': 'dep_2',
61 'action': [ 'python', '-c',
62 'import time; time.sleep(1.1); open(\'dep_2.txt\', \'w\')' ],
63 # Allows the test to run without hermetic cygwin on windows.
64 'msvs_cygwin_shell': 0,
65 }],
66 },
67 {
68 'target_name': 'dep_3',
69 'type': 'none',
70 'actions': [{
71 'inputs': [ 'actions.gyp' ],
72 'outputs': [ 'dep_3.txt' ],
73 'action_name': 'dep_3',
74 'action': [ 'python', '-c',
75 'import time; time.sleep(2.0); open(\'dep_3.txt\', \'w\')' ],
76 # Allows the test to run without hermetic cygwin on windows.
77 'msvs_cygwin_shell': 0,
78 }],
79 },
80
81 # An action which assumes the deps have completed.
82 # Does NOT list the output files of it's deps as inputs.
83 # On success create the file deps_all_done_first.txt.
84 {
85 'target_name': 'action_with_dependencies_123',
86 'type': 'none',
87 'dependencies': [ 'dep_1', 'dep_2', 'dep_3' ],
88 'actions': [{
89 'inputs': [ 'actions.gyp' ],
90 'outputs': [ 'deps_all_done_first_123.txt' ],
91 'action_name': 'action_with_dependencies_123',
92 'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ],
93 # Allows the test to run without hermetic cygwin on windows.
94 'msvs_cygwin_shell': 0,
95 }],
96 },
97 # Same as above but with deps in reverse.
98 {
99 'target_name': 'action_with_dependencies_321',
100 'type': 'none',
101 'dependencies': [ 'dep_3', 'dep_2', 'dep_1' ],
102 'actions': [{
103 'inputs': [ 'actions.gyp' ],
104 'outputs': [ 'deps_all_done_first_321.txt' ],
105 'action_name': 'action_with_dependencies_321',
106 'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ],
107 # Allows the test to run without hermetic cygwin on windows.
108 'msvs_cygwin_shell': 0,
109 }],
110 },
111
112 ],
113 }
+0
-16
third_party/gyp/test/actions/src/confirm-dep-files.py less more
0 #!/usr/bin/python
1
2 # Confirm presence of files generated by our targets we depend on.
3 # If they exist, create a new file.
4 #
5 # Note target's input files are explicitly NOT defined in the gyp file
6 # so they can't easily be passed to this script as args.
7
8 import os
9 import sys
10
11 outfile = sys.argv[1] # Example value we expect: deps_all_done_first_123.txt
12 if (os.path.exists("dep_1.txt") and
13 os.path.exists("dep_2.txt") and
14 os.path.exists("dep_3.txt")):
15 open(outfile, "w")
+0
-46
third_party/gyp/test/actions/src/subdir1/counter.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2010 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7 import time
8
9 output = sys.argv[1]
10 persistoutput = "%s.persist" % sys.argv[1]
11
12 count = 0
13 try:
14 count = open(persistoutput, 'r').read()
15 except:
16 pass
17 count = int(count) + 1
18
19 if len(sys.argv) > 2:
20 max_count = int(sys.argv[2])
21 if count > max_count:
22 count = max_count
23
24 oldcount = 0
25 try:
26 oldcount = open(output, 'r').read()
27 except:
28 pass
29
30 # Save the count in a file that is undeclared, and thus hidden, to gyp. We need
31 # to do this because, prior to running commands, scons deletes any declared
32 # outputs, so we would lose our count if we just wrote to the given output file.
33 # (The other option is to use Precious() in the scons generator, but that seems
34 # too heavy-handed just to support this somewhat unrealistic test case, and
35 # might lead to unintended side-effects).
36 open(persistoutput, 'w').write('%d' % (count))
37
38 # Only write the given output file if the count has changed.
39 if int(oldcount) != count:
40 open(output, 'w').write('%d' % (count))
41 # Sleep so the next run changes the file time sufficiently to make the build
42 # detect the file as changed.
43 time.sleep(1)
44
45 sys.exit(0)
+0
-74
third_party/gyp/test/actions/src/subdir1/executable.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program',
8 'type': 'executable',
9 'msvs_cygwin_shell': 0,
10 'sources': [
11 'program.c',
12 ],
13 'actions': [
14 {
15 'action_name': 'make-prog1',
16 'inputs': [
17 'make-prog1.py',
18 ],
19 'outputs': [
20 '<(INTERMEDIATE_DIR)/prog1.c',
21 ],
22 'action': [
23 'python', '<(_inputs)', '<@(_outputs)',
24 ],
25 'process_outputs_as_sources': 1,
26 },
27 {
28 'action_name': 'make-prog2',
29 'inputs': [
30 'make-prog2.py',
31 ],
32 'outputs': [
33 'actions-out/prog2.c',
34 ],
35 'action': [
36 'python', '<(_inputs)', '<@(_outputs)',
37 ],
38 'process_outputs_as_sources': 1,
39 # Allows the test to run without hermetic cygwin on windows.
40 'msvs_cygwin_shell': 0,
41 },
42 ],
43 },
44 {
45 'target_name': 'counter',
46 'type': 'none',
47 'actions': [
48 {
49 # This action should always run, regardless of whether or not it's
50 # inputs or the command-line change. We do this by creating a dummy
51 # first output, which is always missing, thus causing the build to
52 # always try to recreate it. Actual output files should be listed
53 # after the dummy one, and dependent targets should list the real
54 # output(s) in their inputs
55 # (see '../actions.gyp:depend_on_always_run_action').
56 'action_name': 'action_counter',
57 'inputs': [
58 'counter.py',
59 ],
60 'outputs': [
61 'actions-out/action-counter.txt.always',
62 'actions-out/action-counter.txt',
63 ],
64 'action': [
65 'python', '<(_inputs)', 'actions-out/action-counter.txt', '2',
66 ],
67 # Allows the test to run without hermetic cygwin on windows.
68 'msvs_cygwin_shell': 0,
69 },
70 ],
71 },
72 ],
73 }
+0
-20
third_party/gyp/test/actions/src/subdir1/make-prog1.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = r"""
9 #include <stdio.h>
10
11 void prog1(void)
12 {
13 printf("Hello from make-prog1.py\n");
14 }
15 """
16
17 open(sys.argv[1], 'w').write(contents)
18
19 sys.exit(0)
+0
-20
third_party/gyp/test/actions/src/subdir1/make-prog2.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = r"""
9 #include <stdio.h>
10
11 void prog2(void)
12 {
13 printf("Hello from make-prog2.py\n");
14 }
15 """
16
17 open(sys.argv[1], 'w').write(contents)
18
19 sys.exit(0)
+0
-12
third_party/gyp/test/actions/src/subdir1/program.c less more
0 #include <stdio.h>
1
2 extern void prog1(void);
3 extern void prog2(void);
4
5 int main(int argc, char *argv[])
6 {
7 printf("Hello from program.c\n");
8 prog1();
9 prog2();
10 return 0;
11 }
+0
-11
third_party/gyp/test/actions/src/subdir2/make-file.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = "Hello from make-file.py\n"
9
10 open(sys.argv[1], 'wb').write(contents)
+0
-33
third_party/gyp/test/actions/src/subdir2/none.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'file',
8 'type': 'none',
9 'msvs_cygwin_shell': 0,
10 'actions': [
11 {
12 'action_name': 'make-file',
13 'inputs': [
14 'make-file.py',
15 ],
16 'outputs': [
17 'file.out',
18 # TODO: enhance testing infrastructure to test this
19 # without having to hard-code the intermediate dir paths.
20 #'<(INTERMEDIATE_DIR)/file.out',
21 ],
22 'action': [
23 'python', '<(_inputs)', '<@(_outputs)',
24 ],
25 'process_outputs_as_sources': 1,
26 # Allows the test to run without hermetic cygwin on windows.
27 'msvs_cygwin_shell': 0,
28 }
29 ],
30 },
31 ],
32 }
+0
-21
third_party/gyp/test/actions/src/subdir3/generate_main.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = """
9 #include <stdio.h>
10
11 int main(int argc, char *argv[])
12 {
13 printf("Hello from generate_main.py\\n");
14 return 0;
15 }
16 """
17
18 open(sys.argv[1], 'w').write(contents)
19
20 sys.exit(0)
+0
-29
third_party/gyp/test/actions/src/subdir3/null_input.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'null_input',
8 'type': 'executable',
9 'msvs_cygwin_shell': 0,
10 'actions': [
11 {
12 'action_name': 'generate_main',
13 'process_outputs_as_sources': 1,
14 'inputs': [],
15 'outputs': [
16 '<(INTERMEDIATE_DIR)/main.c',
17 ],
18 'action': [
19 # TODO: we can't just use <(_outputs) here?!
20 'python', 'generate_main.py', '<(INTERMEDIATE_DIR)/main.c',
21 ],
22 # Allows the test to run without hermetic cygwin on windows.
23 'msvs_cygwin_shell': 0,
24 },
25 ],
26 },
27 ],
28 }
+0
-23
third_party/gyp/test/actions-bare/gyptest-bare.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies actions which are not depended on by other targets get executed.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('bare.gyp', chdir='src')
15 test.relocate('src', 'relocate/src')
16 test.build('bare.gyp', chdir='relocate/src')
17
18 file_content = 'Hello from bare.py\n'
19
20 test.built_file_must_match('out.txt', file_content, chdir='relocate/src')
21
22 test.pass_test()
+0
-25
third_party/gyp/test/actions-bare/src/bare.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'bare',
8 'type': 'none',
9 'actions': [
10 {
11 'action_name': 'action1',
12 'inputs': [
13 'bare.py',
14 ],
15 'outputs': [
16 '<(PRODUCT_DIR)/out.txt',
17 ],
18 'action': ['python', 'bare.py', '<(PRODUCT_DIR)/out.txt'],
19 'msvs_cygwin_shell': 0,
20 },
21 ],
22 },
23 ],
24 }
+0
-11
third_party/gyp/test/actions-bare/src/bare.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 f = open(sys.argv[1], 'wb')
9 f.write('Hello from bare.py\n')
10 f.close()
+0
-26
third_party/gyp/test/actions-subdir/gyptest-action.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Test actions that output to PRODUCT_DIR.
8 """
9
10 import TestGyp
11
12 # TODO fix this for xcode: http://code.google.com/p/gyp/issues/detail?id=88
13 test = TestGyp.TestGyp(formats=['!xcode'])
14
15 test.run_gyp('none.gyp', chdir='src')
16
17 test.build('none.gyp', test.ALL, chdir='src')
18
19 file_content = 'Hello from make-file.py\n'
20 subdir_file_content = 'Hello from make-subdir-file.py\n'
21
22 test.built_file_must_match('file.out', file_content, chdir='src')
23 test.built_file_must_match('subdir_file.out', subdir_file_content, chdir='src')
24
25 test.pass_test()
+0
-11
third_party/gyp/test/actions-subdir/src/make-file.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = 'Hello from make-file.py\n'
9
10 open(sys.argv[1], 'wb').write(contents)
+0
-31
third_party/gyp/test/actions-subdir/src/none.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'file',
8 'type': 'none',
9 'msvs_cygwin_shell': 0,
10 'actions': [
11 {
12 'action_name': 'make-file',
13 'inputs': [
14 'make-file.py',
15 ],
16 'outputs': [
17 '<(PRODUCT_DIR)/file.out',
18 ],
19 'action': [
20 'python', '<(_inputs)', '<@(_outputs)',
21 ],
22 'process_outputs_as_sources': 1,
23 }
24 ],
25 'dependencies': [
26 'subdir/subdir.gyp:subdir_file',
27 ],
28 },
29 ],
30 }
+0
-11
third_party/gyp/test/actions-subdir/src/subdir/make-subdir-file.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = 'Hello from make-subdir-file.py\n'
9
10 open(sys.argv[1], 'wb').write(contents)
+0
-28
third_party/gyp/test/actions-subdir/src/subdir/subdir.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'subdir_file',
8 'type': 'none',
9 'msvs_cygwin_shell': 0,
10 'actions': [
11 {
12 'action_name': 'make-subdir-file',
13 'inputs': [
14 'make-subdir-file.py',
15 ],
16 'outputs': [
17 '<(PRODUCT_DIR)/subdir_file.out',
18 ],
19 'action': [
20 'python', '<(_inputs)', '<@(_outputs)',
21 ],
22 'process_outputs_as_sources': 1,
23 }
24 ],
25 },
26 ],
27 }
+0
-55
third_party/gyp/test/additional-targets/gyptest-additional.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple actions when using an explicit build target of 'all'.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('all.gyp', chdir='src')
15 test.relocate('src', 'relocate/src')
16
17 # Build all.
18 test.build('all.gyp', chdir='relocate/src')
19
20 if test.format=='xcode':
21 chdir = 'relocate/src/dir1'
22 else:
23 chdir = 'relocate/src'
24
25 # Output is as expected.
26 file_content = 'Hello from emit.py\n'
27 test.built_file_must_match('out2.txt', file_content, chdir=chdir)
28
29 test.built_file_must_not_exist('out.txt', chdir='relocate/src')
30 test.built_file_must_not_exist('foolib1',
31 type=test.SHARED_LIB,
32 chdir=chdir)
33
34 # TODO(mmoss) Make consistent with scons, with 'dir1' before 'out/Default'?
35 if test.format == 'make':
36 chdir='relocate/src'
37 else:
38 chdir='relocate/src/dir1'
39
40 # Build the action explicitly.
41 test.build('actions.gyp', 'action1_target', chdir=chdir)
42
43 # Check that things got run.
44 file_content = 'Hello from emit.py\n'
45 test.built_file_must_exist('out.txt', chdir=chdir)
46
47 # Build the shared library explicitly.
48 test.build('actions.gyp', 'foolib1', chdir=chdir)
49
50 test.built_file_must_exist('foolib1',
51 type=test.SHARED_LIB,
52 chdir=chdir)
53
54 test.pass_test()
+0
-13
third_party/gyp/test/additional-targets/src/all.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'all_targets',
8 'type': 'none',
9 'dependencies': ['dir1/actions.gyp:*'],
10 },
11 ],
12 }
+0
-56
third_party/gyp/test/additional-targets/src/dir1/actions.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'action1_target',
8 'type': 'none',
9 'suppress_wildcard': 1,
10 'actions': [
11 {
12 'action_name': 'action1',
13 'inputs': [
14 'emit.py',
15 ],
16 'outputs': [
17 '<(PRODUCT_DIR)/out.txt',
18 ],
19 'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out.txt'],
20 'msvs_cygwin_shell': 0,
21 },
22 ],
23 },
24 {
25 'target_name': 'action2_target',
26 'type': 'none',
27 'actions': [
28 {
29 'action_name': 'action2',
30 'inputs': [
31 'emit.py',
32 ],
33 'outputs': [
34 '<(PRODUCT_DIR)/out2.txt',
35 ],
36 'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out2.txt'],
37 'msvs_cygwin_shell': 0,
38 },
39 ],
40 },
41 {
42 'target_name': 'foolib1',
43 'type': 'shared_library',
44 'suppress_wildcard': 1,
45 'sources': ['lib1.c'],
46 },
47 ],
48 'conditions': [
49 ['OS=="linux"', {
50 'target_defaults': {
51 'cflags': ['-fPIC'],
52 },
53 }],
54 ],
55 }
+0
-11
third_party/gyp/test/additional-targets/src/dir1/emit.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 f = open(sys.argv[1], 'wb')
9 f.write('Hello from emit.py\n')
10 f.close()
+0
-6
third_party/gyp/test/additional-targets/src/dir1/lib1.c less more
0 #ifdef _WIN32
1 __declspec(dllexport)
2 #endif
3 int func1(void) {
4 return 42;
5 }
+0
-31
third_party/gyp/test/assembly/gyptest-assembly.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that .hpp files are ignored when included in the source list on all
8 platforms.
9 """
10
11 import sys
12 import TestGyp
13
14 # TODO(bradnelson): get this working for windows.
15 test = TestGyp.TestGyp(formats=['make', 'scons', 'xcode'])
16
17 test.run_gyp('assembly.gyp', chdir='src')
18
19 test.relocate('src', 'relocate/src')
20
21 test.build('assembly.gyp', test.ALL, chdir='relocate/src')
22
23 expect = """\
24 Hello from program.c
25 Got 42.
26 """
27 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
28
29
30 test.pass_test()
+0
-4
third_party/gyp/test/assembly/src/as.bat less more
0 @echo off
1 :: Mock windows assembler.
2 cl /c %1 /Fo"%2"
3
+0
-59
third_party/gyp/test/assembly/src/assembly.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'conditions': [
7 ['OS=="win"', {
8 'defines': ['PLATFORM_WIN'],
9 }],
10 ['OS=="mac"', {
11 'defines': ['PLATFORM_MAC'],
12 }],
13 ['OS=="linux"', {
14 'defines': ['PLATFORM_LINUX'],
15 }],
16 ],
17 },
18 'targets': [
19 {
20 'target_name': 'program',
21 'type': 'executable',
22 'dependencies': ['lib1'],
23 'sources': [
24 'program.c',
25 ],
26 },
27 {
28 'target_name': 'lib1',
29 'type': 'static_library',
30 'sources': [
31 'lib1.S',
32 ],
33 },
34 ],
35 'conditions': [
36 ['OS=="win"', {
37 'target_defaults': {
38 'rules': [
39 {
40 'rule_name': 'assembler',
41 'msvs_cygwin_shell': 0,
42 'extension': 'S',
43 'inputs': [
44 'as.bat',
45 ],
46 'outputs': [
47 '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).obj',
48 ],
49 'action':
50 ['as.bat', 'lib1.c', '<(_outputs)'],
51 'message': 'Building assembly file <(RULE_INPUT_PATH)',
52 'process_outputs_as_sources': 1,
53 },
54 ],
55 },
56 },],
57 ],
58 }
+0
-10
third_party/gyp/test/assembly/src/lib1.S less more
0 #if PLATFORM_WINDOWS || PLATFORM_MAC
1 # define IDENTIFIER(n) _##n
2 #else /* Linux */
3 # define IDENTIFIER(n) n
4 #endif
5
6 .globl IDENTIFIER(lib1_function)
7 IDENTIFIER(lib1_function):
8 movl $42, %eax
9 ret
+0
-3
third_party/gyp/test/assembly/src/lib1.c less more
0 int lib1_function(void) {
1 return 42;
2 }
+0
-12
third_party/gyp/test/assembly/src/program.c less more
0 #include <stdio.h>
1
2 extern int lib1_function(void);
3
4 int main(int argc, char *argv[])
5 {
6 fprintf(stdout, "Hello from program.c\n");
7 fflush(stdout);
8 fprintf(stdout, "Got %d.\n", lib1_function());
9 fflush(stdout);
10 return 0;
11 }
+0
-77
third_party/gyp/test/builddir/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verify the settings that cause a set of programs to be created in
8 a specific build directory, and that no intermediate built files
9 get created outside of that build directory hierarchy even when
10 referred to with deeply-nested ../../.. paths.
11 """
12
13 import TestGyp
14
15 # TODO(mmoss): Make only supports (theoretically) a single, global build
16 # directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
17 # gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
18 # generators support, so this doesn't work yet for make.
19 # TODO(mmoss) Make also has the issue that the top-level Makefile is written to
20 # the "--depth" location, which is one level above 'src', but then this test
21 # moves 'src' somewhere else, leaving the Makefile behind, so make can't find
22 # its sources. I'm not sure if make is wrong for writing outside the current
23 # directory, or if the test is wrong for assuming everything generated is under
24 # the current directory.
25 test = TestGyp.TestGyp(formats=['!make'])
26
27 test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
28
29 test.relocate('src', 'relocate/src')
30
31 test.subdir('relocate/builddir')
32
33 # Make sure that all the built ../../etc. files only get put under builddir,
34 # by making all of relocate read-only and then making only builddir writable.
35 test.writable('relocate', False)
36 test.writable('relocate/builddir', True)
37
38 # Suppress the test infrastructure's setting SYMROOT on the command line.
39 test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')
40
41 expect1 = """\
42 Hello from prog1.c
43 Hello from func1.c
44 """
45
46 expect2 = """\
47 Hello from subdir2/prog2.c
48 Hello from func2.c
49 """
50
51 expect3 = """\
52 Hello from subdir2/subdir3/prog3.c
53 Hello from func3.c
54 """
55
56 expect4 = """\
57 Hello from subdir2/subdir3/subdir4/prog4.c
58 Hello from func4.c
59 """
60
61 expect5 = """\
62 Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
63 Hello from func5.c
64 """
65
66 def run_builddir(prog, expect):
67 dir = 'relocate/builddir/Default/'
68 test.run(program=test.workpath(dir + prog), stdout=expect)
69
70 run_builddir('prog1', expect1)
71 run_builddir('prog2', expect2)
72 run_builddir('prog3', expect3)
73 run_builddir('prog4', expect4)
74 run_builddir('prog5', expect5)
75
76 test.pass_test()
+0
-77
third_party/gyp/test/builddir/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verify the settings that cause a set of programs to be created in
8 a specific build directory, and that no intermediate built files
9 get created outside of that build directory hierarchy even when
10 referred to with deeply-nested ../../.. paths.
11 """
12
13 import TestGyp
14
15 # TODO(mmoss): Make only supports (theoretically) a single, global build
16 # directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
17 # gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
18 # generators support, so this doesn't work yet for make.
19 # TODO(mmoss) Make also has the issue that the top-level Makefile is written to
20 # the "--depth" location, which is one level above 'src', but then this test
21 # moves 'src' somewhere else, leaving the Makefile behind, so make can't find
22 # its sources. I'm not sure if make is wrong for writing outside the current
23 # directory, or if the test is wrong for assuming everything generated is under
24 # the current directory.
25 test = TestGyp.TestGyp(formats=['!make'])
26
27 test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
28
29 test.relocate('src', 'relocate/src')
30
31 test.subdir('relocate/builddir')
32
33 # Make sure that all the built ../../etc. files only get put under builddir,
34 # by making all of relocate read-only and then making only builddir writable.
35 test.writable('relocate', False)
36 test.writable('relocate/builddir', True)
37
38 # Suppress the test infrastructure's setting SYMROOT on the command line.
39 test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')
40
41 expect1 = """\
42 Hello from prog1.c
43 Hello from func1.c
44 """
45
46 expect2 = """\
47 Hello from subdir2/prog2.c
48 Hello from func2.c
49 """
50
51 expect3 = """\
52 Hello from subdir2/subdir3/prog3.c
53 Hello from func3.c
54 """
55
56 expect4 = """\
57 Hello from subdir2/subdir3/subdir4/prog4.c
58 Hello from func4.c
59 """
60
61 expect5 = """\
62 Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
63 Hello from func5.c
64 """
65
66 def run_builddir(prog, expect):
67 dir = 'relocate/builddir/Default/'
68 test.run(program=test.workpath(dir + prog), stdout=expect)
69
70 run_builddir('prog1', expect1)
71 run_builddir('prog2', expect2)
72 run_builddir('prog3', expect3)
73 run_builddir('prog4', expect4)
74 run_builddir('prog5', expect5)
75
76 test.pass_test()
+0
-21
third_party/gyp/test/builddir/src/builddir.gypi less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'configurations': {
7 'Default': {
8 'msvs_configuration_attributes': {
9 'OutputDirectory': '<(DEPTH)\\builddir\Default',
10 },
11 },
12 },
13 },
14 'scons_settings': {
15 'sconsbuild_dir': '<(DEPTH)/builddir',
16 },
17 'xcode_settings': {
18 'SYMROOT': '<(DEPTH)/builddir',
19 },
20 }
+0
-6
third_party/gyp/test/builddir/src/func1.c less more
0 #include <stdio.h>
1
2 void func1(void)
3 {
4 printf("Hello from func1.c\n");
5 }
+0
-6
third_party/gyp/test/builddir/src/func2.c less more
0 #include <stdio.h>
1
2 void func2(void)
3 {
4 printf("Hello from func2.c\n");
5 }
+0
-6
third_party/gyp/test/builddir/src/func3.c less more
0 #include <stdio.h>
1
2 void func3(void)
3 {
4 printf("Hello from func3.c\n");
5 }
+0
-6
third_party/gyp/test/builddir/src/func4.c less more
0 #include <stdio.h>
1
2 void func4(void)
3 {
4 printf("Hello from func4.c\n");
5 }
+0
-6
third_party/gyp/test/builddir/src/func5.c less more
0 #include <stdio.h>
1
2 void func5(void)
3 {
4 printf("Hello from func5.c\n");
5 }
+0
-10
third_party/gyp/test/builddir/src/prog1.c less more
0 #include <stdio.h>
1
2 extern void func1(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from prog1.c\n");
7 func1();
8 return 0;
9 }
+0
-30
third_party/gyp/test/builddir/src/prog1.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 'builddir.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'pull_in_all',
11 'type': 'none',
12 'dependencies': [
13 'prog1',
14 'subdir2/prog2.gyp:prog2',
15 'subdir2/subdir3/prog3.gyp:prog3',
16 'subdir2/subdir3/subdir4/prog4.gyp:prog4',
17 'subdir2/subdir3/subdir4/subdir5/prog5.gyp:prog5',
18 ],
19 },
20 {
21 'target_name': 'prog1',
22 'type': 'executable',
23 'sources': [
24 'prog1.c',
25 'func1.c',
26 ],
27 },
28 ],
29 }
+0
-10
third_party/gyp/test/builddir/src/subdir2/prog2.c less more
0 #include <stdio.h>
1
2 extern void func2(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from subdir2/prog2.c\n");
7 func2();
8 return 0;
9 }
+0
-19
third_party/gyp/test/builddir/src/subdir2/prog2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../builddir.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog2',
11 'type': 'executable',
12 'sources': [
13 'prog2.c',
14 '../func2.c',
15 ],
16 },
17 ],
18 }
+0
-10
third_party/gyp/test/builddir/src/subdir2/subdir3/prog3.c less more
0 #include <stdio.h>
1
2 extern void func3(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from subdir2/subdir3/prog3.c\n");
7 func3();
8 return 0;
9 }
+0
-19
third_party/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../../builddir.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog3',
11 'type': 'executable',
12 'sources': [
13 'prog3.c',
14 '../../func3.c',
15 ],
16 },
17 ],
18 }
+0
-10
third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c less more
0 #include <stdio.h>
1
2 extern void func4(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from subdir2/subdir3/subdir4/prog4.c\n");
7 func4();
8 return 0;
9 }
+0
-19
third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../../../builddir.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog4',
11 'type': 'executable',
12 'sources': [
13 'prog4.c',
14 '../../../func4.c',
15 ],
16 },
17 ],
18 }
+0
-10
third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c less more
0 #include <stdio.h>
1
2 extern void func5(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from subdir2/subdir3/subdir4/subdir5/prog5.c\n");
7 func5();
8 return 0;
9 }
+0
-19
third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../../../../builddir.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog5',
11 'type': 'executable',
12 'sources': [
13 'prog5.c',
14 '../../../../func5.c',
15 ],
16 },
17 ],
18 }
+0
-29
third_party/gyp/test/compilable/gyptest-headers.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that .hpp files are ignored when included in the source list on all
8 platforms.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('headers.gyp', chdir='src')
16
17 test.relocate('src', 'relocate/src')
18
19 test.build('headers.gyp', test.ALL, chdir='relocate/src')
20
21 expect = """\
22 Hello from program.c
23 Hello from lib1.c
24 """
25 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
26
27
28 test.pass_test()
+0
-26
third_party/gyp/test/compilable/src/headers.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program',
8 'type': 'executable',
9 'dependencies': [
10 'lib1'
11 ],
12 'sources': [
13 'program.cpp',
14 ],
15 },
16 {
17 'target_name': 'lib1',
18 'type': 'static_library',
19 'sources': [
20 'lib1.hpp',
21 'lib1.cpp',
22 ],
23 },
24 ],
25 }
+0
-7
third_party/gyp/test/compilable/src/lib1.cpp less more
0 #include <stdio.h>
1 #include "lib1.hpp"
2
3 void lib1_function(void) {
4 fprintf(stdout, "Hello from lib1.c\n");
5 fflush(stdout);
6 }
+0
-6
third_party/gyp/test/compilable/src/lib1.hpp less more
0 #ifndef _lib1_hpp
1 #define _lib1_hpp
2
3 extern void lib1_function(void);
4
5 #endif
+0
-9
third_party/gyp/test/compilable/src/program.cpp less more
0 #include <stdio.h>
1 #include "lib1.hpp"
2
3 int main(int argc, char *argv[]) {
4 fprintf(stdout, "Hello from program.c\n");
5 fflush(stdout);
6 lib1_function();
7 return 0;
8 }
+0
-15
third_party/gyp/test/configurations/basics/configurations.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 #ifdef FOO
5 printf("Foo configuration\n");
6 #endif
7 #ifdef DEBUG
8 printf("Debug configuration\n");
9 #endif
10 #ifdef RELEASE
11 printf("Release configuration\n");
12 #endif
13 return 0;
14 }
+0
-32
third_party/gyp/test/configurations/basics/configurations.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'configurations',
8 'type': 'executable',
9 'sources': [
10 'configurations.c',
11 ],
12 'configurations': {
13 'Debug': {
14 'defines': [
15 'DEBUG',
16 ],
17 },
18 'Release': {
19 'defines': [
20 'RELEASE',
21 ],
22 },
23 'Foo': {
24 'defines': [
25 'FOO',
26 ],
27 },
28 }
29 },
30 ],
31 }
+0
-29
third_party/gyp/test/configurations/basics/gyptest-configurations.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable in three different configurations.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('configurations.gyp')
15
16 test.set_configuration('Release')
17 test.build('configurations.gyp')
18 test.run_built_executable('configurations', stdout="Release configuration\n")
19
20 test.set_configuration('Debug')
21 test.build('configurations.gyp')
22 test.run_built_executable('configurations', stdout="Debug configuration\n")
23
24 test.set_configuration('Foo')
25 test.build('configurations.gyp')
26 test.run_built_executable('configurations', stdout="Foo configuration\n")
27
28 test.pass_test()
+0
-21
third_party/gyp/test/configurations/inheritance/configurations.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 #ifdef BASE
5 printf("Base configuration\n");
6 #endif
7 #ifdef COMMON
8 printf("Common configuration\n");
9 #endif
10 #ifdef COMMON2
11 printf("Common2 configuration\n");
12 #endif
13 #ifdef DEBUG
14 printf("Debug configuration\n");
15 #endif
16 #ifdef RELEASE
17 printf("Release configuration\n");
18 #endif
19 return 0;
20 }
+0
-40
third_party/gyp/test/configurations/inheritance/configurations.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'configurations': {
7 'Base': {
8 'abstract': 1,
9 'defines': ['BASE'],
10 },
11 'Common': {
12 'abstract': 1,
13 'inherit_from': ['Base'],
14 'defines': ['COMMON'],
15 },
16 'Common2': {
17 'abstract': 1,
18 'defines': ['COMMON2'],
19 },
20 'Debug': {
21 'inherit_from': ['Common', 'Common2'],
22 'defines': ['DEBUG'],
23 },
24 'Release': {
25 'inherit_from': ['Common', 'Common2'],
26 'defines': ['RELEASE'],
27 },
28 },
29 },
30 'targets': [
31 {
32 'target_name': 'configurations',
33 'type': 'executable',
34 'sources': [
35 'configurations.c',
36 ],
37 },
38 ],
39 }
+0
-33
third_party/gyp/test/configurations/inheritance/gyptest-inheritance.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable in three different configurations.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('configurations.gyp')
15
16 test.set_configuration('Release')
17 test.build('configurations.gyp')
18 test.run_built_executable('configurations',
19 stdout=('Base configuration\n'
20 'Common configuration\n'
21 'Common2 configuration\n'
22 'Release configuration\n'))
23
24 test.set_configuration('Debug')
25 test.build('configurations.gyp')
26 test.run_built_executable('configurations',
27 stdout=('Base configuration\n'
28 'Common configuration\n'
29 'Common2 configuration\n'
30 'Debug configuration\n'))
31
32 test.pass_test()
+0
-18
third_party/gyp/test/configurations/invalid/actions.gyp less more
0 # Copyright (c) 2010 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'configurations',
8 'type': 'none',
9 'configurations': {
10 'Debug': {
11 'actions': [
12 ],
13 },
14 }
15 },
16 ],
17 }
+0
-18
third_party/gyp/test/configurations/invalid/all_dependent_settings.gyp less more
0 # Copyright (c) 2010 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'configurations',
8 'type': 'none',
9 'configurations': {
10 'Debug': {
11 'all_dependent_settings': [
12 ],
13 },
14 }
15 },
16 ],
17 }
+0
-18
third_party/gyp/test/configurations/invalid/configurations.gyp less more
0 # Copyright (c) 2010 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'configurations',
8 'type': 'none',
9 'configurations': {
10 'Debug': {
11 'configurations': [
12 ],
13 },
14 }
15 },
16 ],
17 }
+0
-18
third_party/gyp/test/configurations/invalid/dependencies.gyp less more
0 # Copyright (c) 2010 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'configurations',
8 'type': 'none',
9 'configurations': {
10 'Debug': {
11 'dependencies': [
12 ],
13 },
14 }
15 },
16 ],
17 }
+0
-18
third_party/gyp/test/configurations/invalid/direct_dependent_settings.gyp less more
0 # Copyright (c) 2010 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'configurations',
8 'type': 'none',
9 'configurations': {
10 'Debug': {
11 'direct_dependent_settings': [
12 ],
13 },
14 }
15 },
16 ],
17 }
+0
-35
third_party/gyp/test/configurations/invalid/gyptest-configurations.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2010 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable in three different configurations.
8 """
9
10 import TestGyp
11
12 # Keys that do not belong inside a configuration dictionary.
13 invalid_configuration_keys = [
14 'actions',
15 'all_dependent_settings',
16 'configurations',
17 'dependencies',
18 'direct_dependent_settings',
19 'libraries',
20 'link_settings',
21 'sources',
22 'target_name',
23 'type',
24 ]
25
26 test = TestGyp.TestGyp()
27
28 for test_key in invalid_configuration_keys:
29 test.run_gyp('%s.gyp' % test_key, status=1, stderr=None)
30 expect = ['%s not allowed in the Debug configuration, found in target '
31 '%s.gyp:configurations#target' % (test_key, test_key)]
32 test.must_contain_all_lines(test.stderr(), expect)
33
34 test.pass_test()
+0
-18
third_party/gyp/test/configurations/invalid/libraries.gyp less more
0 # Copyright (c) 2010 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'configurations',
8 'type': 'none',
9 'configurations': {
10 'Debug': {
11 'libraries': [
12 ],
13 },
14 }
15 },
16 ],
17 }
+0
-18
third_party/gyp/test/configurations/invalid/link_settings.gyp less more
0 # Copyright (c) 2010 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'configurations',
8 'type': 'none',
9 'configurations': {
10 'Debug': {
11 'link_settings': [
12 ],
13 },
14 }
15 },
16 ],
17 }
+0
-18
third_party/gyp/test/configurations/invalid/sources.gyp less more
0 # Copyright (c) 2010 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'configurations',
8 'type': 'none',
9 'configurations': {
10 'Debug': {
11 'sources': [
12 ],
13 },
14 }
15 },
16 ],
17 }
+0
-18
third_party/gyp/test/configurations/invalid/target_name.gyp less more
0 # Copyright (c) 2010 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'configurations',
8 'type': 'none',
9 'configurations': {
10 'Debug': {
11 'target_name': [
12 ],
13 },
14 }
15 },
16 ],
17 }
+0
-18
third_party/gyp/test/configurations/invalid/type.gyp less more
0 # Copyright (c) 2010 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'configurations',
8 'type': 'none',
9 'configurations': {
10 'Debug': {
11 'type': [
12 ],
13 },
14 }
15 },
16 ],
17 }
+0
-58
third_party/gyp/test/configurations/target_platform/configurations.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'configurations': {
7 'Debug_Win32': {
8 'msvs_configuration_platform': 'Win32',
9 },
10 'Debug_x64': {
11 'msvs_configuration_platform': 'x64',
12 },
13 },
14 },
15 'targets': [
16 {
17 'target_name': 'left',
18 'type': 'static_library',
19 'sources': [
20 'left.c',
21 ],
22 'configurations': {
23 'Debug_Win32': {
24 'msvs_target_platform': 'x64',
25 },
26 },
27 },
28 {
29 'target_name': 'right',
30 'type': 'static_library',
31 'sources': [
32 'right.c',
33 ],
34 },
35 {
36 'target_name': 'front_left',
37 'type': 'executable',
38 'dependencies': ['left'],
39 'sources': [
40 'front.c',
41 ],
42 'configurations': {
43 'Debug_Win32': {
44 'msvs_target_platform': 'x64',
45 },
46 },
47 },
48 {
49 'target_name': 'front_right',
50 'type': 'executable',
51 'dependencies': ['right'],
52 'sources': [
53 'front.c',
54 ],
55 },
56 ],
57 }
+0
-8
third_party/gyp/test/configurations/target_platform/front.c less more
0 #include <stdio.h>
1
2 const char *message(void);
3
4 int main(int argc, char *argv[]) {
5 printf("%s\n", message());
6 return 0;
7 }
+0
-40
third_party/gyp/test/configurations/target_platform/gyptest-target_platform.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Tests the msvs specific msvs_target_platform option.
8 """
9
10 import TestGyp
11 import TestCommon
12
13
14 def RunX64(exe, stdout):
15 try:
16 test.run_built_executable(exe, stdout=stdout)
17 except WindowsError, e:
18 # Assume the exe is 64-bit if it can't load on 32-bit systems.
19 # Both versions of the error are required because different versions
20 # of python seem to return different errors for invalid exe type.
21 if e.errno != 193 and '[Error 193]' not in str(e):
22 raise
23
24
25 test = TestGyp.TestGyp(formats=['msvs'])
26
27 test.run_gyp('configurations.gyp')
28
29 test.set_configuration('Debug|x64')
30 test.build('configurations.gyp', rebuild=True)
31 RunX64('front_left', stdout=('left\n'))
32 RunX64('front_right', stdout=('right\n'))
33
34 test.set_configuration('Debug|Win32')
35 test.build('configurations.gyp', rebuild=True)
36 RunX64('front_left', stdout=('left\n'))
37 test.run_built_executable('front_right', stdout=('right\n'))
38
39 test.pass_test()
+0
-3
third_party/gyp/test/configurations/target_platform/left.c less more
0 const char *message(void) {
1 return "left";
2 }
+0
-3
third_party/gyp/test/configurations/target_platform/right.c less more
0 const char *message(void) {
1 return "right";
2 }
+0
-12
third_party/gyp/test/configurations/x64/configurations.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[]) {
3 if (sizeof(void*) == 4) {
4 printf("Running Win32\n");
5 } else if (sizeof(void*) == 8) {
6 printf("Running x64\n");
7 } else {
8 printf("Unexpected platform\n");
9 }
10 return 0;
11 }
+0
-26
third_party/gyp/test/configurations/x64/configurations.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'configurations': {
7 'Debug': {
8 'msvs_configuration_platform': 'Win32',
9 },
10 'Debug_x64': {
11 'inherit_from': ['Debug'],
12 'msvs_configuration_platform': 'x64',
13 },
14 },
15 },
16 'targets': [
17 {
18 'target_name': 'configurations',
19 'type': 'executable',
20 'sources': [
21 'configurations.c',
22 ],
23 },
24 ],
25 }
+0
-29
third_party/gyp/test/configurations/x64/gyptest-x86.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable in three different configurations.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp(formats=['msvs'])
13
14 test.run_gyp('configurations.gyp')
15
16 for platform in ['Win32', 'x64']:
17 test.set_configuration('Debug|%s' % platform)
18 test.build('configurations.gyp', rebuild=True)
19 try:
20 test.run_built_executable('configurations',
21 stdout=('Running %s\n' % platform))
22 except WindowsError, e:
23 # Assume the exe is 64-bit if it can't load on 32-bit systems.
24 if platform == 'x64' and (e.errno == 193 or '[Error 193]' in str(e)):
25 continue
26 raise
27
28 test.pass_test()
+0
-40
third_party/gyp/test/copies/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies file copies using an explicit build target of 'all'.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('copies.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('copies.gyp', test.ALL, chdir='relocate/src')
19
20 test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
21
22 test.built_file_must_match('copies-out/file2',
23 'file2 contents\n',
24 chdir='relocate/src')
25
26 test.built_file_must_match('copies-out/directory/file3',
27 'file3 contents\n',
28 chdir='relocate/src')
29 test.built_file_must_match('copies-out/directory/file4',
30 'file4 contents\n',
31 chdir='relocate/src')
32 test.built_file_must_match('copies-out/directory/subdir/file5',
33 'file5 contents\n',
34 chdir='relocate/src')
35 test.built_file_must_match('copies-out/subdir/file6',
36 'file6 contents\n',
37 chdir='relocate/src')
38
39 test.pass_test()
+0
-40
third_party/gyp/test/copies/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies file copies using the build tool default.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('copies.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('copies.gyp', chdir='relocate/src')
19
20 test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
21
22 test.built_file_must_match('copies-out/file2',
23 'file2 contents\n',
24 chdir='relocate/src')
25
26 test.built_file_must_match('copies-out/directory/file3',
27 'file3 contents\n',
28 chdir='relocate/src')
29 test.built_file_must_match('copies-out/directory/file4',
30 'file4 contents\n',
31 chdir='relocate/src')
32 test.built_file_must_match('copies-out/directory/subdir/file5',
33 'file5 contents\n',
34 chdir='relocate/src')
35 test.built_file_must_match('copies-out/subdir/file6',
36 'file6 contents\n',
37 chdir='relocate/src')
38
39 test.pass_test()
+0
-70
third_party/gyp/test/copies/src/copies.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'copies1',
8 'type': 'none',
9 'copies': [
10 {
11 'destination': 'copies-out',
12 'files': [
13 'file1',
14 ],
15 },
16 ],
17 },
18 {
19 'target_name': 'copies2',
20 'type': 'none',
21 'copies': [
22 {
23 'destination': '<(PRODUCT_DIR)/copies-out',
24 'files': [
25 'file2',
26 ],
27 },
28 ],
29 },
30 # Copy a directory tree.
31 {
32 'target_name': 'copies_recursive',
33 'type': 'none',
34 'copies': [
35 {
36 'destination': '<(PRODUCT_DIR)/copies-out',
37 'files': [
38 'directory/',
39 ],
40 },
41 ],
42 },
43 # Copy a directory from deeper in the tree (this should not reproduce the
44 # entire directory path in the destination, only the final directory).
45 {
46 'target_name': 'copies_recursive_depth',
47 'type': 'none',
48 'copies': [
49 {
50 'destination': '<(PRODUCT_DIR)/copies-out',
51 'files': [
52 'parentdir/subdir/',
53 ],
54 },
55 ],
56 },
57 # Verify that a null 'files' list doesn't gag the generators.
58 {
59 'target_name': 'copies_null',
60 'type': 'none',
61 'copies': [
62 {
63 'destination': '<(PRODUCT_DIR)/copies-null',
64 'files': [],
65 },
66 ],
67 },
68 ],
69 }
+0
-1
third_party/gyp/test/copies/src/directory/file3 less more
0 file3 contents
+0
-1
third_party/gyp/test/copies/src/directory/file4 less more
0 file4 contents
+0
-1
third_party/gyp/test/copies/src/directory/subdir/file5 less more
0 file5 contents
+0
-1
third_party/gyp/test/copies/src/file1 less more
0 file1 contents
+0
-1
third_party/gyp/test/copies/src/file2 less more
0 file2 contents
+0
-1
third_party/gyp/test/copies/src/parentdir/subdir/file6 less more
0 file6 contents
+0
-21
third_party/gyp/test/copies-link/gyptest-copies-link.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies file copies using the build tool default.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('copies-link.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('copies-link.gyp', chdir='relocate/src')
19
20 test.pass_test()
+0
-61
third_party/gyp/test/copies-link/src/copies-link.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'func1',
8 'type': 'static_library',
9 'sources': ['func1.c'],
10 },
11 {
12 'target_name': 'clone_func1',
13 'type': 'none',
14 'dependencies': ['func1'],
15 'actions': [
16 {
17 'action_name': 'cloning library',
18 'inputs': [
19 '<(LIB_DIR)/<(STATIC_LIB_PREFIX)func1<(STATIC_LIB_SUFFIX)'
20 ],
21 'outputs': ['<(PRODUCT_DIR)/alternate/'
22 '<(STATIC_LIB_PREFIX)cloned<(STATIC_LIB_SUFFIX)'],
23 'destination': '<(PRODUCT_DIR)',
24 'action': ['python', 'copy.py', '<@(_inputs)', '<@(_outputs)'],
25 'msvs_cygwin_shell': 0,
26 },
27 ],
28 },
29 {
30 'target_name': 'copy_cloned',
31 'type': 'none',
32 'dependencies': ['clone_func1'],
33 'copies': [
34 {
35 'destination': '<(LIB_DIR)',
36 'files': [
37 '<(PRODUCT_DIR)/alternate/'
38 '<(STATIC_LIB_PREFIX)cloned<(STATIC_LIB_SUFFIX)',
39 ],
40 },
41 ],
42 },
43 {
44 'target_name': 'use_cloned',
45 'type': 'executable',
46 'sources': ['main.c'],
47 'dependencies': ['copy_cloned'],
48 'link_settings': {
49 'conditions': [
50 ['OS=="win"', {
51 'libraries': ['-l"<(LIB_DIR)/cloned.lib"'],
52 }, {
53 'libraries': ['-lcloned'],
54 'ldflags': ['-L <(LIB_DIR)'],
55 }],
56 ],
57 },
58 },
59 ],
60 }
+0
-21
third_party/gyp/test/copies-link/src/copy.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import shutil
7 import sys
8
9
10 def main(argv):
11 if len(argv) != 3:
12 print 'USAGE: copy.py <src> <dst>'
13 return 1
14
15 shutil.copy(argv[1], argv[2])
16 return 0
17
18
19 if __name__ == '__main__':
20 sys.exit(main(sys.argv))
+0
-9
third_party/gyp/test/copies-link/src/func1.c less more
0 #include <stdio.h>
1
2 extern void func1(void);
3
4 int main(int argc, char *argv[]) {
5 printf("hello from link1\n");
6 func1();
7 return 0;
8 }
+0
-5
third_party/gyp/test/copies-link/src/main.c less more
0 #include <stdio.h>
1
2 void func1(void) {
3 printf("hello from func1\n");
4 }
+0
-22
third_party/gyp/test/defines/defines-env.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'variables': {
6 'value%': '5',
7 },
8 'targets': [
9 {
10 'target_name': 'defines',
11 'type': 'executable',
12 'sources': [
13 'defines.c',
14 ],
15 'defines': [
16 'VALUE=<(value)',
17 ],
18 },
19 ],
20 }
21
+0
-14
third_party/gyp/test/defines/defines.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 int main(int argc, char *argv[])
7 {
8 #ifdef FOO
9 printf("FOO is defined\n");
10 #endif
11 printf("VALUE is %d\n", VALUE);
12 return 0;
13 }
+0
-36
third_party/gyp/test/defines/defines.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'defines',
8 'type': 'executable',
9 'sources': [
10 'defines.c',
11 ],
12 'defines': [
13 'FOO',
14 'VALUE=1',
15 ],
16 },
17 ],
18 'conditions': [
19 ['OS=="fakeos"', {
20 'targets': [
21 {
22 'target_name': 'fakeosprogram',
23 'type': 'executable',
24 'sources': [
25 'defines.c',
26 ],
27 'defines': [
28 'FOO',
29 'VALUE=1',
30 ],
31 },
32 ],
33 }],
34 ],
35 }
+0
-34
third_party/gyp/test/defines/gyptest-define-override.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that a default gyp define can be overridden.
8 """
9
10 import os
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 # Command-line define
16 test.run_gyp('defines.gyp', '-D', 'OS=fakeos')
17 test.build('defines.gyp')
18 test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE)
19 # Clean up the exe so subsequent tests don't find an old exe.
20 os.remove(test.built_file_path('fakeosprogram', type=test.EXECUTABLE))
21
22 # Without "OS" override, fokeosprogram shouldn't be built.
23 test.run_gyp('defines.gyp')
24 test.build('defines.gyp')
25 test.built_file_must_not_exist('fakeosprogram', type=test.EXECUTABLE)
26
27 # Environment define
28 os.environ['GYP_DEFINES'] = 'OS=fakeos'
29 test.run_gyp('defines.gyp')
30 test.build('defines.gyp')
31 test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE)
32
33 test.pass_test()
+0
-49
third_party/gyp/test/defines/gyptest-defines-env-regyp.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable with C++ define specified by a gyp define, and
8 the use of the environment during regeneration when the gyp file changes.
9 """
10
11 import os
12 import TestGyp
13
14 # Regenerating build files when a gyp file changes is currently only supported
15 # by the make generator.
16 test = TestGyp.TestGyp(formats=['make'])
17
18 try:
19 os.environ['GYP_DEFINES'] = 'value=50'
20 test.run_gyp('defines.gyp')
21 finally:
22 # We clear the environ after calling gyp. When the auto-regeneration happens,
23 # the same define should be reused anyway. Reset to empty string first in
24 # case the platform doesn't support unsetenv.
25 os.environ['GYP_DEFINES'] = ''
26 del os.environ['GYP_DEFINES']
27
28 test.build('defines.gyp')
29
30 expect = """\
31 FOO is defined
32 VALUE is 1
33 """
34 test.run_built_executable('defines', stdout=expect)
35
36 # Sleep so that the changed gyp file will have a newer timestamp than the
37 # previously generated build files.
38 test.sleep()
39 test.write('defines.gyp', test.read('defines-env.gyp'))
40
41 test.build('defines.gyp', test.ALL)
42
43 expect = """\
44 VALUE is 50
45 """
46 test.run_built_executable('defines', stdout=expect)
47
48 test.pass_test()
+0
-85
third_party/gyp/test/defines/gyptest-defines-env.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable with C++ define specified by a gyp define.
8 """
9
10 import os
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 # With the value only given in environment, it should be used.
16 try:
17 os.environ['GYP_DEFINES'] = 'value=10'
18 test.run_gyp('defines-env.gyp')
19 finally:
20 del os.environ['GYP_DEFINES']
21
22 test.build('defines-env.gyp')
23
24 expect = """\
25 VALUE is 10
26 """
27 test.run_built_executable('defines', stdout=expect)
28
29
30 # With the value given in both command line and environment,
31 # command line should take precedence.
32 try:
33 os.environ['GYP_DEFINES'] = 'value=20'
34 test.run_gyp('defines-env.gyp', '-Dvalue=25')
35 finally:
36 del os.environ['GYP_DEFINES']
37
38 test.sleep()
39 test.touch('defines.c')
40 test.build('defines-env.gyp')
41
42 expect = """\
43 VALUE is 25
44 """
45 test.run_built_executable('defines', stdout=expect)
46
47
48 # With the value only given in environment, it should be ignored if
49 # --ignore-environment is specified.
50 try:
51 os.environ['GYP_DEFINES'] = 'value=30'
52 test.run_gyp('defines-env.gyp', '--ignore-environment')
53 finally:
54 del os.environ['GYP_DEFINES']
55
56 test.sleep()
57 test.touch('defines.c')
58 test.build('defines-env.gyp')
59
60 expect = """\
61 VALUE is 5
62 """
63 test.run_built_executable('defines', stdout=expect)
64
65
66 # With the value given in both command line and environment, and
67 # --ignore-environment also specified, command line should still be used.
68 try:
69 os.environ['GYP_DEFINES'] = 'value=40'
70 test.run_gyp('defines-env.gyp', '--ignore-environment', '-Dvalue=45')
71 finally:
72 del os.environ['GYP_DEFINES']
73
74 test.sleep()
75 test.touch('defines.c')
76 test.build('defines-env.gyp')
77
78 expect = """\
79 VALUE is 45
80 """
81 test.run_built_executable('defines', stdout=expect)
82
83
84 test.pass_test()
+0
-25
third_party/gyp/test/defines/gyptest-defines.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable with C++ defines.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('defines.gyp')
15
16 test.build('defines.gyp')
17
18 expect = """\
19 FOO is defined
20 VALUE is 1
21 """
22 test.run_built_executable('defines', stdout=expect)
23
24 test.pass_test()
+0
-11
third_party/gyp/test/defines-escaping/defines-escaping.c less more
0 /* Copyright (c) 2010 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 int main(int argc, char *argv[])
7 {
8 printf(TEST_FORMAT, TEST_ARGS);
9 return 0;
10 }
+0
-19
third_party/gyp/test/defines-escaping/defines-escaping.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'defines_escaping',
8 'type': 'executable',
9 'sources': [
10 'defines-escaping.c',
11 ],
12 'defines': [
13 'TEST_FORMAT="<(test_format)"',
14 'TEST_ARGS=<(test_args)',
15 ],
16 },
17 ],
18 }
+0
-163
third_party/gyp/test/defines-escaping/gyptest-defines-escaping.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2010 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies build of an executable with C++ define specified by a gyp define using
8 various special characters such as quotes, commas, etc.
9 """
10
11 import os
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 # Tests string literals, percents, and backslash escapes.
17 try:
18 os.environ['GYP_DEFINES'] = \
19 """test_format='%s\\n' test_args='"Simple test of %s with a literal"'"""
20 test.run_gyp('defines-escaping.gyp')
21 finally:
22 del os.environ['GYP_DEFINES']
23
24 test.build('defines-escaping.gyp')
25
26 expect = """\
27 Simple test of %s with a literal
28 """
29 test.run_built_executable('defines_escaping', stdout=expect)
30
31
32 # Test multiple comma-and-space-separated string literals.
33 try:
34 os.environ['GYP_DEFINES'] = \
35 """test_format='%s and %s\\n' test_args='"foo", "bar"'"""
36 test.run_gyp('defines-escaping.gyp')
37 finally:
38 del os.environ['GYP_DEFINES']
39
40 test.sleep()
41 test.touch('defines-escaping.c')
42 test.build('defines-escaping.gyp')
43
44 expect = """\
45 foo and bar
46 """
47 test.run_built_executable('defines_escaping', stdout=expect)
48
49
50 # Test string literals containing quotes.
51 try:
52 os.environ['GYP_DEFINES'] = \
53 ("""test_format='%s %s %s %s %s\\n' """ +
54 """test_args='"\\"These,\\"",""" +
55 """ "\\"words,\\"","""
56 """ "\\"are,\\"",""" +
57 """ "\\"in,\\"",""" +
58 """ "\\"quotes.\\""'""")
59 test.run_gyp('defines-escaping.gyp')
60 finally:
61 del os.environ['GYP_DEFINES']
62
63 test.sleep()
64 test.touch('defines-escaping.c')
65 test.build('defines-escaping.gyp')
66
67 expect = """\
68 "These," "words," "are," "in," "quotes."
69 """
70 test.run_built_executable('defines_escaping', stdout=expect)
71
72
73 # Test string literals containing single quotes.
74 try:
75 os.environ['GYP_DEFINES'] = \
76 ("""test_format='%s %s %s %s %s\\n' """ +
77 """test_args="\\"'These,'\\",""" +
78 """ \\"'words,'\\","""
79 """ \\"'are,'\\",""" +
80 """ \\"'in,'\\",""" +
81 """ \\"'quotes.'\\"" """)
82 test.run_gyp('defines-escaping.gyp')
83 finally:
84 del os.environ['GYP_DEFINES']
85
86 test.sleep()
87 test.touch('defines-escaping.c')
88 test.build('defines-escaping.gyp')
89
90 expect = """\
91 'These,' 'words,' 'are,' 'in,' 'quotes.'
92 """
93 test.run_built_executable('defines_escaping', stdout=expect)
94
95
96 # Test string literals containing different numbers of backslashes before quotes
97 # (to exercise Windows' quoting behaviour).
98 try:
99 os.environ['GYP_DEFINES'] = \
100 ("""test_format='%s\\n%s\\n%s\\n' """ +
101 """test_args='"\\\\\\"1 visible slash\\\\\\"",""" +
102 """ "\\\\\\\\\\"2 visible slashes\\\\\\\\\\"","""
103 """ "\\\\\\\\\\\\\\"3 visible slashes\\\\\\\\\\\\\\""'""")
104 test.run_gyp('defines-escaping.gyp')
105 finally:
106 del os.environ['GYP_DEFINES']
107
108 test.sleep()
109 test.touch('defines-escaping.c')
110 test.build('defines-escaping.gyp')
111
112 expect = """\
113 \\"1 visible slash\\"
114 \\\\"2 visible slashes\\\\"
115 \\\\\\"3 visible slashes\\\\\\"
116 """
117 test.run_built_executable('defines_escaping', stdout=expect)
118
119
120 # Test that various scary sequences are passed unfettered.
121 try:
122 os.environ['GYP_DEFINES'] = \
123 ("""test_format='%s\\n' """ +
124 """test_args='"%PATH%, $foo, &quot; `foo`;"'""")
125 test.run_gyp('defines-escaping.gyp')
126 finally:
127 del os.environ['GYP_DEFINES']
128
129 test.sleep()
130 test.touch('defines-escaping.c')
131 test.build('defines-escaping.gyp')
132
133 expect = """\
134 %PATH%, $foo, &quot; `foo`;
135 """
136 test.run_built_executable('defines_escaping', stdout=expect)
137
138
139 # Test commas and semi-colons preceded by backslashes (to exercise Windows'
140 # quoting behaviour).
141 try:
142 os.environ['GYP_DEFINES'] = \
143 ("""test_format='%s\\n%s\\n' """ +
144 """test_args='"\\\\, \\\\\\\\;",""" +
145 # Same thing again, but enclosed in visible quotes.
146 """ "\\"\\\\, \\\\\\\\;\\""'""")
147 test.run_gyp('defines-escaping.gyp')
148 finally:
149 del os.environ['GYP_DEFINES']
150
151 test.sleep()
152 test.touch('defines-escaping.c')
153 test.build('defines-escaping.gyp')
154
155 expect = """\
156 \\, \\\\;
157 "\\, \\\\;"
158 """
159 test.run_built_executable('defines_escaping', stdout=expect)
160
161 # We deliberately do not test having an odd number of quotes in a string
162 # literal because that isn't feasible in MSVS.
+0
-9
third_party/gyp/test/dependencies/a.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 extern int funcB();
5
6 int funcA() {
7 return funcB();
8 }
+0
-3
third_party/gyp/test/dependencies/b/b.c less more
0 int funcB() {
1 return 2;
2 }
+0
-15
third_party/gyp/test/dependencies/b/b.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'b',
8 'type': 'static_library',
9 'sources': [
10 'b.c',
11 ],
12 },
13 ],
14 }
+0
-4
third_party/gyp/test/dependencies/c/c.c less more
0 int funcC() {
1 return 3
2 // Intentional syntax error. This file should never be compiled, so this
3 // shouldn't be a problem.
+0
-22
third_party/gyp/test/dependencies/c/c.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'c_unused',
8 'type': 'static_library',
9 'sources': [
10 'c.c',
11 ],
12 },
13 {
14 'target_name': 'd',
15 'type': 'static_library',
16 'sources': [
17 'd.c',
18 ],
19 },
20 ],
21 }
+0
-3
third_party/gyp/test/dependencies/c/d.c less more
0 int funcD() {
1 return 4;
2 }
+0
-18
third_party/gyp/test/dependencies/extra_targets.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'a',
8 'type': 'static_library',
9 'sources': [
10 'a.c',
11 ],
12 # This only depends on the "d" target; other targets in c.gyp
13 # should not become part of the build (unlike with 'c/c.gyp:*').
14 'dependencies': ['c/c.gyp:d'],
15 },
16 ],
17 }
+0
-21
third_party/gyp/test/dependencies/gyptest-extra-targets.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verify that dependencies don't pull unused targets into the build.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('extra_targets.gyp')
15
16 # This should fail if it tries to build 'c_unused' since 'c/c.c' has a syntax
17 # error and won't compile.
18 test.build('extra_targets.gyp', test.ALL)
19
20 test.pass_test()
+0
-33
third_party/gyp/test/dependencies/gyptest-lib-only.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verify that a link time only dependency will get pulled into the set of built
8 targets, even if no executable uses it.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('lib_only.gyp')
16
17 test.build('lib_only.gyp', test.ALL)
18
19 # Make doesn't put static libs in a common 'lib' directory, like it does with
20 # shared libs, so check in the obj path corresponding to the source path.
21 test.built_file_must_exist('a', type=test.STATIC_LIB, libdir='obj.target')
22
23 # TODO(bradnelson/mark):
24 # On linux and windows a library target will at least pull its link dependencies
25 # into the generated sln/_main.scons, since not doing so confuses users.
26 # This is not currently implemented on mac, which has the opposite behavior.
27 if test.format == 'xcode':
28 test.built_file_must_not_exist('b', type=test.STATIC_LIB)
29 else:
30 test.built_file_must_exist('b', type=test.STATIC_LIB, libdir='obj.target/b')
31
32 test.pass_test()
+0
-16
third_party/gyp/test/dependencies/lib_only.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'a',
8 'type': 'static_library',
9 'sources': [
10 'a.c',
11 ],
12 'dependencies': ['b/b.gyp:b'],
13 },
14 ],
15 }
+0
-26
third_party/gyp/test/dependency-copy/gyptest-copy.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies dependencies do the copy step.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('copies.gyp', chdir='src')
15
16 test.build('copies.gyp', 'proj2', chdir='src')
17
18 test.run_built_executable('proj1',
19 chdir='src',
20 stdout="Hello from file1.c\n")
21 test.run_built_executable('proj2',
22 chdir='src',
23 stdout="Hello from file2.c\n")
24
25 test.pass_test()
+0
-25
third_party/gyp/test/dependency-copy/src/copies.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'proj1',
8 'type': 'executable',
9 'sources': [
10 'file1.c',
11 ],
12 },
13 {
14 'target_name': 'proj2',
15 'type': 'executable',
16 'sources': [
17 'file2.c',
18 ],
19 'dependencies': [
20 'proj1',
21 ]
22 },
23 ],
24 }
+0
-7
third_party/gyp/test/dependency-copy/src/file1.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from file1.c\n");
5 return 0;
6 }
+0
-7
third_party/gyp/test/dependency-copy/src/file2.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from file2.c\n");
5 return 0;
6 }
+0
-16
third_party/gyp/test/generator-output/actions/actions.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'pull_in_all_actions',
8 'type': 'none',
9 'dependencies': [
10 'subdir1/executable.gyp:*',
11 'subdir2/none.gyp:*',
12 ],
13 },
14 ],
15 }
+0
-4
third_party/gyp/test/generator-output/actions/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
third_party/gyp/test/generator-output/actions/subdir1/actions-out/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
third_party/gyp/test/generator-output/actions/subdir1/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-44
third_party/gyp/test/generator-output/actions/subdir1/executable.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program',
8 'type': 'executable',
9 'msvs_cygwin_shell': 0,
10 'sources': [
11 'program.c',
12 ],
13 'actions': [
14 {
15 'action_name': 'make-prog1',
16 'inputs': [
17 'make-prog1.py',
18 ],
19 'outputs': [
20 '<(INTERMEDIATE_DIR)/prog1.c',
21 ],
22 'action': [
23 'python', '<(_inputs)', '<@(_outputs)',
24 ],
25 'process_outputs_as_sources': 1,
26 },
27 {
28 'action_name': 'make-prog2',
29 'inputs': [
30 'make-prog2.py',
31 ],
32 'outputs': [
33 'actions-out/prog2.c',
34 ],
35 'action': [
36 'python', '<(_inputs)', '<@(_outputs)',
37 ],
38 'process_outputs_as_sources': 1,
39 },
40 ],
41 },
42 ],
43 }
+0
-20
third_party/gyp/test/generator-output/actions/subdir1/make-prog1.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = r"""
9 #include <stdio.h>
10
11 void prog1(void)
12 {
13 printf("Hello from make-prog1.py\n");
14 }
15 """
16
17 open(sys.argv[1], 'w').write(contents)
18
19 sys.exit(0)
+0
-20
third_party/gyp/test/generator-output/actions/subdir1/make-prog2.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = r"""
9 #include <stdio.h>
10
11 void prog2(void)
12 {
13 printf("Hello from make-prog2.py\n");
14 }
15 """
16
17 open(sys.argv[1], 'w').write(contents)
18
19 sys.exit(0)
+0
-12
third_party/gyp/test/generator-output/actions/subdir1/program.c less more
0 #include <stdio.h>
1
2 extern void prog1(void);
3 extern void prog2(void);
4
5 int main(int argc, char *argv[])
6 {
7 printf("Hello from program.c\n");
8 prog1();
9 prog2();
10 return 0;
11 }
+0
-4
third_party/gyp/test/generator-output/actions/subdir2/actions-out/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
third_party/gyp/test/generator-output/actions/subdir2/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-11
third_party/gyp/test/generator-output/actions/subdir2/make-file.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = "Hello from make-file.py\n"
9
10 open(sys.argv[1], 'wb').write(contents)
+0
-31
third_party/gyp/test/generator-output/actions/subdir2/none.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'file',
8 'type': 'none',
9 'msvs_cygwin_shell': 0,
10 'actions': [
11 {
12 'action_name': 'make-file',
13 'inputs': [
14 'make-file.py',
15 ],
16 'outputs': [
17 'actions-out/file.out',
18 # TODO: enhance testing infrastructure to test this
19 # without having to hard-code the intermediate dir paths.
20 #'<(INTERMEDIATE_DIR)/file.out',
21 ],
22 'action': [
23 'python', '<(_inputs)', '<@(_outputs)',
24 ],
25 'process_outputs_as_sources': 1,
26 }
27 ],
28 },
29 ],
30 }
+0
-4
third_party/gyp/test/generator-output/copies/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
third_party/gyp/test/generator-output/copies/copies-out/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-50
third_party/gyp/test/generator-output/copies/copies.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'pull_in_subdir',
8 'type': 'none',
9 'dependencies': [
10 'subdir/subdir.gyp:*',
11 ],
12 },
13 {
14 'target_name': 'copies1',
15 'type': 'none',
16 'copies': [
17 {
18 'destination': 'copies-out',
19 'files': [
20 'file1',
21 ],
22 },
23 ],
24 },
25 {
26 'target_name': 'copies2',
27 'type': 'none',
28 'copies': [
29 {
30 'destination': '<(PRODUCT_DIR)/copies-out',
31 'files': [
32 'file2',
33 ],
34 },
35 ],
36 },
37 # Verify that a null 'files' list doesn't gag the generators.
38 {
39 'target_name': 'copies_null',
40 'type': 'none',
41 'copies': [
42 {
43 'destination': '<(PRODUCT_DIR)/copies-null',
44 'files': [],
45 },
46 ],
47 },
48 ],
49 }
+0
-1
third_party/gyp/test/generator-output/copies/file1 less more
0 file1 contents
+0
-1
third_party/gyp/test/generator-output/copies/file2 less more
0 file2 contents
+0
-4
third_party/gyp/test/generator-output/copies/subdir/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
third_party/gyp/test/generator-output/copies/subdir/copies-out/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-1
third_party/gyp/test/generator-output/copies/subdir/file3 less more
0 file3 contents
+0
-1
third_party/gyp/test/generator-output/copies/subdir/file4 less more
0 file4 contents
+0
-32
third_party/gyp/test/generator-output/copies/subdir/subdir.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'copies3',
8 'type': 'none',
9 'copies': [
10 {
11 'destination': 'copies-out',
12 'files': [
13 'file3',
14 ],
15 },
16 ],
17 },
18 {
19 'target_name': 'copies4',
20 'type': 'none',
21 'copies': [
22 {
23 'destination': '<(PRODUCT_DIR)/copies-out',
24 'files': [
25 'file4',
26 ],
27 },
28 ],
29 },
30 ],
31 }
+0
-57
third_party/gyp/test/generator-output/gyptest-actions.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies --generator-output= behavior when using actions.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 # All the generated files should go under 'gypfiles'. The source directory
15 # ('actions') should be untouched.
16 test.writable(test.workpath('actions'), False)
17 test.run_gyp('actions.gyp',
18 '--generator-output=' + test.workpath('gypfiles'),
19 chdir='actions')
20
21 test.writable(test.workpath('actions'), True)
22
23 test.relocate('actions', 'relocate/actions')
24 test.relocate('gypfiles', 'relocate/gypfiles')
25
26 test.writable(test.workpath('relocate/actions'), False)
27
28 # Some of the action outputs use "pure" relative paths (i.e. without prefixes
29 # like <(INTERMEDIATE_DIR) or <(PROGRAM_DIR)). Even though we are building under
30 # 'gypfiles', such outputs will still be created relative to the original .gyp
31 # sources. Projects probably wouldn't normally do this, since it kind of defeats
32 # the purpose of '--generator-output', but it is supported behaviour.
33 test.writable(test.workpath('relocate/actions/build'), True)
34 test.writable(test.workpath('relocate/actions/subdir1/build'), True)
35 test.writable(test.workpath('relocate/actions/subdir1/actions-out'), True)
36 test.writable(test.workpath('relocate/actions/subdir2/build'), True)
37 test.writable(test.workpath('relocate/actions/subdir2/actions-out'), True)
38
39 test.build('actions.gyp', test.ALL, chdir='relocate/gypfiles')
40
41 expect = """\
42 Hello from program.c
43 Hello from make-prog1.py
44 Hello from make-prog2.py
45 """
46
47 if test.format == 'xcode':
48 chdir = 'relocate/actions/subdir1'
49 else:
50 chdir = 'relocate/gypfiles'
51 test.run_built_executable('program', chdir=chdir, stdout=expect)
52
53 test.must_match('relocate/actions/subdir2/actions-out/file.out',
54 "Hello from make-file.py\n")
55
56 test.pass_test()
+0
-57
third_party/gyp/test/generator-output/gyptest-copies.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies file copies using an explicit build target of 'all'.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.writable(test.workpath('copies'), False)
15
16 test.run_gyp('copies.gyp',
17 '--generator-output=' + test.workpath('gypfiles'),
18 chdir='copies')
19
20 test.writable(test.workpath('copies'), True)
21
22 test.relocate('copies', 'relocate/copies')
23 test.relocate('gypfiles', 'relocate/gypfiles')
24
25 test.writable(test.workpath('relocate/copies'), False)
26
27 test.writable(test.workpath('relocate/copies/build'), True)
28 test.writable(test.workpath('relocate/copies/copies-out'), True)
29 test.writable(test.workpath('relocate/copies/subdir/build'), True)
30 test.writable(test.workpath('relocate/copies/subdir/copies-out'), True)
31
32 test.build('copies.gyp', test.ALL, chdir='relocate/gypfiles')
33
34 test.must_match(['relocate', 'copies', 'copies-out', 'file1'],
35 "file1 contents\n")
36
37 if test.format == 'xcode':
38 chdir = 'relocate/copies/build'
39 elif test.format == 'make':
40 chdir = 'relocate/gypfiles/out'
41 else:
42 chdir = 'relocate/gypfiles'
43 test.must_match([chdir, 'Default', 'copies-out', 'file2'], "file2 contents\n")
44
45 test.must_match(['relocate', 'copies', 'subdir', 'copies-out', 'file3'],
46 "file3 contents\n")
47
48 if test.format == 'xcode':
49 chdir = 'relocate/copies/subdir/build'
50 elif test.format == 'make':
51 chdir = 'relocate/gypfiles/out'
52 else:
53 chdir = 'relocate/gypfiles'
54 test.must_match([chdir, 'Default', 'copies-out', 'file4'], "file4 contents\n")
55
56 test.pass_test()
+0
-59
third_party/gyp/test/generator-output/gyptest-relocate.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that a project hierarchy created with the --generator-output=
8 option can be built even when it's relocated to a different path.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.writable(test.workpath('src'), False)
16
17 test.run_gyp('prog1.gyp',
18 '-Dset_symroot=1',
19 '--generator-output=' + test.workpath('gypfiles'),
20 chdir='src')
21
22 test.writable(test.workpath('src'), True)
23
24 test.relocate('src', 'relocate/src')
25 test.relocate('gypfiles', 'relocate/gypfiles')
26
27 test.writable(test.workpath('relocate/src'), False)
28
29 test.writable(test.workpath('relocate/src/build'), True)
30 test.writable(test.workpath('relocate/src/subdir2/build'), True)
31 test.writable(test.workpath('relocate/src/subdir3/build'), True)
32
33 test.build('prog1.gyp', test.ALL, chdir='relocate/gypfiles')
34
35 chdir = 'relocate/gypfiles'
36
37 expect = """\
38 Hello from %s
39 Hello from inc.h
40 Hello from inc1/include1.h
41 Hello from inc2/include2.h
42 Hello from inc3/include3.h
43 Hello from subdir2/deeper/deeper.h
44 """
45
46 if test.format == 'xcode':
47 chdir = 'relocate/src'
48 test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
49
50 if test.format == 'xcode':
51 chdir = 'relocate/src/subdir2'
52 test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
53
54 if test.format == 'xcode':
55 chdir = 'relocate/src/subdir3'
56 test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
57
58 test.pass_test()
+0
-58
third_party/gyp/test/generator-output/gyptest-rules.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies --generator-output= behavior when using rules.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.writable(test.workpath('rules'), False)
15
16 test.run_gyp('rules.gyp',
17 '--generator-output=' + test.workpath('gypfiles'),
18 chdir='rules')
19
20 test.writable(test.workpath('rules'), True)
21
22 test.relocate('rules', 'relocate/rules')
23 test.relocate('gypfiles', 'relocate/gypfiles')
24
25 test.writable(test.workpath('relocate/rules'), False)
26
27 test.writable(test.workpath('relocate/rules/build'), True)
28 test.writable(test.workpath('relocate/rules/subdir1/build'), True)
29 test.writable(test.workpath('relocate/rules/subdir2/build'), True)
30 test.writable(test.workpath('relocate/rules/subdir2/rules-out'), True)
31
32 test.build('rules.gyp', test.ALL, chdir='relocate/gypfiles')
33
34 expect = """\
35 Hello from program.c
36 Hello from function1.in1
37 Hello from function2.in1
38 Hello from define3.in0
39 Hello from define4.in0
40 """
41
42 if test.format == 'xcode':
43 chdir = 'relocate/rules/subdir1'
44 else:
45 chdir = 'relocate/gypfiles'
46 test.run_built_executable('program', chdir=chdir, stdout=expect)
47
48 test.must_match('relocate/rules/subdir2/rules-out/file1.out',
49 "Hello from file1.in0\n")
50 test.must_match('relocate/rules/subdir2/rules-out/file2.out',
51 "Hello from file2.in0\n")
52 test.must_match('relocate/rules/subdir2/rules-out/file3.out',
53 "Hello from file3.in1\n")
54 test.must_match('relocate/rules/subdir2/rules-out/file4.out',
55 "Hello from file4.in1\n")
56
57 test.pass_test()
+0
-36
third_party/gyp/test/generator-output/gyptest-subdir2-deep.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a target from a .gyp file a few subdirectories
8 deep when the --generator-output= option is used to put the build
9 configuration files in a separate directory tree.
10 """
11
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 test.writable(test.workpath('src'), False)
17
18 test.writable(test.workpath('src/subdir2/deeper/build'), True)
19
20 test.run_gyp('deeper.gyp',
21 '-Dset_symroot=1',
22 '--generator-output=' + test.workpath('gypfiles'),
23 chdir='src/subdir2/deeper')
24
25 test.build('deeper.gyp', test.ALL, chdir='gypfiles')
26
27 chdir = 'gypfiles'
28
29 if test.format == 'xcode':
30 chdir = 'src/subdir2/deeper'
31 test.run_built_executable('deeper',
32 chdir=chdir,
33 stdout="Hello from deeper.c\n")
34
35 test.pass_test()
+0
-53
third_party/gyp/test/generator-output/gyptest-top-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a project hierarchy created when the --generator-output=
8 option is used to put the build configuration files in a separate
9 directory tree.
10 """
11
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 test.writable(test.workpath('src'), False)
17
18 test.run_gyp('prog1.gyp',
19 '-Dset_symroot=1',
20 '--generator-output=' + test.workpath('gypfiles'),
21 chdir='src')
22
23 test.writable(test.workpath('src/build'), True)
24 test.writable(test.workpath('src/subdir2/build'), True)
25 test.writable(test.workpath('src/subdir3/build'), True)
26
27 test.build('prog1.gyp', test.ALL, chdir='gypfiles')
28
29 chdir = 'gypfiles'
30
31 expect = """\
32 Hello from %s
33 Hello from inc.h
34 Hello from inc1/include1.h
35 Hello from inc2/include2.h
36 Hello from inc3/include3.h
37 Hello from subdir2/deeper/deeper.h
38 """
39
40 if test.format == 'xcode':
41 chdir = 'src'
42 test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
43
44 if test.format == 'xcode':
45 chdir = 'src/subdir2'
46 test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
47
48 if test.format == 'xcode':
49 chdir = 'src/subdir3'
50 test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
51
52 test.pass_test()
+0
-4
third_party/gyp/test/generator-output/rules/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-12
third_party/gyp/test/generator-output/rules/copy-file.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 contents = open(sys.argv[1], 'r').read()
9 open(sys.argv[2], 'wb').write(contents)
10
11 sys.exit(0)
+0
-16
third_party/gyp/test/generator-output/rules/rules.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'pull_in_all_actions',
8 'type': 'none',
9 'dependencies': [
10 'subdir1/executable.gyp:*',
11 'subdir2/none.gyp:*',
12 ],
13 },
14 ],
15 }
+0
-4
third_party/gyp/test/generator-output/rules/subdir1/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-1
third_party/gyp/test/generator-output/rules/subdir1/define3.in0 less more
0 #define STRING3 "Hello from define3.in0\n"
+0
-1
third_party/gyp/test/generator-output/rules/subdir1/define4.in0 less more
0 #define STRING4 "Hello from define4.in0\n"
+0
-59
third_party/gyp/test/generator-output/rules/subdir1/executable.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program',
8 'type': 'executable',
9 'msvs_cygwin_shell': 0,
10 'sources': [
11 'program.c',
12 'function1.in1',
13 'function2.in1',
14 'define3.in0',
15 'define4.in0',
16 ],
17 'include_dirs': [
18 '<(INTERMEDIATE_DIR)',
19 ],
20 'rules': [
21 {
22 'rule_name': 'copy_file_0',
23 'extension': 'in0',
24 'inputs': [
25 '../copy-file.py',
26 ],
27 'outputs': [
28 # TODO: fix SCons and Make to support generated files not
29 # in a variable-named path like <(INTERMEDIATE_DIR)
30 #'<(RULE_INPUT_ROOT).c',
31 '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h',
32 ],
33 'action': [
34 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
35 ],
36 'process_outputs_as_sources': 0,
37 },
38 {
39 'rule_name': 'copy_file_1',
40 'extension': 'in1',
41 'inputs': [
42 '../copy-file.py',
43 ],
44 'outputs': [
45 # TODO: fix SCons and Make to support generated files not
46 # in a variable-named path like <(INTERMEDIATE_DIR)
47 #'<(RULE_INPUT_ROOT).c',
48 '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
49 ],
50 'action': [
51 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
52 ],
53 'process_outputs_as_sources': 1,
54 },
55 ],
56 },
57 ],
58 }
+0
-6
third_party/gyp/test/generator-output/rules/subdir1/function1.in1 less more
0 #include <stdio.h>
1
2 void function1(void)
3 {
4 printf("Hello from function1.in1\n");
5 }
+0
-6
third_party/gyp/test/generator-output/rules/subdir1/function2.in1 less more
0 #include <stdio.h>
1
2 void function2(void)
3 {
4 printf("Hello from function2.in1\n");
5 }
+0
-18
third_party/gyp/test/generator-output/rules/subdir1/program.c less more
0 #include <stdio.h>
1 #include "define3.h"
2 #include "define4.h"
3
4 extern void function1(void);
5 extern void function2(void);
6 extern void function3(void);
7 extern void function4(void);
8
9 int main(int argc, char *argv[])
10 {
11 printf("Hello from program.c\n");
12 function1();
13 function2();
14 printf("%s", STRING3);
15 printf("%s", STRING4);
16 return 0;
17 }
+0
-4
third_party/gyp/test/generator-output/rules/subdir2/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-1
third_party/gyp/test/generator-output/rules/subdir2/file1.in0 less more
0 Hello from file1.in0
+0
-1
third_party/gyp/test/generator-output/rules/subdir2/file2.in0 less more
0 Hello from file2.in0
+0
-1
third_party/gyp/test/generator-output/rules/subdir2/file3.in1 less more
0 Hello from file3.in1
+0
-1
third_party/gyp/test/generator-output/rules/subdir2/file4.in1 less more
0 Hello from file4.in1
+0
-49
third_party/gyp/test/generator-output/rules/subdir2/none.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'files',
8 'type': 'none',
9 'msvs_cygwin_shell': 0,
10 'sources': [
11 'file1.in0',
12 'file2.in0',
13 'file3.in1',
14 'file4.in1',
15 ],
16 'rules': [
17 {
18 'rule_name': 'copy_file_0',
19 'extension': 'in0',
20 'inputs': [
21 '../copy-file.py',
22 ],
23 'outputs': [
24 'rules-out/<(RULE_INPUT_ROOT).out',
25 ],
26 'action': [
27 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
28 ],
29 'process_outputs_as_sources': 0,
30 },
31 {
32 'rule_name': 'copy_file_1',
33 'extension': 'in1',
34 'inputs': [
35 '../copy-file.py',
36 ],
37 'outputs': [
38 'rules-out/<(RULE_INPUT_ROOT).out',
39 ],
40 'action': [
41 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
42 ],
43 'process_outputs_as_sources': 1,
44 },
45 ],
46 },
47 ],
48 }
+0
-4
third_party/gyp/test/generator-output/rules/subdir2/rules-out/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
third_party/gyp/test/generator-output/src/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-1
third_party/gyp/test/generator-output/src/inc.h less more
0 #define INC_STRING "inc.h"
+0
-1
third_party/gyp/test/generator-output/src/inc1/include1.h less more
0 #define INCLUDE1_STRING "inc1/include1.h"
+0
-18
third_party/gyp/test/generator-output/src/prog1.c less more
0 #include <stdio.h>
1
2 #include "inc.h"
3 #include "include1.h"
4 #include "include2.h"
5 #include "include3.h"
6 #include "deeper.h"
7
8 int main(int argc, char *argv[])
9 {
10 printf("Hello from prog1.c\n");
11 printf("Hello from %s\n", INC_STRING);
12 printf("Hello from %s\n", INCLUDE1_STRING);
13 printf("Hello from %s\n", INCLUDE2_STRING);
14 printf("Hello from %s\n", INCLUDE3_STRING);
15 printf("Hello from %s\n", DEEPER_STRING);
16 return 0;
17 }
+0
-28
third_party/gyp/test/generator-output/src/prog1.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 'symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog1',
11 'type': 'executable',
12 'dependencies': [
13 'subdir2/prog2.gyp:prog2',
14 ],
15 'include_dirs': [
16 '.',
17 'inc1',
18 'subdir2/inc2',
19 'subdir3/inc3',
20 'subdir2/deeper',
21 ],
22 'sources': [
23 'prog1.c',
24 ],
25 },
26 ],
27 }
+0
-4
third_party/gyp/test/generator-output/src/subdir2/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-4
third_party/gyp/test/generator-output/src/subdir2/deeper/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-7
third_party/gyp/test/generator-output/src/subdir2/deeper/deeper.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from deeper.c\n");
5 return 0;
6 }
+0
-18
third_party/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../../symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'deeper',
11 'type': 'executable',
12 'sources': [
13 'deeper.c',
14 ],
15 },
16 ],
17 }
+0
-1
third_party/gyp/test/generator-output/src/subdir2/deeper/deeper.h less more
0 #define DEEPER_STRING "subdir2/deeper/deeper.h"
+0
-1
third_party/gyp/test/generator-output/src/subdir2/inc2/include2.h less more
0 #define INCLUDE2_STRING "inc2/include2.h"
+0
-18
third_party/gyp/test/generator-output/src/subdir2/prog2.c less more
0 #include <stdio.h>
1
2 #include "inc.h"
3 #include "include1.h"
4 #include "include2.h"
5 #include "include3.h"
6 #include "deeper.h"
7
8 int main(int argc, char *argv[])
9 {
10 printf("Hello from prog2.c\n");
11 printf("Hello from %s\n", INC_STRING);
12 printf("Hello from %s\n", INCLUDE1_STRING);
13 printf("Hello from %s\n", INCLUDE2_STRING);
14 printf("Hello from %s\n", INCLUDE3_STRING);
15 printf("Hello from %s\n", DEEPER_STRING);
16 return 0;
17 }
+0
-28
third_party/gyp/test/generator-output/src/subdir2/prog2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog2',
11 'type': 'executable',
12 'include_dirs': [
13 '..',
14 '../inc1',
15 'inc2',
16 '../subdir3/inc3',
17 'deeper',
18 ],
19 'dependencies': [
20 '../subdir3/prog3.gyp:prog3',
21 ],
22 'sources': [
23 'prog2.c',
24 ],
25 },
26 ],
27 }
+0
-4
third_party/gyp/test/generator-output/src/subdir3/build/README.txt less more
0 A place-holder for this Xcode build output directory, so that the
1 test script can verify that .xcodeproj files are not created in
2 their normal location by making the src/ read-only, and then
3 selectively making this build directory writable.
+0
-1
third_party/gyp/test/generator-output/src/subdir3/inc3/include3.h less more
0 #define INCLUDE3_STRING "inc3/include3.h"
+0
-18
third_party/gyp/test/generator-output/src/subdir3/prog3.c less more
0 #include <stdio.h>
1
2 #include "inc.h"
3 #include "include1.h"
4 #include "include2.h"
5 #include "include3.h"
6 #include "deeper.h"
7
8 int main(int argc, char *argv[])
9 {
10 printf("Hello from prog3.c\n");
11 printf("Hello from %s\n", INC_STRING);
12 printf("Hello from %s\n", INCLUDE1_STRING);
13 printf("Hello from %s\n", INCLUDE2_STRING);
14 printf("Hello from %s\n", INCLUDE3_STRING);
15 printf("Hello from %s\n", DEEPER_STRING);
16 return 0;
17 }
+0
-25
third_party/gyp/test/generator-output/src/subdir3/prog3.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog3',
11 'type': 'executable',
12 'include_dirs': [
13 '..',
14 '../inc1',
15 '../subdir2/inc2',
16 'inc3',
17 '../subdir2/deeper',
18 ],
19 'sources': [
20 'prog3.c',
21 ],
22 },
23 ],
24 }
+0
-16
third_party/gyp/test/generator-output/src/symroot.gypi less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'variables': {
6 'set_symroot%': 0,
7 },
8 'conditions': [
9 ['set_symroot == 1', {
10 'xcode_settings': {
11 'SYMROOT': '<(DEPTH)/build',
12 },
13 }],
14 ],
15 }
+0
-24
third_party/gyp/test/hello/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simplest-possible build of a "Hello, world!" program
8 using an explicit build target of 'all'.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('hello.gyp')
16
17 test.build('hello.gyp', test.ALL)
18
19 test.run_built_executable('hello', stdout="Hello, world!\n")
20
21 test.up_to_date('hello.gyp', test.ALL)
22
23 test.pass_test()
+0
-24
third_party/gyp/test/hello/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simplest-possible build of a "Hello, world!" program
8 using the default build target.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('hello.gyp')
16
17 test.build('hello.gyp')
18
19 test.run_built_executable('hello', stdout="Hello, world!\n")
20
21 test.up_to_date('hello.gyp', test.DEFAULT)
22
23 test.pass_test()
+0
-32
third_party/gyp/test/hello/gyptest-disable-regyp.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that Makefiles don't get rebuilt when a source gyp file changes and
8 the disable_regeneration generator flag is set.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('hello.gyp', '-Gauto_regeneration=0')
16
17 test.build('hello.gyp', test.ALL)
18
19 test.run_built_executable('hello', stdout="Hello, world!\n")
20
21 # Sleep so that the changed gyp file will have a newer timestamp than the
22 # previously generated build files.
23 test.sleep()
24 test.write('hello.gyp', test.read('hello2.gyp'))
25
26 test.build('hello.gyp', test.ALL)
27
28 # Should still be the old executable, as regeneration was disabled.
29 test.run_built_executable('hello', stdout="Hello, world!\n")
30
31 test.pass_test()
+0
-32
third_party/gyp/test/hello/gyptest-regyp.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that Makefiles get rebuilt when a source gyp file changes.
8 """
9
10 import TestGyp
11
12 # Regenerating build files when a gyp file changes is currently only supported
13 # by the make generator.
14 test = TestGyp.TestGyp(formats=['make'])
15
16 test.run_gyp('hello.gyp')
17
18 test.build('hello.gyp', test.ALL)
19
20 test.run_built_executable('hello', stdout="Hello, world!\n")
21
22 # Sleep so that the changed gyp file will have a newer timestamp than the
23 # previously generated build files.
24 test.sleep()
25 test.write('hello.gyp', test.read('hello2.gyp'))
26
27 test.build('hello.gyp', test.ALL)
28
29 test.run_built_executable('hello', stdout="Hello, two!\n")
30
31 test.pass_test()
+0
-24
third_party/gyp/test/hello/gyptest-target.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simplest-possible build of a "Hello, world!" program
8 using an explicit build target of 'hello'.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('hello.gyp')
16
17 test.build('hello.gyp', 'hello')
18
19 test.run_built_executable('hello', stdout="Hello, world!\n")
20
21 test.up_to_date('hello.gyp', 'hello')
22
23 test.pass_test()
+0
-11
third_party/gyp/test/hello/hello.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 int main(int argc, char *argv[])
7 {
8 printf("Hello, world!\n");
9 return 0;
10 }
+0
-15
third_party/gyp/test/hello/hello.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'hello',
8 'type': 'executable',
9 'sources': [
10 'hello.c',
11 ],
12 },
13 ],
14 }
+0
-11
third_party/gyp/test/hello/hello2.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 int main(int argc, char *argv[])
7 {
8 printf("Hello, two!\n");
9 return 0;
10 }
+0
-15
third_party/gyp/test/hello/hello2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'hello',
8 'type': 'executable',
9 'sources': [
10 'hello2.c',
11 ],
12 },
13 ],
14 }
+0
-44
third_party/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies inclusion of $HOME/.gyp/includes.gypi works properly with relocation
8 and with regeneration.
9 """
10
11 import os
12 import TestGyp
13
14 # Regenerating build files when a gyp file changes is currently only supported
15 # by the make generator.
16 test = TestGyp.TestGyp(formats=['make'])
17
18 os.environ['HOME'] = os.path.abspath('home')
19
20 test.run_gyp('all.gyp', chdir='src')
21
22 # After relocating, we should still be able to build (build file shouldn't
23 # contain relative reference to ~/.gyp/includes.gypi)
24 test.relocate('src', 'relocate/src')
25
26 test.build('all.gyp', test.ALL, chdir='relocate/src')
27
28 test.run_built_executable('printfoo',
29 chdir='relocate/src',
30 stdout="FOO is fromhome\n");
31
32 # Building should notice any changes to ~/.gyp/includes.gypi and regyp.
33 test.sleep()
34
35 test.write('home/.gyp/include.gypi', test.read('home2/.gyp/include.gypi'))
36
37 test.build('all.gyp', test.ALL, chdir='relocate/src')
38
39 test.run_built_executable('printfoo',
40 chdir='relocate/src',
41 stdout="FOO is fromhome2\n");
42
43 test.pass_test()
+0
-30
third_party/gyp/test/home_dot_gyp/gyptest-home-includes.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies inclusion of $HOME/.gyp/includes.gypi works.
8 """
9
10 import os
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 os.environ['HOME'] = os.path.abspath('home')
16
17 test.run_gyp('all.gyp', chdir='src')
18
19 # After relocating, we should still be able to build (build file shouldn't
20 # contain relative reference to ~/.gyp/includes.gypi)
21 test.relocate('src', 'relocate/src')
22
23 test.build('all.gyp', test.ALL, chdir='relocate/src')
24
25 test.run_built_executable('printfoo',
26 chdir='relocate/src',
27 stdout="FOO is fromhome\n");
28
29 test.pass_test()
+0
-5
third_party/gyp/test/home_dot_gyp/home/.gyp/include.gypi less more
0 {
1 'variables': {
2 'foo': '"fromhome"',
3 },
4 }
+0
-5
third_party/gyp/test/home_dot_gyp/home2/.gyp/include.gypi less more
0 {
1 'variables': {
2 'foo': '"fromhome2"',
3 },
4 }
+0
-22
third_party/gyp/test/home_dot_gyp/src/all.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'variables': {
6 'foo%': '"fromdefault"',
7 },
8 'targets': [
9 {
10 'target_name': 'printfoo',
11 'type': 'executable',
12 'sources': [
13 'printfoo.c',
14 ],
15 'defines': [
16 'FOO=<(foo)',
17 ],
18 },
19 ],
20 }
21
+0
-7
third_party/gyp/test/home_dot_gyp/src/printfoo.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("FOO is %s\n", FOO);
5 return 0;
6 }
+0
-42
third_party/gyp/test/include_dirs/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies use of include_dirs when using an explicit build target of 'all'.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('includes.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('includes.gyp', test.ALL, chdir='relocate/src')
19
20 expect = """\
21 Hello from includes.c
22 Hello from inc.h
23 Hello from include1.h
24 Hello from subdir/inc2/include2.h
25 """
26 test.run_built_executable('includes', stdout=expect, chdir='relocate/src')
27
28 if test.format == 'xcode':
29 chdir='relocate/src/subdir'
30 else:
31 chdir='relocate/src'
32
33 expect = """\
34 Hello from subdir/subdir_includes.c
35 Hello from subdir/inc.h
36 Hello from include1.h
37 Hello from subdir/inc2/include2.h
38 """
39 test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir)
40
41 test.pass_test()
+0
-42
third_party/gyp/test/include_dirs/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies use of include_dirs when using the default build target.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('includes.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('includes.gyp', test.ALL, chdir='relocate/src')
19
20 expect = """\
21 Hello from includes.c
22 Hello from inc.h
23 Hello from include1.h
24 Hello from subdir/inc2/include2.h
25 """
26 test.run_built_executable('includes', stdout=expect, chdir='relocate/src')
27
28 if test.format == 'xcode':
29 chdir='relocate/src/subdir'
30 else:
31 chdir='relocate/src'
32
33 expect = """\
34 Hello from subdir/subdir_includes.c
35 Hello from subdir/inc.h
36 Hello from include1.h
37 Hello from subdir/inc2/include2.h
38 """
39 test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir)
40
41 test.pass_test()
+0
-1
third_party/gyp/test/include_dirs/src/inc.h less more
0 #define INC_STRING "inc.h"
+0
-1
third_party/gyp/test/include_dirs/src/inc1/include1.h less more
0 #define INCLUDE1_STRING "include1.h"
+0
-14
third_party/gyp/test/include_dirs/src/includes.c less more
0 #include <stdio.h>
1
2 #include "inc.h"
3 #include "include1.h"
4 #include "include2.h"
5
6 int main(int argc, char *argv[])
7 {
8 printf("Hello from includes.c\n");
9 printf("Hello from %s\n", INC_STRING);
10 printf("Hello from %s\n", INCLUDE1_STRING);
11 printf("Hello from %s\n", INCLUDE2_STRING);
12 return 0;
13 }
+0
-23
third_party/gyp/test/include_dirs/src/includes.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'includes',
8 'type': 'executable',
9 'dependencies': [
10 'subdir/subdir_includes.gyp:subdir_includes',
11 ],
12 'include_dirs': [
13 '.',
14 'inc1',
15 'subdir/inc2',
16 ],
17 'sources': [
18 'includes.c',
19 ],
20 },
21 ],
22 }
+0
-1
third_party/gyp/test/include_dirs/src/subdir/inc.h less more
0 #define INC_STRING "subdir/inc.h"
+0
-1
third_party/gyp/test/include_dirs/src/subdir/inc2/include2.h less more
0 #define INCLUDE2_STRING "subdir/inc2/include2.h"
+0
-14
third_party/gyp/test/include_dirs/src/subdir/subdir_includes.c less more
0 #include <stdio.h>
1
2 #include "inc.h"
3 #include "include1.h"
4 #include "include2.h"
5
6 int main(int argc, char *argv[])
7 {
8 printf("Hello from subdir/subdir_includes.c\n");
9 printf("Hello from %s\n", INC_STRING);
10 printf("Hello from %s\n", INCLUDE1_STRING);
11 printf("Hello from %s\n", INCLUDE2_STRING);
12 return 0;
13 }
+0
-20
third_party/gyp/test/include_dirs/src/subdir/subdir_includes.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'subdir_includes',
8 'type': 'executable',
9 'include_dirs': [
10 '.',
11 '../inc1',
12 'inc2',
13 ],
14 'sources': [
15 'subdir_includes.c',
16 ],
17 },
18 ],
19 }
+0
-17
third_party/gyp/test/lib/README.txt less more
0 Supporting modules for GYP testing.
1
2 TestCmd.py
3 TestCommon.py
4
5 Modules for generic testing of command-line utilities,
6 specifically including the ability to copy a test configuration
7 to temporary directories (with default cleanup on exit) as part
8 of running test scripts that invoke commands, compare actual
9 against expected output, etc.
10
11 Our copies of these come from the SCons project,
12 http://www.scons.org/.
13
14 TestGyp.py
15
16 Modules for GYP-specific tests, of course.
+0
-1591
third_party/gyp/test/lib/TestCmd.py less more
0 """
1 TestCmd.py: a testing framework for commands and scripts.
2
3 The TestCmd module provides a framework for portable automated testing
4 of executable commands and scripts (in any language, not just Python),
5 especially commands and scripts that require file system interaction.
6
7 In addition to running tests and evaluating conditions, the TestCmd
8 module manages and cleans up one or more temporary workspace
9 directories, and provides methods for creating files and directories in
10 those workspace directories from in-line data, here-documents), allowing
11 tests to be completely self-contained.
12
13 A TestCmd environment object is created via the usual invocation:
14
15 import TestCmd
16 test = TestCmd.TestCmd()
17
18 There are a bunch of keyword arguments available at instantiation:
19
20 test = TestCmd.TestCmd(description = 'string',
21 program = 'program_or_script_to_test',
22 interpreter = 'script_interpreter',
23 workdir = 'prefix',
24 subdir = 'subdir',
25 verbose = Boolean,
26 match = default_match_function,
27 diff = default_diff_function,
28 combine = Boolean)
29
30 There are a bunch of methods that let you do different things:
31
32 test.verbose_set(1)
33
34 test.description_set('string')
35
36 test.program_set('program_or_script_to_test')
37
38 test.interpreter_set('script_interpreter')
39 test.interpreter_set(['script_interpreter', 'arg'])
40
41 test.workdir_set('prefix')
42 test.workdir_set('')
43
44 test.workpath('file')
45 test.workpath('subdir', 'file')
46
47 test.subdir('subdir', ...)
48
49 test.rmdir('subdir', ...)
50
51 test.write('file', "contents\n")
52 test.write(['subdir', 'file'], "contents\n")
53
54 test.read('file')
55 test.read(['subdir', 'file'])
56 test.read('file', mode)
57 test.read(['subdir', 'file'], mode)
58
59 test.writable('dir', 1)
60 test.writable('dir', None)
61
62 test.preserve(condition, ...)
63
64 test.cleanup(condition)
65
66 test.command_args(program = 'program_or_script_to_run',
67 interpreter = 'script_interpreter',
68 arguments = 'arguments to pass to program')
69
70 test.run(program = 'program_or_script_to_run',
71 interpreter = 'script_interpreter',
72 arguments = 'arguments to pass to program',
73 chdir = 'directory_to_chdir_to',
74 stdin = 'input to feed to the program\n')
75 universal_newlines = True)
76
77 p = test.start(program = 'program_or_script_to_run',
78 interpreter = 'script_interpreter',
79 arguments = 'arguments to pass to program',
80 universal_newlines = None)
81
82 test.finish(self, p)
83
84 test.pass_test()
85 test.pass_test(condition)
86 test.pass_test(condition, function)
87
88 test.fail_test()
89 test.fail_test(condition)
90 test.fail_test(condition, function)
91 test.fail_test(condition, function, skip)
92
93 test.no_result()
94 test.no_result(condition)
95 test.no_result(condition, function)
96 test.no_result(condition, function, skip)
97
98 test.stdout()
99 test.stdout(run)
100
101 test.stderr()
102 test.stderr(run)
103
104 test.symlink(target, link)
105
106 test.banner(string)
107 test.banner(string, width)
108
109 test.diff(actual, expected)
110
111 test.match(actual, expected)
112
113 test.match_exact("actual 1\nactual 2\n", "expected 1\nexpected 2\n")
114 test.match_exact(["actual 1\n", "actual 2\n"],
115 ["expected 1\n", "expected 2\n"])
116
117 test.match_re("actual 1\nactual 2\n", regex_string)
118 test.match_re(["actual 1\n", "actual 2\n"], list_of_regexes)
119
120 test.match_re_dotall("actual 1\nactual 2\n", regex_string)
121 test.match_re_dotall(["actual 1\n", "actual 2\n"], list_of_regexes)
122
123 test.tempdir()
124 test.tempdir('temporary-directory')
125
126 test.sleep()
127 test.sleep(seconds)
128
129 test.where_is('foo')
130 test.where_is('foo', 'PATH1:PATH2')
131 test.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4')
132
133 test.unlink('file')
134 test.unlink('subdir', 'file')
135
136 The TestCmd module provides pass_test(), fail_test(), and no_result()
137 unbound functions that report test results for use with the Aegis change
138 management system. These methods terminate the test immediately,
139 reporting PASSED, FAILED, or NO RESULT respectively, and exiting with
140 status 0 (success), 1 or 2 respectively. This allows for a distinction
141 between an actual failed test and a test that could not be properly
142 evaluated because of an external condition (such as a full file system
143 or incorrect permissions).
144
145 import TestCmd
146
147 TestCmd.pass_test()
148 TestCmd.pass_test(condition)
149 TestCmd.pass_test(condition, function)
150
151 TestCmd.fail_test()
152 TestCmd.fail_test(condition)
153 TestCmd.fail_test(condition, function)
154 TestCmd.fail_test(condition, function, skip)
155
156 TestCmd.no_result()
157 TestCmd.no_result(condition)
158 TestCmd.no_result(condition, function)
159 TestCmd.no_result(condition, function, skip)
160
161 The TestCmd module also provides unbound functions that handle matching
162 in the same way as the match_*() methods described above.
163
164 import TestCmd
165
166 test = TestCmd.TestCmd(match = TestCmd.match_exact)
167
168 test = TestCmd.TestCmd(match = TestCmd.match_re)
169
170 test = TestCmd.TestCmd(match = TestCmd.match_re_dotall)
171
172 The TestCmd module provides unbound functions that can be used for the
173 "diff" argument to TestCmd.TestCmd instantiation:
174
175 import TestCmd
176
177 test = TestCmd.TestCmd(match = TestCmd.match_re,
178 diff = TestCmd.diff_re)
179
180 test = TestCmd.TestCmd(diff = TestCmd.simple_diff)
181
182 The "diff" argument can also be used with standard difflib functions:
183
184 import difflib
185
186 test = TestCmd.TestCmd(diff = difflib.context_diff)
187
188 test = TestCmd.TestCmd(diff = difflib.unified_diff)
189
190 Lastly, the where_is() method also exists in an unbound function
191 version.
192
193 import TestCmd
194
195 TestCmd.where_is('foo')
196 TestCmd.where_is('foo', 'PATH1:PATH2')
197 TestCmd.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4')
198 """
199
200 # Copyright 2000-2010 Steven Knight
201 # This module is free software, and you may redistribute it and/or modify
202 # it under the same terms as Python itself, so long as this copyright message
203 # and disclaimer are retained in their original form.
204 #
205 # IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
206 # SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
207 # THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
208 # DAMAGE.
209 #
210 # THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
211 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
212 # PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
213 # AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
214 # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
215
216 __author__ = "Steven Knight <knight at baldmt dot com>"
217 __revision__ = "TestCmd.py 0.37.D001 2010/01/11 16:55:50 knight"
218 __version__ = "0.37"
219
220 import errno
221 import os
222 import os.path
223 import re
224 import shutil
225 import stat
226 import string
227 import sys
228 import tempfile
229 import time
230 import traceback
231 import types
232 import UserList
233
234 __all__ = [
235 'diff_re',
236 'fail_test',
237 'no_result',
238 'pass_test',
239 'match_exact',
240 'match_re',
241 'match_re_dotall',
242 'python_executable',
243 'TestCmd'
244 ]
245
246 try:
247 import difflib
248 except ImportError:
249 __all__.append('simple_diff')
250
251 def is_List(e):
252 return type(e) is types.ListType \
253 or isinstance(e, UserList.UserList)
254
255 try:
256 from UserString import UserString
257 except ImportError:
258 class UserString:
259 pass
260
261 if hasattr(types, 'UnicodeType'):
262 def is_String(e):
263 return type(e) is types.StringType \
264 or type(e) is types.UnicodeType \
265 or isinstance(e, UserString)
266 else:
267 def is_String(e):
268 return type(e) is types.StringType or isinstance(e, UserString)
269
270 tempfile.template = 'testcmd.'
271 if os.name in ('posix', 'nt'):
272 tempfile.template = 'testcmd.' + str(os.getpid()) + '.'
273 else:
274 tempfile.template = 'testcmd.'
275
276 re_space = re.compile('\s')
277
278 _Cleanup = []
279
280 _chain_to_exitfunc = None
281
282 def _clean():
283 global _Cleanup
284 cleanlist = filter(None, _Cleanup)
285 del _Cleanup[:]
286 cleanlist.reverse()
287 for test in cleanlist:
288 test.cleanup()
289 if _chain_to_exitfunc:
290 _chain_to_exitfunc()
291
292 try:
293 import atexit
294 except ImportError:
295 # TODO(1.5): atexit requires python 2.0, so chain sys.exitfunc
296 try:
297 _chain_to_exitfunc = sys.exitfunc
298 except AttributeError:
299 pass
300 sys.exitfunc = _clean
301 else:
302 atexit.register(_clean)
303
304 try:
305 zip
306 except NameError:
307 def zip(*lists):
308 result = []
309 for i in xrange(min(map(len, lists))):
310 result.append(tuple(map(lambda l, i=i: l[i], lists)))
311 return result
312
313 class Collector:
314 def __init__(self, top):
315 self.entries = [top]
316 def __call__(self, arg, dirname, names):
317 pathjoin = lambda n, d=dirname: os.path.join(d, n)
318 self.entries.extend(map(pathjoin, names))
319
320 def _caller(tblist, skip):
321 string = ""
322 arr = []
323 for file, line, name, text in tblist:
324 if file[-10:] == "TestCmd.py":
325 break
326 arr = [(file, line, name, text)] + arr
327 atfrom = "at"
328 for file, line, name, text in arr[skip:]:
329 if name in ("?", "<module>"):
330 name = ""
331 else:
332 name = " (" + name + ")"
333 string = string + ("%s line %d of %s%s\n" % (atfrom, line, file, name))
334 atfrom = "\tfrom"
335 return string
336
337 def fail_test(self = None, condition = 1, function = None, skip = 0):
338 """Cause the test to fail.
339
340 By default, the fail_test() method reports that the test FAILED
341 and exits with a status of 1. If a condition argument is supplied,
342 the test fails only if the condition is true.
343 """
344 if not condition:
345 return
346 if not function is None:
347 function()
348 of = ""
349 desc = ""
350 sep = " "
351 if not self is None:
352 if self.program:
353 of = " of " + self.program
354 sep = "\n\t"
355 if self.description:
356 desc = " [" + self.description + "]"
357 sep = "\n\t"
358
359 at = _caller(traceback.extract_stack(), skip)
360 sys.stderr.write("FAILED test" + of + desc + sep + at)
361
362 sys.exit(1)
363
364 def no_result(self = None, condition = 1, function = None, skip = 0):
365 """Causes a test to exit with no valid result.
366
367 By default, the no_result() method reports NO RESULT for the test
368 and exits with a status of 2. If a condition argument is supplied,
369 the test fails only if the condition is true.
370 """
371 if not condition:
372 return
373 if not function is None:
374 function()
375 of = ""
376 desc = ""
377 sep = " "
378 if not self is None:
379 if self.program:
380 of = " of " + self.program
381 sep = "\n\t"
382 if self.description:
383 desc = " [" + self.description + "]"
384 sep = "\n\t"
385
386 at = _caller(traceback.extract_stack(), skip)
387 sys.stderr.write("NO RESULT for test" + of + desc + sep + at)
388
389 sys.exit(2)
390
391 def pass_test(self = None, condition = 1, function = None):
392 """Causes a test to pass.
393
394 By default, the pass_test() method reports PASSED for the test
395 and exits with a status of 0. If a condition argument is supplied,
396 the test passes only if the condition is true.
397 """
398 if not condition:
399 return
400 if not function is None:
401 function()
402 sys.stderr.write("PASSED\n")
403 sys.exit(0)
404
405 def match_exact(lines = None, matches = None):
406 """
407 """
408 if not is_List(lines):
409 lines = string.split(lines, "\n")
410 if not is_List(matches):
411 matches = string.split(matches, "\n")
412 if len(lines) != len(matches):
413 return
414 for i in range(len(lines)):
415 if lines[i] != matches[i]:
416 return
417 return 1
418
419 def match_re(lines = None, res = None):
420 """
421 """
422 if not is_List(lines):
423 lines = string.split(lines, "\n")
424 if not is_List(res):
425 res = string.split(res, "\n")
426 if len(lines) != len(res):
427 return
428 for i in range(len(lines)):
429 s = "^" + res[i] + "$"
430 try:
431 expr = re.compile(s)
432 except re.error, e:
433 msg = "Regular expression error in %s: %s"
434 raise re.error, msg % (repr(s), e[0])
435 if not expr.search(lines[i]):
436 return
437 return 1
438
439 def match_re_dotall(lines = None, res = None):
440 """
441 """
442 if not type(lines) is type(""):
443 lines = string.join(lines, "\n")
444 if not type(res) is type(""):
445 res = string.join(res, "\n")
446 s = "^" + res + "$"
447 try:
448 expr = re.compile(s, re.DOTALL)
449 except re.error, e:
450 msg = "Regular expression error in %s: %s"
451 raise re.error, msg % (repr(s), e[0])
452 if expr.match(lines):
453 return 1
454
455 try:
456 import difflib
457 except ImportError:
458 pass
459 else:
460 def simple_diff(a, b, fromfile='', tofile='',
461 fromfiledate='', tofiledate='', n=3, lineterm='\n'):
462 """
463 A function with the same calling signature as difflib.context_diff
464 (diff -c) and difflib.unified_diff (diff -u) but which prints
465 output like the simple, unadorned 'diff" command.
466 """
467 sm = difflib.SequenceMatcher(None, a, b)
468 def comma(x1, x2):
469 return x1+1 == x2 and str(x2) or '%s,%s' % (x1+1, x2)
470 result = []
471 for op, a1, a2, b1, b2 in sm.get_opcodes():
472 if op == 'delete':
473 result.append("%sd%d" % (comma(a1, a2), b1))
474 result.extend(map(lambda l: '< ' + l, a[a1:a2]))
475 elif op == 'insert':
476 result.append("%da%s" % (a1, comma(b1, b2)))
477 result.extend(map(lambda l: '> ' + l, b[b1:b2]))
478 elif op == 'replace':
479 result.append("%sc%s" % (comma(a1, a2), comma(b1, b2)))
480 result.extend(map(lambda l: '< ' + l, a[a1:a2]))
481 result.append('---')
482 result.extend(map(lambda l: '> ' + l, b[b1:b2]))
483 return result
484
485 def diff_re(a, b, fromfile='', tofile='',
486 fromfiledate='', tofiledate='', n=3, lineterm='\n'):
487 """
488 A simple "diff" of two sets of lines when the expected lines
489 are regular expressions. This is a really dumb thing that
490 just compares each line in turn, so it doesn't look for
491 chunks of matching lines and the like--but at least it lets
492 you know exactly which line first didn't compare correctl...
493 """
494 result = []
495 diff = len(a) - len(b)
496 if diff < 0:
497 a = a + ['']*(-diff)
498 elif diff > 0:
499 b = b + ['']*diff
500 i = 0
501 for aline, bline in zip(a, b):
502 s = "^" + aline + "$"
503 try:
504 expr = re.compile(s)
505 except re.error, e:
506 msg = "Regular expression error in %s: %s"
507 raise re.error, msg % (repr(s), e[0])
508 if not expr.search(bline):
509 result.append("%sc%s" % (i+1, i+1))
510 result.append('< ' + repr(a[i]))
511 result.append('---')
512 result.append('> ' + repr(b[i]))
513 i = i+1
514 return result
515
516 if os.name == 'java':
517
518 python_executable = os.path.join(sys.prefix, 'jython')
519
520 else:
521
522 python_executable = sys.executable
523
524 if sys.platform == 'win32':
525
526 default_sleep_seconds = 2
527
528 def where_is(file, path=None, pathext=None):
529 if path is None:
530 path = os.environ['PATH']
531 if is_String(path):
532 path = string.split(path, os.pathsep)
533 if pathext is None:
534 pathext = os.environ['PATHEXT']
535 if is_String(pathext):
536 pathext = string.split(pathext, os.pathsep)
537 for ext in pathext:
538 if string.lower(ext) == string.lower(file[-len(ext):]):
539 pathext = ['']
540 break
541 for dir in path:
542 f = os.path.join(dir, file)
543 for ext in pathext:
544 fext = f + ext
545 if os.path.isfile(fext):
546 return fext
547 return None
548
549 else:
550
551 def where_is(file, path=None, pathext=None):
552 if path is None:
553 path = os.environ['PATH']
554 if is_String(path):
555 path = string.split(path, os.pathsep)
556 for dir in path:
557 f = os.path.join(dir, file)
558 if os.path.isfile(f):
559 try:
560 st = os.stat(f)
561 except OSError:
562 continue
563 if stat.S_IMODE(st[stat.ST_MODE]) & 0111:
564 return f
565 return None
566
567 default_sleep_seconds = 1
568
569
570
571 try:
572 import subprocess
573 except ImportError:
574 # The subprocess module doesn't exist in this version of Python,
575 # so we're going to cobble up something that looks just enough
576 # like its API for our purposes below.
577 import new
578
579 subprocess = new.module('subprocess')
580
581 subprocess.PIPE = 'PIPE'
582 subprocess.STDOUT = 'STDOUT'
583 subprocess.mswindows = (sys.platform == 'win32')
584
585 try:
586 import popen2
587 popen2.Popen3
588 except AttributeError:
589 class Popen3:
590 universal_newlines = 1
591 def __init__(self, command, **kw):
592 if sys.platform == 'win32' and command[0] == '"':
593 command = '"' + command + '"'
594 (stdin, stdout, stderr) = os.popen3(' ' + command)
595 self.stdin = stdin
596 self.stdout = stdout
597 self.stderr = stderr
598 def close_output(self):
599 self.stdout.close()
600 self.resultcode = self.stderr.close()
601 def wait(self):
602 resultcode = self.resultcode
603 if os.WIFEXITED(resultcode):
604 return os.WEXITSTATUS(resultcode)
605 elif os.WIFSIGNALED(resultcode):
606 return os.WTERMSIG(resultcode)
607 else:
608 return None
609
610 else:
611 try:
612 popen2.Popen4
613 except AttributeError:
614 # A cribbed Popen4 class, with some retrofitted code from
615 # the Python 1.5 Popen3 class methods to do certain things
616 # by hand.
617 class Popen4(popen2.Popen3):
618 childerr = None
619
620 def __init__(self, cmd, bufsize=-1):
621 p2cread, p2cwrite = os.pipe()
622 c2pread, c2pwrite = os.pipe()
623 self.pid = os.fork()
624 if self.pid == 0:
625 # Child
626 os.dup2(p2cread, 0)
627 os.dup2(c2pwrite, 1)
628 os.dup2(c2pwrite, 2)
629 for i in range(3, popen2.MAXFD):
630 try:
631 os.close(i)
632 except: pass
633 try:
634 os.execvp(cmd[0], cmd)
635 finally:
636 os._exit(1)
637 # Shouldn't come here, I guess
638 os._exit(1)
639 os.close(p2cread)
640 self.tochild = os.fdopen(p2cwrite, 'w', bufsize)
641 os.close(c2pwrite)
642 self.fromchild = os.fdopen(c2pread, 'r', bufsize)
643 popen2._active.append(self)
644
645 popen2.Popen4 = Popen4
646
647 class Popen3(popen2.Popen3, popen2.Popen4):
648 universal_newlines = 1
649 def __init__(self, command, **kw):
650 if kw.get('stderr') == 'STDOUT':
651 apply(popen2.Popen4.__init__, (self, command, 1))
652 else:
653 apply(popen2.Popen3.__init__, (self, command, 1))
654 self.stdin = self.tochild
655 self.stdout = self.fromchild
656 self.stderr = self.childerr
657 def wait(self, *args, **kw):
658 resultcode = apply(popen2.Popen3.wait, (self,)+args, kw)
659 if os.WIFEXITED(resultcode):
660 return os.WEXITSTATUS(resultcode)
661 elif os.WIFSIGNALED(resultcode):
662 return os.WTERMSIG(resultcode)
663 else:
664 return None
665
666 subprocess.Popen = Popen3
667
668
669
670 # From Josiah Carlson,
671 # ASPN : Python Cookbook : Module to allow Asynchronous subprocess use on Windows and Posix platforms
672 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/440554
673
674 PIPE = subprocess.PIPE
675
676 if subprocess.mswindows:
677 from win32file import ReadFile, WriteFile
678 from win32pipe import PeekNamedPipe
679 import msvcrt
680 else:
681 import select
682 import fcntl
683
684 try: fcntl.F_GETFL
685 except AttributeError: fcntl.F_GETFL = 3
686
687 try: fcntl.F_SETFL
688 except AttributeError: fcntl.F_SETFL = 4
689
690 class Popen(subprocess.Popen):
691 def recv(self, maxsize=None):
692 return self._recv('stdout', maxsize)
693
694 def recv_err(self, maxsize=None):
695 return self._recv('stderr', maxsize)
696
697 def send_recv(self, input='', maxsize=None):
698 return self.send(input), self.recv(maxsize), self.recv_err(maxsize)
699
700 def get_conn_maxsize(self, which, maxsize):
701 if maxsize is None:
702 maxsize = 1024
703 elif maxsize < 1:
704 maxsize = 1
705 return getattr(self, which), maxsize
706
707 def _close(self, which):
708 getattr(self, which).close()
709 setattr(self, which, None)
710
711 if subprocess.mswindows:
712 def send(self, input):
713 if not self.stdin:
714 return None
715
716 try:
717 x = msvcrt.get_osfhandle(self.stdin.fileno())
718 (errCode, written) = WriteFile(x, input)
719 except ValueError:
720 return self._close('stdin')
721 except (subprocess.pywintypes.error, Exception), why:
722 if why[0] in (109, errno.ESHUTDOWN):
723 return self._close('stdin')
724 raise
725
726 return written
727
728 def _recv(self, which, maxsize):
729 conn, maxsize = self.get_conn_maxsize(which, maxsize)
730 if conn is None:
731 return None
732
733 try:
734 x = msvcrt.get_osfhandle(conn.fileno())
735 (read, nAvail, nMessage) = PeekNamedPipe(x, 0)
736 if maxsize < nAvail:
737 nAvail = maxsize
738 if nAvail > 0:
739 (errCode, read) = ReadFile(x, nAvail, None)
740 except ValueError:
741 return self._close(which)
742 except (subprocess.pywintypes.error, Exception), why:
743 if why[0] in (109, errno.ESHUTDOWN):
744 return self._close(which)
745 raise
746
747 #if self.universal_newlines:
748 # read = self._translate_newlines(read)
749 return read
750
751 else:
752 def send(self, input):
753 if not self.stdin:
754 return None
755
756 if not select.select([], [self.stdin], [], 0)[1]:
757 return 0
758
759 try:
760 written = os.write(self.stdin.fileno(), input)
761 except OSError, why:
762 if why[0] == errno.EPIPE: #broken pipe
763 return self._close('stdin')
764 raise
765
766 return written
767
768 def _recv(self, which, maxsize):
769 conn, maxsize = self.get_conn_maxsize(which, maxsize)
770 if conn is None:
771 return None
772
773 try:
774 flags = fcntl.fcntl(conn, fcntl.F_GETFL)
775 except TypeError:
776 flags = None
777 else:
778 if not conn.closed:
779 fcntl.fcntl(conn, fcntl.F_SETFL, flags| os.O_NONBLOCK)
780
781 try:
782 if not select.select([conn], [], [], 0)[0]:
783 return ''
784
785 r = conn.read(maxsize)
786 if not r:
787 return self._close(which)
788
789 #if self.universal_newlines:
790 # r = self._translate_newlines(r)
791 return r
792 finally:
793 if not conn.closed and not flags is None:
794 fcntl.fcntl(conn, fcntl.F_SETFL, flags)
795
796 disconnect_message = "Other end disconnected!"
797
798 def recv_some(p, t=.1, e=1, tr=5, stderr=0):
799 if tr < 1:
800 tr = 1
801 x = time.time()+t
802 y = []
803 r = ''
804 pr = p.recv
805 if stderr:
806 pr = p.recv_err
807 while time.time() < x or r:
808 r = pr()
809 if r is None:
810 if e:
811 raise Exception(disconnect_message)
812 else:
813 break
814 elif r:
815 y.append(r)
816 else:
817 time.sleep(max((x-time.time())/tr, 0))
818 return ''.join(y)
819
820 # TODO(3.0: rewrite to use memoryview()
821 def send_all(p, data):
822 while len(data):
823 sent = p.send(data)
824 if sent is None:
825 raise Exception(disconnect_message)
826 data = buffer(data, sent)
827
828
829
830 try:
831 object
832 except NameError:
833 class object:
834 pass
835
836
837
838 class TestCmd(object):
839 """Class TestCmd
840 """
841
842 def __init__(self, description = None,
843 program = None,
844 interpreter = None,
845 workdir = None,
846 subdir = None,
847 verbose = None,
848 match = None,
849 diff = None,
850 combine = 0,
851 universal_newlines = 1):
852 self._cwd = os.getcwd()
853 self.description_set(description)
854 self.program_set(program)
855 self.interpreter_set(interpreter)
856 if verbose is None:
857 try:
858 verbose = max( 0, int(os.environ.get('TESTCMD_VERBOSE', 0)) )
859 except ValueError:
860 verbose = 0
861 self.verbose_set(verbose)
862 self.combine = combine
863 self.universal_newlines = universal_newlines
864 if not match is None:
865 self.match_function = match
866 else:
867 self.match_function = match_re
868 if not diff is None:
869 self.diff_function = diff
870 else:
871 try:
872 difflib
873 except NameError:
874 pass
875 else:
876 self.diff_function = simple_diff
877 #self.diff_function = difflib.context_diff
878 #self.diff_function = difflib.unified_diff
879 self._dirlist = []
880 self._preserve = {'pass_test': 0, 'fail_test': 0, 'no_result': 0}
881 if os.environ.has_key('PRESERVE') and not os.environ['PRESERVE'] is '':
882 self._preserve['pass_test'] = os.environ['PRESERVE']
883 self._preserve['fail_test'] = os.environ['PRESERVE']
884 self._preserve['no_result'] = os.environ['PRESERVE']
885 else:
886 try:
887 self._preserve['pass_test'] = os.environ['PRESERVE_PASS']
888 except KeyError:
889 pass
890 try:
891 self._preserve['fail_test'] = os.environ['PRESERVE_FAIL']
892 except KeyError:
893 pass
894 try:
895 self._preserve['no_result'] = os.environ['PRESERVE_NO_RESULT']
896 except KeyError:
897 pass
898 self._stdout = []
899 self._stderr = []
900 self.status = None
901 self.condition = 'no_result'
902 self.workdir_set(workdir)
903 self.subdir(subdir)
904
905 def __del__(self):
906 self.cleanup()
907
908 def __repr__(self):
909 return "%x" % id(self)
910
911 banner_char = '='
912 banner_width = 80
913
914 def banner(self, s, width=None):
915 if width is None:
916 width = self.banner_width
917 return s + self.banner_char * (width - len(s))
918
    if os.name == 'posix':

        # The escape() implementation is selected once, at class-definition
        # time, based on the host platform.
        def escape(self, arg):
            "escape shell special characters"
            slash = '\\'
            special = '"$'

            # Double backslashes first so the escapes added below are not
            # themselves re-escaped.
            arg = string.replace(arg, slash, slash+slash)
            for c in special:
                arg = string.replace(arg, c, slash+c)

            # Quote the whole argument if it contains whitespace.
            if re_space.search(arg):
                arg = '"' + arg + '"'
            return arg

    else:

        # Windows does not allow special characters in file names
        # anyway, so no need for an escape function, we will just quote
        # the arg.
        def escape(self, arg):
            if re_space.search(arg):
                arg = '"' + arg + '"'
            return arg
943
944 def canonicalize(self, path):
945 if is_List(path):
946 path = apply(os.path.join, tuple(path))
947 if not os.path.isabs(path):
948 path = os.path.join(self.workdir, path)
949 return path
950
    def chmod(self, path, mode):
        """Changes permissions on the specified file or directory
        path name."""
        # Relative paths are interpreted against the temporary workdir.
        path = self.canonicalize(path)
        os.chmod(path, mode)
956
    def cleanup(self, condition = None):
        """Removes any temporary working directories for the specified
        TestCmd environment.  If the environment variable PRESERVE was
        set when the TestCmd environment was created, temporary working
        directories are not removed.  If any of the environment variables
        PRESERVE_PASS, PRESERVE_FAIL, or PRESERVE_NO_RESULT were set
        when the TestCmd environment was created, then temporary working
        directories are not removed if the test passed, failed, or had
        no result, respectively.  Temporary working directories are also
        preserved for conditions specified via the preserve method.

        Typically, this method is not called directly, but is used when
        the script exits to clean up temporary working directories as
        appropriate for the exit status.
        """
        if not self._dirlist:
            return
        # Step out of any temporary directory before removing it.
        os.chdir(self._cwd)
        self.workdir = None
        if condition is None:
            condition = self.condition
        if self._preserve[condition]:
            for dir in self._dirlist:
                print "Preserved directory", dir
        else:
            # Remove directories deepest-first so children are deleted
            # before their parents.
            list = self._dirlist[:]
            list.reverse()
            for dir in list:
                self.writable(dir, 1)
                shutil.rmtree(dir, ignore_errors = 1)
        self._dirlist = []

        # Deregister from the module-level cleanup list; errors are
        # ignored because module globals may already be torn down at
        # interpreter shutdown.
        try:
            global _Cleanup
            _Cleanup.remove(self)
        except (AttributeError, ValueError):
            pass
994
    def command_args(self, program = None,
                           interpreter = None,
                           arguments = None):
        # Builds the argv list for a test run: [interpreter...] +
        # [program...] + [arguments...].  An explicit relative program
        # path is anchored at the original working directory; when no
        # program is given, the instance defaults (self.program and, if
        # needed, self.interpreter) are used.
        if program:
            if type(program) == type('') and not os.path.isabs(program):
                program = os.path.join(self._cwd, program)
        else:
            program = self.program
            if not interpreter:
                interpreter = self.interpreter
        # Normalize scalars to lists so the pieces concatenate uniformly.
        if not type(program) in [type([]), type(())]:
            program = [program]
        cmd = list(program)
        if interpreter:
            if not type(interpreter) in [type([]), type(())]:
                interpreter = [interpreter]
            cmd = list(interpreter) + cmd
        if arguments:
            # A string of arguments is split on whitespace.
            if type(arguments) == type(''):
                arguments = string.split(arguments)
            cmd.extend(arguments)
        return cmd
1017
    def description_set(self, description):
        """Set the description of the functionality being tested.
        """
        self.description = description
1022
    try:
        difflib
    except NameError:
        # difflib was unavailable at import time: fall back to printing
        # the expected and actual contents in full.
        def diff(self, a, b, name, *args, **kw):
            print self.banner('Expected %s' % name)
            print a
            print self.banner('Actual %s' % name)
            print b
    else:
        # Print a real diff of the two strings using the configured
        # diff_function; extra args/kwargs pass through to it.
        def diff(self, a, b, name, *args, **kw):
            print self.banner(name)
            args = (a.splitlines(), b.splitlines()) + args
            lines = apply(self.diff_function, args, kw)
            for l in lines:
                print l
1038
    def fail_test(self, condition = 1, function = None, skip = 0):
        """Cause the test to fail.
        """
        # A false condition is a no-op, so callers can write
        # self.fail_test(some_error_list).
        if not condition:
            return
        self.condition = 'fail_test'
        # Delegates to the module-level fail_test() helper, which reports
        # and exits the process.
        fail_test(self = self,
                  condition = condition,
                  function = function,
                  skip = skip)
1049
    def interpreter_set(self, interpreter):
        """Set the program to be used to interpret the program
        under test as a script.
        """
        self.interpreter = interpreter

    def match(self, lines, matches):
        """Compare actual and expected file contents.
        """
        # Uses the matcher chosen at construction time (exact or regex).
        return self.match_function(lines, matches)

    def match_exact(self, lines, matches):
        """Compare actual and expected file contents.
        """
        # Explicit exact-match comparison (module-level helper).
        return match_exact(lines, matches)

    def match_re(self, lines, res):
        """Compare actual and expected file contents.
        """
        # Line-by-line regular-expression comparison (module-level helper).
        return match_re(lines, res)

    def match_re_dotall(self, lines, res):
        """Compare actual and expected file contents.
        """
        # Whole-body DOTALL regular-expression comparison (module-level
        # helper).
        return match_re_dotall(lines, res)
1075
    def no_result(self, condition = 1, function = None, skip = 0):
        """Report that the test could not be run.
        """
        if not condition:
            return
        self.condition = 'no_result'
        # Delegates to the module-level no_result() helper.
        no_result(self = self,
                  condition = condition,
                  function = function,
                  skip = skip)

    def pass_test(self, condition = 1, function = None):
        """Cause the test to pass.
        """
        if not condition:
            return
        self.condition = 'pass_test'
        # Delegates to the module-level pass_test() helper.
        pass_test(self = self, condition = condition, function = function)
1094
1095 def preserve(self, *conditions):
1096 """Arrange for the temporary working directories for the
1097 specified TestCmd environment to be preserved for one or more
1098 conditions. If no conditions are specified, arranges for
1099 the temporary working directories to be preserved for all
1100 conditions.
1101 """
1102 if conditions is ():
1103 conditions = ('pass_test', 'fail_test', 'no_result')
1104 for cond in conditions:
1105 self._preserve[cond] = 1
1106
1107 def program_set(self, program):
1108 """Set the executable program or script to be tested.
1109 """
1110 if program and not os.path.isabs(program):
1111 program = os.path.join(self._cwd, program)
1112 self.program = program
1113
    def read(self, file, mode = 'rb'):
        """Reads and returns the contents of the specified file name.
        The file name may be a list, in which case the elements are
        concatenated with the os.path.join() method.  The file is
        assumed to be under the temporary working directory unless it
        is an absolute path name.  The I/O mode for the file may
        be specified; it must begin with an 'r'.  The default is
        'rb' (binary read).

        Raises ValueError for a non-read mode.
        """
        file = self.canonicalize(file)
        if mode[0] != 'r':
            raise ValueError, "mode must begin with 'r'"
        # NOTE(review): the file object is not closed explicitly; this
        # relies on garbage collection to close it.
        return open(file, mode).read()
1127
    def rmdir(self, dir):
        """Removes the specified dir name.
        The dir name may be a list, in which case the elements are
        concatenated with the os.path.join() method.  The dir is
        assumed to be under the temporary working directory unless it
        is an absolute path name.
        The dir must be empty.
        """
        dir = self.canonicalize(dir)
        os.rmdir(dir)
1138
    def start(self, program = None,
                    interpreter = None,
                    arguments = None,
                    universal_newlines = None,
                    **kw):
        """
        Starts a program or script for the test environment.

        The specified program will have the original directory
        prepended unless it is enclosed in a [list].

        Returns the Popen object for the launched process; stdout is
        always a pipe, stdin only if a 'stdin' keyword was supplied.
        """
        cmd = self.command_args(program, interpreter, arguments)
        cmd_string = string.join(map(self.escape, cmd), ' ')
        if self.verbose:
            sys.stderr.write(cmd_string + "\n")
        if universal_newlines is None:
            universal_newlines = self.universal_newlines

        # On Windows, if we make stdin a pipe when we plan to send
        # no input, and the test program exits before
        # Popen calls msvcrt.open_osfhandle, that call will fail.
        # So don't use a pipe for stdin if we don't need one.
        stdin = kw.get('stdin', None)
        if stdin is not None:
            stdin = subprocess.PIPE

        # 'combine' folds the child's stderr into its stdout stream.
        combine = kw.get('combine', self.combine)
        if combine:
            stderr_value = subprocess.STDOUT
        else:
            stderr_value = subprocess.PIPE

        return Popen(cmd,
                     stdin=stdin,
                     stdout=subprocess.PIPE,
                     stderr=stderr_value,
                     universal_newlines=universal_newlines)
1176
1177 def finish(self, popen, **kw):
1178 """
1179 Finishes and waits for the process being run under control of
1180 the specified popen argument, recording the exit status,
1181 standard output and error output.
1182 """
1183 popen.stdin.close()
1184 self.status = popen.wait()
1185 if not self.status:
1186 self.status = 0
1187 self._stdout.append(popen.stdout.read())
1188 if popen.stderr:
1189 stderr = popen.stderr.read()
1190 else:
1191 stderr = ''
1192 self._stderr.append(stderr)
1193
    def run(self, program = None,
                  interpreter = None,
                  arguments = None,
                  chdir = None,
                  stdin = None,
                  universal_newlines = None):
        """Runs a test of the program or script for the test
        environment.  Standard output and error output are saved for
        future retrieval via the stdout() and stderr() methods.

        The specified program will have the original directory
        prepended unless it is enclosed in a [list].
        """
        if chdir:
            # Remember where we were so we can restore it afterwards.
            oldcwd = os.getcwd()
            if not os.path.isabs(chdir):
                chdir = os.path.join(self.workpath(chdir))
            if self.verbose:
                sys.stderr.write("chdir(" + chdir + ")\n")
            os.chdir(chdir)
        p = self.start(program,
                       interpreter,
                       arguments,
                       universal_newlines,
                       stdin=stdin)
        # Feed the supplied input (a string or a list of lines) to the
        # child, then close its stdin so it sees EOF.
        if stdin:
            if is_List(stdin):
                for line in stdin:
                    p.stdin.write(line)
            else:
                p.stdin.write(stdin)
            p.stdin.close()

        out = p.stdout.read()
        if p.stderr is None:
            err = ''
        else:
            err = p.stderr.read()
        # NOTE(review): close_output appears to be an optional hook on
        # Popen-like objects for closing the output streams — confirm
        # against the Popen wrapper defined elsewhere in this module.
        try:
            close_output = p.close_output
        except AttributeError:
            p.stdout.close()
            if not p.stderr is None:
                p.stderr.close()
        else:
            close_output()

        self._stdout.append(out)
        self._stderr.append(err)

        self.status = p.wait()
        if not self.status:
            self.status = 0

        if chdir:
            os.chdir(oldcwd)
        # At verbosity >= 2, echo the status and captured streams.
        if self.verbose >= 2:
            write = sys.stdout.write
            write('============ STATUS: %d\n' % self.status)
            out = self.stdout()
            if out or self.verbose >= 3:
                write('============ BEGIN STDOUT (len=%d):\n' % len(out))
                write(out)
                write('============ END STDOUT\n')
            err = self.stderr()
            if err or self.verbose >= 3:
                write('============ BEGIN STDERR (len=%d)\n' % len(err))
                write(err)
                write('============ END STDERR\n')
1263
    def sleep(self, seconds = default_sleep_seconds):
        """Sleeps at least the specified number of seconds.  If no
        number is specified, sleeps at least the minimum number of
        seconds necessary to advance file time stamps on the current
        system.  Sleeping more seconds is all right.
        """
        time.sleep(seconds)
1271
    def stderr(self, run = None):
        """Returns the error output from the specified run number.
        If there is no specified run number, then returns the error
        output of the last run.  If the run number is less than zero,
        then returns the error output from that many runs back from the
        current run.

        Note: run numbers are 1-based; a run of 0 (falsy) is treated
        the same as None, i.e. the last run.
        """
        if not run:
            run = len(self._stderr)
        elif run < 0:
            run = len(self._stderr) + run
        run = run - 1
        return self._stderr[run]

    def stdout(self, run = None):
        """Returns the standard output from the specified run number.
        If there is no specified run number, then returns the standard
        output of the last run.  If the run number is less than zero,
        then returns the standard output from that many runs back from
        the current run.

        Note: run numbers are 1-based; a run of 0 (falsy) is treated
        the same as None, i.e. the last run.
        """
        if not run:
            run = len(self._stdout)
        elif run < 0:
            run = len(self._stdout) + run
        run = run - 1
        return self._stdout[run]
1299
1300 def subdir(self, *subdirs):
1301 """Create new subdirectories under the temporary working
1302 directory, one for each argument. An argument may be a list,
1303 in which case the list elements are concatenated using the
1304 os.path.join() method. Subdirectories multiple levels deep
1305 must be created using a separate argument for each level:
1306
1307 test.subdir('sub', ['sub', 'dir'], ['sub', 'dir', 'ectory'])
1308
1309 Returns the number of subdirectories actually created.
1310 """
1311 count = 0
1312 for sub in subdirs:
1313 if sub is None:
1314 continue
1315 if is_List(sub):
1316 sub = apply(os.path.join, tuple(sub))
1317 new = os.path.join(self.workdir, sub)
1318 try:
1319 os.mkdir(new)
1320 except OSError:
1321 pass
1322 else:
1323 count = count + 1
1324 return count
1325
    def symlink(self, target, link):
        """Creates a symlink to the specified target.
        The link name may be a list, in which case the elements are
        concatenated with the os.path.join() method.  The link is
        assumed to be under the temporary working directory unless it
        is an absolute path name.  The target is *not* assumed to be
        under the temporary working directory.
        """
        link = self.canonicalize(link)
        os.symlink(target, link)
1336
    def tempdir(self, path=None):
        """Creates a temporary directory.
        A unique directory name is generated if no path name is specified.
        The directory is created, and will be removed when the TestCmd
        object is destroyed.
        """
        if path is None:
            # NOTE(review): tempfile.mktemp() only *names* a path and is
            # racy (another process could create it before our mkdir);
            # kept for compatibility with old tempfile modules that lack
            # the prefix= keyword (hence the TypeError fallback).
            try:
                path = tempfile.mktemp(prefix=tempfile.template)
            except TypeError:
                path = tempfile.mktemp()
        os.mkdir(path)

        # Symlinks in the path will report things
        # differently from os.getcwd(), so chdir there
        # and back to fetch the canonical path.
        cwd = os.getcwd()
        try:
            os.chdir(path)
            path = os.getcwd()
        finally:
            os.chdir(cwd)

        # Uppercase the drive letter since the case of drive
        # letters is pretty much random on win32:
        drive,rest = os.path.splitdrive(path)
        if drive:
            path = string.upper(drive) + rest

        # Register the directory for cleanup, and register this instance
        # in the module-level cleanup list exactly once.
        self._dirlist.append(path)
        global _Cleanup
        try:
            _Cleanup.index(self)
        except ValueError:
            _Cleanup.append(self)

        return path
1375
    def touch(self, path, mtime=None):
        """Updates the modification time on the specified file or
        directory path name.  The default is to update to the
        current time if no explicit modification time is specified.
        The access time is preserved.
        """
        path = self.canonicalize(path)
        atime = os.path.getatime(path)
        if mtime is None:
            mtime = time.time()
        os.utime(path, (atime, mtime))
1386
    def unlink(self, file):
        """Unlinks the specified file name.
        The file name may be a list, in which case the elements are
        concatenated with the os.path.join() method.  The file is
        assumed to be under the temporary working directory unless it
        is an absolute path name.
        """
        file = self.canonicalize(file)
        os.unlink(file)
1396
    def verbose_set(self, verbose):
        """Set the verbose level.
        """
        self.verbose = verbose
1401
    def where_is(self, file, path=None, pathext=None):
        """Find an executable file.
        """
        if is_List(file):
            file = apply(os.path.join, tuple(file))
        if not os.path.isabs(file):
            # The bare where_is() call resolves to the module-level
            # function of the same name, not to this method.
            file = where_is(file, path, pathext)
        return file
1410
1411 def workdir_set(self, path):
1412 """Creates a temporary working directory with the specified
1413 path name. If the path is a null string (''), a unique
1414 directory name is created.
1415 """
1416 if (path != None):
1417 if path == '':
1418 path = None
1419 path = self.tempdir(path)
1420 self.workdir = path
1421
1422 def workpath(self, *args):
1423 """Returns the absolute path name to a subdirectory or file
1424 within the current temporary working directory. Concatenates
1425 the temporary working directory name with the specified
1426 arguments using the os.path.join() method.
1427 """
1428 return apply(os.path.join, (self.workdir,) + tuple(args))
1429
    def readable(self, top, read=1):
        """Make the specified directory tree readable (read == 1)
        or not (read == None).

        This method has no effect on Windows systems, which use a
        completely different mechanism to control file readability.
        """

        if sys.platform == 'win32':
            return

        # Choose a chmod helper that sets or clears the owner-read bit,
        # silently skipping paths that cannot be stat'ed.
        if read:
            def do_chmod(fname):
                try: st = os.stat(fname)
                except OSError: pass
                else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IREAD))
        else:
            def do_chmod(fname):
                try: st = os.stat(fname)
                except OSError: pass
                else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IREAD))

        if os.path.isfile(top):
            # If it's a file, that's easy, just chmod it.
            do_chmod(top)
        elif read:
            # It's a directory and we're trying to turn on read
            # permission, so it's also pretty easy, just chmod the
            # directory and then chmod every entry on our walk down the
            # tree.  Because os.path.walk() is top-down, we'll enable
            # read permission on any directories that have it disabled
            # before os.path.walk() tries to list their contents.
            do_chmod(top)

            def chmod_entries(arg, dirname, names, do_chmod=do_chmod):
                for n in names:
                    do_chmod(os.path.join(dirname, n))

            os.path.walk(top, chmod_entries, None)
        else:
            # It's a directory and we're trying to turn off read
            # permission, which means we have to chmod the directories
            # in the tree bottom-up, lest disabling read permission from
            # the top down get in the way of being able to get at lower
            # parts of the tree.  But os.path.walk() visits things top
            # down, so we just use an object to collect a list of all
            # of the entries in the tree, reverse the list, and then
            # chmod the reversed (bottom-up) list.
            col = Collector(top)
            os.path.walk(top, col, None)
            col.entries.reverse()
            for d in col.entries: do_chmod(d)
1482
    def writable(self, top, write=1):
        """Make the specified directory tree writable (write == 1)
        or not (write == None).
        """

        if sys.platform == 'win32':

            # On Windows os.chmod() only toggles the read-only flag, so
            # set the whole mode to writable or read-only.
            if write:
                def do_chmod(fname):
                    try: os.chmod(fname, stat.S_IWRITE)
                    except OSError: pass
            else:
                def do_chmod(fname):
                    try: os.chmod(fname, stat.S_IREAD)
                    except OSError: pass

        else:

            # On POSIX, set or clear just the owner-write bit (0200),
            # preserving the rest of the mode.
            if write:
                def do_chmod(fname):
                    try: st = os.stat(fname)
                    except OSError: pass
                    else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|0200))
            else:
                def do_chmod(fname):
                    try: st = os.stat(fname)
                    except OSError: pass
                    else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~0200))

        if os.path.isfile(top):
            do_chmod(top)
        else:
            # Apply to every entry collected from a top-down walk of the
            # tree.  Order does not matter for the write bit.
            col = Collector(top)
            os.path.walk(top, col, None)
            for d in col.entries: do_chmod(d)
1518
    def executable(self, top, execute=1):
        """Make the specified directory tree executable (execute == 1)
        or not (execute == None).

        This method has no effect on Windows systems, which use a
        completely different mechanism to control file executability.
        """

        if sys.platform == 'win32':
            return

        # Choose a chmod helper that sets or clears the owner-execute
        # bit, silently skipping paths that cannot be stat'ed.
        if execute:
            def do_chmod(fname):
                try: st = os.stat(fname)
                except OSError: pass
                else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IEXEC))
        else:
            def do_chmod(fname):
                try: st = os.stat(fname)
                except OSError: pass
                else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IEXEC))

        if os.path.isfile(top):
            # If it's a file, that's easy, just chmod it.
            do_chmod(top)
        elif execute:
            # It's a directory and we're trying to turn on execute
            # permission, so it's also pretty easy, just chmod the
            # directory and then chmod every entry on our walk down the
            # tree.  Because os.path.walk() is top-down, we'll enable
            # execute permission on any directories that have it disabled
            # before os.path.walk() tries to list their contents.
            do_chmod(top)

            def chmod_entries(arg, dirname, names, do_chmod=do_chmod):
                for n in names:
                    do_chmod(os.path.join(dirname, n))

            os.path.walk(top, chmod_entries, None)
        else:
            # It's a directory and we're trying to turn off execute
            # permission, which means we have to chmod the directories
            # in the tree bottom-up, lest disabling execute permission from
            # the top down get in the way of being able to get at lower
            # parts of the tree.  But os.path.walk() visits things top
            # down, so we just use an object to collect a list of all
            # of the entries in the tree, reverse the list, and then
            # chmod the reversed (bottom-up) list.
            col = Collector(top)
            os.path.walk(top, col, None)
            col.entries.reverse()
            for d in col.entries: do_chmod(d)
1571
    def write(self, file, content, mode = 'wb'):
        """Writes the specified content text (second argument) to the
        specified file name (first argument).  The file name may be
        a list, in which case the elements are concatenated with the
        os.path.join() method.  The file is created under the temporary
        working directory.  Any subdirectories in the path must already
        exist.  The I/O mode for the file may be specified; it must
        begin with a 'w'.  The default is 'wb' (binary write).

        Raises ValueError for a non-write mode.
        """
        file = self.canonicalize(file)
        if mode[0] != 'w':
            raise ValueError, "mode must begin with 'w'"
        # NOTE(review): the file object is not closed explicitly; this
        # relies on garbage collection to close (and flush) it.
        open(file, mode).write(content)
1585
1586 # Local Variables:
1587 # tab-width:4
1588 # indent-tabs-mode:nil
1589 # End:
1590 # vim: set expandtab tabstop=4 shiftwidth=4:
+0
-581
third_party/gyp/test/lib/TestCommon.py less more
0 """
1 TestCommon.py: a testing framework for commands and scripts
2 with commonly useful error handling
3
4 The TestCommon module provides a simple, high-level interface for writing
5 tests of executable commands and scripts, especially commands and scripts
6 that interact with the file system. All methods throw exceptions and
7 exit on failure, with useful error messages. This makes a number of
8 explicit checks unnecessary, making the test scripts themselves simpler
9 to write and easier to read.
10
11 The TestCommon class is a subclass of the TestCmd class. In essence,
12 TestCommon is a wrapper that handles common TestCmd error conditions in
13 useful ways. You can use TestCommon directly, or subclass it for your
14 program and add additional (or override) methods to tailor it to your
15 program's specific needs. Alternatively, the TestCommon class serves
16 as a useful example of how to define your own TestCmd subclass.
17
18 As a subclass of TestCmd, TestCommon provides access to all of the
19 variables and methods from the TestCmd module. Consequently, you can
20 use any variable or method documented in the TestCmd module without
21 having to explicitly import TestCmd.
22
23 A TestCommon environment object is created via the usual invocation:
24
25 import TestCommon
26 test = TestCommon.TestCommon()
27
28 You can use all of the TestCmd keyword arguments when instantiating a
29 TestCommon object; see the TestCmd documentation for details.
30
31 Here is an overview of the methods and keyword arguments that are
32 provided by the TestCommon class:
33
34 test.must_be_writable('file1', ['file2', ...])
35
36 test.must_contain('file', 'required text\n')
37
38 test.must_contain_all_lines(output, lines, ['title', find])
39
40 test.must_contain_any_line(output, lines, ['title', find])
41
42 test.must_exist('file1', ['file2', ...])
43
44 test.must_match('file', "expected contents\n")
45
46 test.must_not_be_writable('file1', ['file2', ...])
47
48 test.must_not_contain('file', 'banned text\n')
49
50 test.must_not_contain_any_line(output, lines, ['title', find])
51
52 test.must_not_exist('file1', ['file2', ...])
53
54 test.run(options = "options to be prepended to arguments",
55 stdout = "expected standard output from the program",
56 stderr = "expected error output from the program",
57 status = expected_status,
58 match = match_function)
59
60 The TestCommon module also provides the following variables
61
62 TestCommon.python_executable
63 TestCommon.exe_suffix
64 TestCommon.obj_suffix
65 TestCommon.shobj_prefix
66 TestCommon.shobj_suffix
67 TestCommon.lib_prefix
68 TestCommon.lib_suffix
69 TestCommon.dll_prefix
70 TestCommon.dll_suffix
71
72 """
73
74 # Copyright 2000-2010 Steven Knight
75 # This module is free software, and you may redistribute it and/or modify
76 # it under the same terms as Python itself, so long as this copyright message
77 # and disclaimer are retained in their original form.
78 #
79 # IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
80 # SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
81 # THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
82 # DAMAGE.
83 #
84 # THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
85 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
86 # PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
87 # AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
88 # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
89
90 __author__ = "Steven Knight <knight at baldmt dot com>"
91 __revision__ = "TestCommon.py 0.37.D001 2010/01/11 16:55:50 knight"
92 __version__ = "0.37"
93
94 import copy
95 import os
96 import os.path
97 import stat
98 import string
99 import sys
100 import types
101 import UserList
102
103 from TestCmd import *
104 from TestCmd import __all__
105
106 __all__.extend([ 'TestCommon',
107 'exe_suffix',
108 'obj_suffix',
109 'shobj_prefix',
110 'shobj_suffix',
111 'lib_prefix',
112 'lib_suffix',
113 'dll_prefix',
114 'dll_suffix',
115 ])
116
# Variables that describe the prefixes and suffixes on this system.
if sys.platform == 'win32':
    exe_suffix = '.exe'
    obj_suffix = '.obj'
    shobj_suffix = '.obj'
    shobj_prefix = ''
    lib_prefix = ''
    lib_suffix = '.lib'
    dll_prefix = ''
    dll_suffix = '.dll'
elif sys.platform == 'cygwin':
    exe_suffix = '.exe'
    obj_suffix = '.o'
    shobj_suffix = '.os'
    shobj_prefix = ''
    lib_prefix = 'lib'
    lib_suffix = '.a'
    dll_prefix = ''
    dll_suffix = '.dll'
elif string.find(sys.platform, 'irix') != -1:
    exe_suffix = ''
    obj_suffix = '.o'
    shobj_suffix = '.o'
    shobj_prefix = ''
    lib_prefix = 'lib'
    lib_suffix = '.a'
    dll_prefix = 'lib'
    dll_suffix = '.so'
elif string.find(sys.platform, 'darwin') != -1:
    exe_suffix = ''
    obj_suffix = '.o'
    shobj_suffix = '.os'
    shobj_prefix = ''
    lib_prefix = 'lib'
    lib_suffix = '.a'
    dll_prefix = 'lib'
    dll_suffix = '.dylib'
elif string.find(sys.platform, 'sunos') != -1:
    exe_suffix = ''
    obj_suffix = '.o'
    shobj_suffix = '.os'
    shobj_prefix = 'so_'
    lib_prefix = 'lib'
    lib_suffix = '.a'
    dll_prefix = 'lib'
    # Fixed: Solaris shared objects use '.so'; '.dylib' is the macOS
    # suffix and was a copy-paste from the darwin branch above.
    dll_suffix = '.so'
else:
    exe_suffix = ''
    obj_suffix = '.o'
    shobj_suffix = '.os'
    shobj_prefix = ''
    lib_prefix = 'lib'
    lib_suffix = '.a'
    dll_prefix = 'lib'
    dll_suffix = '.so'
172
def is_List(e):
    # True for plain lists and UserList instances (Python 2 idiom:
    # types.ListType is the built-in list type object).
    return type(e) is types.ListType \
        or isinstance(e, UserList.UserList)
176
def is_writable(f):
    """Returns a true value if the owner-write permission bit is set
    on the given file.  Note that this inspects the mode bits only;
    it does not test whether the current user can actually write.
    """
    st = os.stat(f)
    return st.st_mode & stat.S_IWUSR
180
def separate_files(flist):
    """Partitions the given file names into those that exist and those
    that do not, preserving order.  Returns (existing, missing).
    """
    existing = [f for f in flist if os.path.exists(f)]
    missing = [f for f in flist if not os.path.exists(f)]
    return existing, missing
190
# Platform-specific helpers, chosen at import time, for deciding whether
# a recorded exit status counts as a failure.
if os.name == 'posix':
    def _failed(self, status = 0):
        # A None status (either recorded or expected) never counts as a
        # failure; return None rather than a boolean in that case.
        if self.status is None or status is None:
            return None
        return _status(self) != status
    def _status(self):
        return self.status
elif os.name == 'nt':
    def _failed(self, status = 0):
        return not (self.status is None or status is None) and \
               self.status != status
    def _status(self):
        return self.status
204
205 class TestCommon(TestCmd):
206
207 # Additional methods from the Perl Test::Cmd::Common module
208 # that we may wish to add in the future:
209 #
210 # $test->subdir('subdir', ...);
211 #
212 # $test->copy('src_file', 'dst_file');
213
    def __init__(self, **kw):
        """Initialize a new TestCommon instance.  This involves just
        calling the base class initialization, and then changing directory
        to the workdir.

        All keyword arguments are passed through to TestCmd.__init__().
        """
        apply(TestCmd.__init__, [self], kw)
        os.chdir(self.workdir)
221
    def must_be_writable(self, *files):
        """Ensures that the specified file(s) exist and are writable.
        An individual file can be specified as a list of directory names,
        in which case the pathname will be constructed by concatenating
        them.  Exits FAILED if any of the files does not exist or is
        not writable.
        """
        # Normalize list-of-components arguments into path names.
        files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
        existing, missing = separate_files(files)
        unwritable = filter(lambda x, iw=is_writable: not iw(x), existing)
        if missing:
            print "Missing files: `%s'" % string.join(missing, "', `")
        if unwritable:
            print "Unwritable files: `%s'" % string.join(unwritable, "', `")
        # fail_test() is a no-op when the combined list is empty.
        self.fail_test(missing + unwritable)
237
    def must_contain(self, file, required, mode = 'rb'):
        """Ensures that the specified file contains the required text.
        Exits FAILED (after printing the required string and the file
        contents) if it does not.
        """
        file_contents = self.read(file, mode)
        contains = (string.find(file_contents, required) != -1)
        if not contains:
            print "File `%s' does not contain required string." % file
            print self.banner('Required string ')
            print required
            print self.banner('%s contents ' % file)
            print file_contents
        self.fail_test(not contains)
250
    def must_contain_all_lines(self, output, lines, title=None, find=None):
        """Ensures that the specified output string (first argument)
        contains all of the specified lines (second argument).

        An optional third argument can be used to describe the type
        of output being searched, and only shows up in failure output.

        An optional fourth argument can be used to supply a different
        function, of the form "find(output, line)", to use when searching
        for lines in the output.
        """
        # Default search is a plain substring test.
        if find is None:
            find = lambda o, l: string.find(o, l) != -1
        missing = []
        for line in lines:
            if not find(output, line):
                missing.append(line)

        if missing:
            if title is None:
                title = 'output'
            sys.stdout.write("Missing expected lines from %s:\n" % title)
            for line in missing:
                sys.stdout.write('    ' + repr(line) + '\n')
            sys.stdout.write(self.banner(title + ' '))
            sys.stdout.write(output)
            self.fail_test()
278
    def must_contain_any_line(self, output, lines, title=None, find=None):
        """Ensures that the specified output string (first argument)
        contains at least one of the specified lines (second argument).

        An optional third argument can be used to describe the type
        of output being searched, and only shows up in failure output.

        An optional fourth argument can be used to supply a different
        function, of the form "find(output, line)", to use when searching
        for lines in the output.
        """
        # Default search is a plain substring test.
        if find is None:
            find = lambda o, l: string.find(o, l) != -1
        for line in lines:
            if find(output, line):
                return

        if title is None:
            title = 'output'
        sys.stdout.write("Missing any expected line from %s:\n" % title)
        for line in lines:
            sys.stdout.write('    ' + repr(line) + '\n')
        sys.stdout.write(self.banner(title + ' '))
        sys.stdout.write(output)
        self.fail_test()
304
    def must_contain_lines(self, lines, output, title=None):
        # Deprecated; retain for backwards compatibility.
        # Note the (lines, output) argument order is reversed relative
        # to must_contain_all_lines().
        return self.must_contain_all_lines(output, lines, title)
308
    def must_exist(self, *files):
        """Ensures that the specified file(s) must exist.  An individual
        file may be specified as a list of directory names, in which case
        the pathname will be constructed by concatenating them.  Exits
        FAILED if any of the files does not exist.
        """
        files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
        missing = filter(lambda x: not os.path.exists(x), files)
        if missing:
            print "Missing files: `%s'" % string.join(missing, "', `")
        self.fail_test(missing)
320
    def must_match(self, file, expect, mode = 'rb'):
        """Matches the contents of the specified file (first argument)
        against the expected contents (second argument).  The expected
        contents are a list of lines or a string which will be split
        on newlines.
        """
        file_contents = self.read(file, mode)
        try:
            self.fail_test(not self.match(file_contents, expect))
        except KeyboardInterrupt:
            raise
        except:
            # Print a diff of expected vs. actual before re-raising
            # whatever fail_test() (or the matcher) raised.
            print "Unexpected contents of `%s'" % file
            self.diff(expect, file_contents, 'contents ')
            raise
336
337 def must_not_contain(self, file, banned, mode = 'rb'):
338 """Ensures that the specified file doesn't contain the banned text.
339 """
340 file_contents = self.read(file, mode)
341 contains = (string.find(file_contents, banned) != -1)
342 if contains:
343 print "File `%s' contains banned string." % file
344 print self.banner('Banned string ')
345 print banned
346 print self.banner('%s contents ' % file)
347 print file_contents
348 self.fail_test(contains)
349
350 def must_not_contain_any_line(self, output, lines, title=None, find=None):
351 """Ensures that the specified output string (first argument)
352 does not contain any of the specified lines (second argument).
353
354 An optional third argument can be used to describe the type
355 of output being searched, and only shows up in failure output.
356
357 An optional fourth argument can be used to supply a different
358 function, of the form "find(line, output), to use when searching
359 for lines in the output.
360 """
361 if find is None:
362 find = lambda o, l: string.find(o, l) != -1
363 unexpected = []
364 for line in lines:
365 if find(output, line):
366 unexpected.append(line)
367
368 if unexpected:
369 if title is None:
370 title = 'output'
371 sys.stdout.write("Unexpected lines in %s:\n" % title)
372 for line in unexpected:
373 sys.stdout.write(' ' + repr(line) + '\n')
374 sys.stdout.write(self.banner(title + ' '))
375 sys.stdout.write(output)
376 self.fail_test()
377
    def must_not_contain_lines(self, lines, output, title=None):
        # Deprecated wrapper; retained for backwards compatibility.
        # Note the legacy (lines, output) argument order, reversed
        # relative to must_not_contain_any_line(output, lines).
        return self.must_not_contain_any_line(output, lines, title)
380
381 def must_not_exist(self, *files):
382 """Ensures that the specified file(s) must not exist.
383 An individual file be specified as a list of directory names, in
384 which case the pathname will be constructed by concatenating them.
385 Exits FAILED if any of the files exists.
386 """
387 files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
388 existing = filter(os.path.exists, files)
389 if existing:
390 print "Unexpected files exist: `%s'" % string.join(existing, "', `")
391 self.fail_test(existing)
392
393
394 def must_not_be_writable(self, *files):
395 """Ensures that the specified file(s) exist and are not writable.
396 An individual file can be specified as a list of directory names,
397 in which case the pathname will be constructed by concatenating
398 them. Exits FAILED if any of the files does not exist or is
399 writable.
400 """
401 files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
402 existing, missing = separate_files(files)
403 writable = filter(is_writable, existing)
404 if missing:
405 print "Missing files: `%s'" % string.join(missing, "', `")
406 if writable:
407 print "Writable files: `%s'" % string.join(writable, "', `")
408 self.fail_test(missing + writable)
409
    def _complete(self, actual_stdout, expected_stdout,
                  actual_stderr, expected_stderr, status, match):
        """
        Post-processes running a subcommand, checking for failure
        status and displaying output appropriately.
        """
        # Exit-status check comes first: on an unexpected status, dump
        # both streams and fail regardless of the stream expectations.
        if _failed(self, status):
            expect = ''
            if status != 0:
                expect = " (expected %s)" % str(status)
            print "%s returned %s%s" % (self.program, str(_status(self)), expect)
            print self.banner('STDOUT ')
            print actual_stdout
            print self.banner('STDERR ')
            print actual_stderr
            self.fail_test()
        # A stdout/stderr expectation of None means "don't check".
        # On a stdout mismatch, show the diff plus any stderr (which
        # often explains the mismatch).
        if not expected_stdout is None and not match(actual_stdout, expected_stdout):
            self.diff(expected_stdout, actual_stdout, 'STDOUT ')
            if actual_stderr:
                print self.banner('STDERR ')
                print actual_stderr
            self.fail_test()
        # On a stderr mismatch, show the actual stdout for context
        # before diffing stderr.
        if not expected_stderr is None and not match(actual_stderr, expected_stderr):
            print self.banner('STDOUT ')
            print actual_stdout
            self.diff(expected_stderr, actual_stderr, 'STDERR ')
            self.fail_test()
437
    def start(self, program = None,
              interpreter = None,
              arguments = None,
              universal_newlines = None,
              **kw):
        """
        Starts a program or script for the test environment.

        This handles the "options" keyword argument and exceptions.
        """
        # 'options' is a keyword of this layer, not of TestCmd.start():
        # strip it from kw and prepend its value to the arguments.
        try:
            options = kw['options']
            del kw['options']
        except KeyError:
            pass
        else:
            if options:
                if arguments is None:
                    arguments = options
                else:
                    arguments = options + " " + arguments
        try:
            return apply(TestCmd.start,
                         (self, program, interpreter, arguments, universal_newlines),
                         kw)
        except KeyboardInterrupt:
            # Never swallow a user interrupt.
            raise
        except Exception, e:
            # On any other failure, dump whatever output the subcommand
            # produced before re-raising, to aid debugging.
            print self.banner('STDOUT ')
            try:
                print self.stdout()
            except IndexError:
                # No command has run yet, so there is no output to show.
                pass
            print self.banner('STDERR ')
            try:
                print self.stderr()
            except IndexError:
                pass
            cmd_args = self.command_args(program, interpreter, arguments)
            sys.stderr.write('Exception trying to execute: %s\n' % cmd_args)
            raise e
479
480 def finish(self, popen, stdout = None, stderr = '', status = 0, **kw):
481 """
482 Finishes and waits for the process being run under control of
483 the specified popen argument. Additional arguments are similar
484 to those of the run() method:
485
486 stdout The expected standard output from
487 the command. A value of None means
488 don't test standard output.
489
490 stderr The expected error output from
491 the command. A value of None means
492 don't test error output.
493
494 status The expected exit status from the
495 command. A value of None means don't
496 test exit status.
497 """
498 apply(TestCmd.finish, (self, popen,), kw)
499 match = kw.get('match', self.match)
500 self._complete(self.stdout(), stdout,
501 self.stderr(), stderr, status, match)
502
503 def run(self, options = None, arguments = None,
504 stdout = None, stderr = '', status = 0, **kw):
505 """Runs the program under test, checking that the test succeeded.
506
507 The arguments are the same as the base TestCmd.run() method,
508 with the addition of:
509
510 options Extra options that get appended to the beginning
511 of the arguments.
512
513 stdout The expected standard output from
514 the command. A value of None means
515 don't test standard output.
516
517 stderr The expected error output from
518 the command. A value of None means
519 don't test error output.
520
521 status The expected exit status from the
522 command. A value of None means don't
523 test exit status.
524
525 By default, this expects a successful exit (status = 0), does
526 not test standard output (stdout = None), and expects that error
527 output is empty (stderr = "").
528 """
529 if options:
530 if arguments is None:
531 arguments = options
532 else:
533 arguments = options + " " + arguments
534 kw['arguments'] = arguments
535 try:
536 match = kw['match']
537 del kw['match']
538 except KeyError:
539 match = self.match
540 apply(TestCmd.run, [self], kw)
541 self._complete(self.stdout(), stdout,
542 self.stderr(), stderr, status, match)
543
544 def skip_test(self, message="Skipping test.\n"):
545 """Skips a test.
546
547 Proper test-skipping behavior is dependent on the external
548 TESTCOMMON_PASS_SKIPS environment variable. If set, we treat
549 the skip as a PASS (exit 0), and otherwise treat it as NO RESULT.
550 In either case, we print the specified message as an indication
551 that the substance of the test was skipped.
552
553 (This was originally added to support development under Aegis.
554 Technically, skipping a test is a NO RESULT, but Aegis would
555 treat that as a test failure and prevent the change from going to
556 the next step. Since we ddn't want to force anyone using Aegis
557 to have to install absolutely every tool used by the tests, we
558 would actually report to Aegis that a skipped test has PASSED
559 so that the workflow isn't held up.)
560 """
561 if message:
562 sys.stdout.write(message)
563 sys.stdout.flush()
564 pass_skips = os.environ.get('TESTCOMMON_PASS_SKIPS')
565 if pass_skips in [None, 0, '0']:
566 # skip=1 means skip this function when showing where this
567 # result came from. They only care about the line where the
568 # script called test.skip_test(), not the line number where
569 # we call test.no_result().
570 self.no_result(skip=1)
571 else:
572 # We're under the development directory for this change,
573 # so this is an Aegis invocation; pass the test (exit 0).
574 self.pass_test()
575
576 # Local Variables:
577 # tab-width:4
578 # indent-tabs-mode:nil
579 # End:
580 # vim: set expandtab tabstop=4 shiftwidth=4:
+0
-686
third_party/gyp/test/lib/TestGyp.py less more
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 TestGyp.py: a testing framework for GYP integration tests.
8 """
9
10 import os
11 import re
12 import shutil
13 import stat
14 import sys
15
16 import TestCommon
17 from TestCommon import __all__
18
19 __all__.extend([
20 'TestGyp',
21 ])
22
23
class TestGypBase(TestCommon.TestCommon):
  """
  Class for controlling end-to-end tests of gyp generators.

  Instantiating this class will create a temporary directory and
  arrange for its destruction (via the TestCmd superclass) and
  copy all of the non-gyptest files in the directory hierarchy of the
  executing script.

  The default behavior is to test the 'gyp' or 'gyp.bat' file in the
  current directory.  An alternative may be specified explicitly on
  instantiation, or by setting the TESTGYP_GYP environment variable.

  This class should be subclassed for each supported gyp generator
  (format).  Various abstract methods below define calling signatures
  used by the test scripts to invoke builds on the generated build
  configuration and to run executables generated by those builds.
  """

  # Build tool driven by the subclass; initialize_build_tool() fills
  # .build_tool in from .build_tool_list, which each format overrides.
  build_tool = None
  build_tool_list = []

  # Platform-dependent file-name prefixes/suffixes, re-exported from
  # TestCommon so tests can build expected file names portably.
  _exe = TestCommon.exe_suffix
  _obj = TestCommon.obj_suffix
  shobj_ = TestCommon.shobj_prefix
  _shobj = TestCommon.shobj_suffix
  lib_ = TestCommon.lib_prefix
  _lib = TestCommon.lib_suffix
  dll_ = TestCommon.dll_prefix
  _dll = TestCommon.dll_suffix

  # Constants to represent different targets.
  ALL = '__all__'
  DEFAULT = '__default__'

  # Constants for different target types.
  EXECUTABLE = '__executable__'
  STATIC_LIB = '__static_lib__'
  SHARED_LIB = '__shared_lib__'

  def __init__(self, gyp=None, *args, **kw):
    # Remember where the test script lives, so its data files can be
    # copied into the temporary working directory below.
    self.origin_cwd = os.path.abspath(os.path.dirname(sys.argv[0]))

    # Resolve the gyp executable: explicit argument, then the
    # TESTGYP_GYP environment variable, then a platform default.
    if not gyp:
      gyp = os.environ.get('TESTGYP_GYP')
      if not gyp:
        if sys.platform == 'win32':
          gyp = 'gyp.bat'
        else:
          gyp = 'gyp'
    self.gyp = os.path.abspath(gyp)

    self.initialize_build_tool()

    if not kw.has_key('match'):
      kw['match'] = TestCommon.match_exact

    if not kw.has_key('workdir'):
      # Default behavior: the null string causes TestCmd to create
      # a temporary directory for us.
      kw['workdir'] = ''

    # 'formats' is a keyword of this layer; strip it before calling
    # the base-class constructor.
    formats = kw.get('formats', [])
    if kw.has_key('formats'):
      del kw['formats']

    super(TestGypBase, self).__init__(*args, **kw)

    # Entries beginning with '!' exclude a format; any remaining
    # entries form a whitelist of formats the test applies to.
    excluded_formats = set([f for f in formats if f[0] == '!'])
    included_formats = set(formats) - excluded_formats
    if ('!'+self.format in excluded_formats or
        included_formats and self.format not in included_formats):
      msg = 'Invalid test for %r format; skipping test.\n'
      self.skip_test(msg % self.format)

    self.copy_test_configuration(self.origin_cwd, self.workdir)
    self.set_configuration(None)

  def built_file_must_exist(self, name, type=None, **kw):
    """
    Fails the test if the specified built file name does not exist.
    """
    return self.must_exist(self.built_file_path(name, type, **kw))

  def built_file_must_not_exist(self, name, type=None, **kw):
    """
    Fails the test if the specified built file name exists.
    """
    return self.must_not_exist(self.built_file_path(name, type, **kw))

  def built_file_must_match(self, name, contents, **kw):
    """
    Fails the test if the contents of the specified built file name
    do not match the specified contents.
    """
    return self.must_match(self.built_file_path(name, **kw), contents)

  def built_file_must_not_match(self, name, contents, **kw):
    """
    Fails the test if the contents of the specified built file name
    match the specified contents.
    """
    return self.must_not_match(self.built_file_path(name, **kw), contents)

  def copy_test_configuration(self, source_dir, dest_dir):
    """
    Copies the test configuration from the specified source_dir
    (the directory in which the test script lives) to the
    specified dest_dir (a temporary working directory).

    This ignores all files and directories that begin with
    the string 'gyptest', and all '.svn' subdirectories.
    """
    for root, dirs, files in os.walk(source_dir):
      if '.svn' in dirs:
        # In-place removal prunes os.walk's descent into .svn.
        dirs.remove('.svn')
      # NOTE(review): rebinding dirs (rather than dirs[:] = ...) does
      # NOT prune os.walk's descent into 'gyptest*' directories; it
      # only skips copying them here.  Works in practice because
      # 'gyptest*' entries are files — confirm before relying on it.
      dirs = [ d for d in dirs if not d.startswith('gyptest') ]
      files = [ f for f in files if not f.startswith('gyptest') ]
      for dirname in dirs:
        source = os.path.join(root, dirname)
        destination = source.replace(source_dir, dest_dir)
        os.mkdir(destination)
        if sys.platform != 'win32':
          shutil.copystat(source, destination)
      for filename in files:
        source = os.path.join(root, filename)
        destination = source.replace(source_dir, dest_dir)
        shutil.copy2(source, destination)

  def initialize_build_tool(self):
    """
    Initializes the .build_tool attribute.

    Searches the .build_tool_list for an executable name on the user's
    $PATH.  The first tool on the list is used as-is if nothing is found
    on the current $PATH.
    """
    for build_tool in self.build_tool_list:
      if not build_tool:
        continue
      if os.path.isabs(build_tool):
        self.build_tool = build_tool
        return
      build_tool = self.where_is(build_tool)
      if build_tool:
        self.build_tool = build_tool
        return

    if self.build_tool_list:
      self.build_tool = self.build_tool_list[0]

  def relocate(self, source, destination):
    """
    Renames (relocates) the specified source (usually a directory)
    to the specified destination, creating the destination directory
    first if necessary.

    Note: Don't use this as a generic "rename" operation. In the
    future, "relocating" parts of a GYP tree may affect the state of
    the test to modify the behavior of later method calls.
    """
    destination_dir = os.path.dirname(destination)
    if not os.path.exists(destination_dir):
      self.subdir(destination_dir)
    os.rename(source, destination)

  def report_not_up_to_date(self):
    """
    Reports that a build is not up-to-date.

    This provides common reporting for formats that have complicated
    conditions for checking whether a build is up-to-date. Formats
    that expect exact output from the command (make, scons) can
    just set stdout= when they call the run_build() method.
    """
    print "Build is not up-to-date:"
    print self.banner('STDOUT ')
    print self.stdout()
    stderr = self.stderr()
    if stderr:
      print self.banner('STDERR ')
      print stderr

  def run_gyp(self, gyp_file, *args, **kw):
    """
    Runs gyp against the specified gyp_file with the specified args.
    """
    # TODO: --depth=. works around Chromium-specific tree climbing.
    args = ('--depth=.', '--format='+self.format, gyp_file) + args
    return self.run(program=self.gyp, arguments=args, **kw)

  def run(self, *args, **kw):
    """
    Executes a program by calling the superclass .run() method.

    This exists to provide a common place to filter out keyword
    arguments implemented in this layer, without having to update
    the tool-specific subclasses or clutter the tests themselves
    with platform-specific code.
    """
    # SYMROOT is only meaningful to the Xcode subclass's build();
    # strip it here so other generators can ignore it.
    if kw.has_key('SYMROOT'):
      del kw['SYMROOT']
    super(TestGypBase, self).run(*args, **kw)

  def set_configuration(self, configuration):
    """
    Sets the configuration, to be used for invoking the build
    tool and testing potential built output.
    """
    self.configuration = configuration

  def configuration_dirname(self):
    # Directory component of the configuration ('Foo' of 'Foo|Win32'),
    # or 'Default' when no configuration has been set.
    if self.configuration:
      return self.configuration.split('|')[0]
    else:
      return 'Default'

  def configuration_buildname(self):
    # Full configuration name as handed to the build tool, or
    # 'Default' when no configuration has been set.
    if self.configuration:
      return self.configuration
    else:
      return 'Default'

  #
  # Abstract methods to be defined by format-specific subclasses.
  #

  def build(self, gyp_file, target=None, **kw):
    """
    Runs a build of the specified target against the configuration
    generated from the specified gyp_file.

    A 'target' argument of None or the special value TestGyp.DEFAULT
    specifies the default argument for the underlying build tool.
    A 'target' argument of TestGyp.ALL specifies the 'all' target
    (if any) of the underlying build tool.
    """
    raise NotImplementedError

  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type.
    """
    raise NotImplementedError

  def built_file_basename(self, name, type=None, **kw):
    """
    Returns the base name of the specified file name, of the specified type.

    A bare=True keyword argument specifies that prefixes and suffixes shouldn't
    be applied.
    """
    if not kw.get('bare'):
      if type == self.EXECUTABLE:
        name = name + self._exe
      elif type == self.STATIC_LIB:
        name = self.lib_ + name + self._lib
      elif type == self.SHARED_LIB:
        name = self.dll_ + name + self._dll
    return name

  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable program built from a gyp-generated configuration.

    The specified name should be independent of any particular generator.
    Subclasses should find the output executable in the appropriate
    output build directory, tack on any necessary executable suffix, etc.
    """
    raise NotImplementedError

  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified target is up to date.

    The subclass should implement this by calling build()
    (or a reasonable equivalent), checking whatever conditions
    will tell it the build was an "up to date" null build, and
    failing if it isn't.
    """
    raise NotImplementedError
305
306
class TestGypGypd(TestGypBase):
  """
  Subclass exercising the GYP 'gypd' generator, which dumps gyp's
  internal data structure as pretty-printed Python.
  """
  format = 'gypd'
313
314
315 class TestGypMake(TestGypBase):
316 """
317 Subclass for testing the GYP Make generator.
318 """
319 format = 'make'
320 build_tool_list = ['make']
321 ALL = 'all'
322 def build(self, gyp_file, target=None, **kw):
323 """
324 Runs a Make build using the Makefiles generated from the specified
325 gyp_file.
326 """
327 arguments = kw.get('arguments', [])[:]
328 if self.configuration:
329 arguments.append('BUILDTYPE=' + self.configuration)
330 if target not in (None, self.DEFAULT):
331 arguments.append(target)
332 # Sub-directory builds provide per-gyp Makefiles (i.e.
333 # Makefile.gyp_filename), so use that if there is no Makefile.
334 chdir = kw.get('chdir', '')
335 if not os.path.exists(os.path.join(chdir, 'Makefile')):
336 print "NO Makefile in " + os.path.join(chdir, 'Makefile')
337 arguments.insert(0, '-f')
338 arguments.insert(1, os.path.splitext(gyp_file)[0] + '.Makefile')
339 kw['arguments'] = arguments
340 return self.run(program=self.build_tool, **kw)
341 def up_to_date(self, gyp_file, target=None, **kw):
342 """
343 Verifies that a build of the specified Make target is up to date.
344 """
345 if target in (None, self.DEFAULT):
346 message_target = 'all'
347 else:
348 message_target = target
349 kw['stdout'] = "make: Nothing to be done for `%s'.\n" % message_target
350 return self.build(gyp_file, target, **kw)
351 def run_built_executable(self, name, *args, **kw):
352 """
353 Runs an executable built by Make.
354 """
355 configuration = self.configuration_dirname()
356 libdir = os.path.join('out', configuration, 'lib')
357 # TODO(piman): when everything is cross-compile safe, remove lib.target
358 os.environ['LD_LIBRARY_PATH'] = libdir + '.host:' + libdir + '.target'
359 # Enclosing the name in a list avoids prepending the original dir.
360 program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
361 return self.run(program=program, *args, **kw)
362 def built_file_path(self, name, type=None, **kw):
363 """
364 Returns a path to the specified file name, of the specified type,
365 as built by Make.
366
367 Built files are in the subdirectory 'out/{configuration}'.
368 The default is 'out/Default'.
369
370 A chdir= keyword argument specifies the source directory
371 relative to which the output subdirectory can be found.
372
373 "type" values of STATIC_LIB or SHARED_LIB append the necessary
374 prefixes and suffixes to a platform-independent library base name.
375
376 A libdir= keyword argument specifies a library subdirectory other
377 than the default 'obj.target'.
378 """
379 result = []
380 chdir = kw.get('chdir')
381 if chdir:
382 result.append(chdir)
383 configuration = self.configuration_dirname()
384 result.extend(['out', configuration])
385 if type == self.STATIC_LIB:
386 result.append(kw.get('libdir', 'obj.target'))
387 elif type == self.SHARED_LIB:
388 result.append(kw.get('libdir', 'lib.target'))
389 result.append(self.built_file_basename(name, type, **kw))
390 return self.workpath(*result)
391
392
class TestGypMSVS(TestGypBase):
  """
  Subclass for testing the GYP Visual Studio generator.
  """
  format = 'msvs'

  # devenv build-summary line; group(1) captures the number of
  # projects that were already up to date.
  u = r'=== Build: 0 succeeded, 0 failed, (\d+) up-to-date, 0 skipped ==='
  up_to_date_re = re.compile(u, re.M)

  # Initial None element will indicate to our .initialize_build_tool()
  # method below that 'devenv' was not found on %PATH%.
  #
  # Note: we must use devenv.com to be able to capture build output.
  # Directly executing devenv.exe only sends output to BuildLog.htm.
  build_tool_list = [None, 'devenv.com']

  def initialize_build_tool(self):
    """
    Initializes the Visual Studio .build_tool parameter, searching %PATH%
    and %PATHEXT% for a devenv.{exe,bat,...} executable, and falling
    back to a hard-coded default (on the current drive) if necessary.
    """
    super(TestGypMSVS, self).initialize_build_tool()
    if not self.build_tool:
      # We didn't find 'devenv' on the path. Just hard-code a default,
      # and revisit this if it becomes important.
      possible = [
        # Note: if you're using this, set GYP_MSVS_VERSION=2008
        # to get the tests to pass.
        ('C:\\Program Files (x86)',
          'Microsoft Visual Studio 9.0', 'Common7', 'IDE', 'devenv.com'),
        ('C:\\Program Files',
          'Microsoft Visual Studio 9.0', 'Common7', 'IDE', 'devenv.com'),
        ('C:\\Program Files (x86)',
          'Microsoft Visual Studio 8', 'Common7', 'IDE', 'devenv.com'),
        ('C:\\Program Files',
          'Microsoft Visual Studio 8', 'Common7', 'IDE', 'devenv.com'),
      ]
      for build_tool in possible:
        bt = os.path.join(*build_tool)
        if os.path.exists(bt):
          self.build_tool = bt
          break

  def build(self, gyp_file, target=None, rebuild=False, **kw):
    """
    Runs a Visual Studio build using the configuration generated
    from the specified gyp_file.
    """
    configuration = self.configuration_buildname()
    if rebuild:
      build = '/Rebuild'
    else:
      build = '/Build'
    arguments = kw.get('arguments', [])[:]
    arguments.extend([gyp_file.replace('.gyp', '.sln'),
                      build, configuration])
    # Note: the Visual Studio generator doesn't add an explicit 'all'
    # target, so we just treat it the same as the default.
    if target not in (None, self.ALL, self.DEFAULT):
      arguments.extend(['/Project', target])
    if self.configuration:
      arguments.extend(['/ProjectConfig', self.configuration])
    kw['arguments'] = arguments
    return self.run(program=self.build_tool, **kw)

  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified Visual Studio target is up to date.
    """
    result = self.build(gyp_file, target, **kw)
    if not result:
      stdout = self.stdout()
      # Up to date only if devenv reported a non-zero count of
      # up-to-date projects in its summary line.
      m = self.up_to_date_re.search(stdout)
      if not m or m.group(1) == '0':
        self.report_not_up_to_date()
        self.fail_test()
    return result

  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable built by Visual Studio.
    """
    # NOTE(review): 'configuration' is computed but unused here
    # (built_file_path() recomputes it); candidate for cleanup.
    configuration = self.configuration_dirname()
    # Enclosing the name in a list avoids prepending the original dir.
    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
    return self.run(program=program, *args, **kw)

  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type,
    as built by Visual Studio.

    Built files are in a subdirectory that matches the configuration
    name. The default is 'Default'.

    A chdir= keyword argument specifies the source directory
    relative to which the output subdirectory can be found.

    "type" values of STATIC_LIB or SHARED_LIB append the necessary
    prefixes and suffixes to a platform-independent library base name.
    """
    result = []
    chdir = kw.get('chdir')
    if chdir:
      result.append(chdir)
    result.append(self.configuration_dirname())
    if type == self.STATIC_LIB:
      result.append('lib')
    result.append(self.built_file_basename(name, type, **kw))
    return self.workpath(*result)
500
501
class TestGypSCons(TestGypBase):
  """
  Subclass driving end-to-end tests through the GYP SCons generator.
  """
  format = 'scons'
  build_tool_list = ['scons', 'scons.py']
  ALL = 'all'

  def build(self, gyp_file, target=None, **kw):
    """
    Invokes scons on the SCons configuration generated from the
    specified gyp_file.
    """
    args = list(kw.get('arguments', []))
    dirname = os.path.dirname(gyp_file)
    if dirname:
      args.extend(['-C', dirname])
    if self.configuration:
      args.append('--mode=' + self.configuration)
    if target not in (None, self.DEFAULT):
      args.append(target)
    kw['arguments'] = args
    return self.run(program=self.build_tool, **kw)

  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that an SCons build of the specified target is a null build.
    """
    # SCons reports up-to-dateness per target; 'all' stands in for
    # the default.
    up_to_date_targets = (target in (None, self.DEFAULT)) and 'all' or target
    expected = ["scons: `%s' is up to date.\n" % t
                for t in up_to_date_targets.split()]
    kw['stdout'] = ''.join(expected)
    args = list(kw.get('arguments', []))
    args.append('-Q')
    kw['arguments'] = args
    return self.build(gyp_file, target, **kw)

  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable built by scons.
    """
    configuration = self.configuration_dirname()
    os.environ['LD_LIBRARY_PATH'] = os.path.join(configuration, 'lib')
    # Enclosing the name in a list avoids prepending the original dir.
    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
    return self.run(program=program, *args, **kw)

  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type,
    as built by Scons.

    Built files are in a subdirectory that matches the configuration
    name; the default is 'Default'.

    A chdir= keyword argument specifies the source directory
    relative to which the output subdirectory can be found.

    "type" values of STATIC_LIB or SHARED_LIB append the necessary
    prefixes and suffixes to a platform-independent library base name.
    """
    parts = []
    chdir = kw.get('chdir')
    if chdir:
      parts.append(chdir)
    parts.append(self.configuration_dirname())
    if type in (self.STATIC_LIB, self.SHARED_LIB):
      parts.append('lib')
    parts.append(self.built_file_basename(name, type, **kw))
    return self.workpath(*parts)
572
573
class TestGypXcode(TestGypBase):
  """
  Subclass driving end-to-end tests through the GYP Xcode generator.
  """
  format = 'xcode'
  build_tool_list = ['xcodebuild']

  phase_script_execution = ("\n"
                            "PhaseScriptExecution /\\S+/Script-[0-9A-F]+\\.sh\n"
                            "    cd /\\S+\n"
                            "    /bin/sh -c /\\S+/Script-[0-9A-F]+\\.sh\n"
                            "(make: Nothing to be done for `all'\\.\n)?")

  strip_up_to_date_expressions = [
    # Various actions or rules can run even when the overall build target
    # is up to date.  Strip those phases' GYP-generated output.
    re.compile(phase_script_execution, re.S),

    # The message from distcc_pump can trail the "BUILD SUCCEEDED"
    # message, so strip that, too.
    re.compile('__________Shutting down distcc-pump include server\n', re.S),
  ]

  up_to_date_ending = 'Checking Dependencies...\n** BUILD SUCCEEDED **\n'

  def build(self, gyp_file, target=None, **kw):
    """
    Runs xcodebuild against the .xcodeproj generated from the
    specified gyp_file.
    """
    # Work on a copy of 'arguments'; the caller may not expect the
    # list it passed in to be modified.
    args = list(kw.get('arguments', []))
    args.extend(['-project', gyp_file.replace('.gyp', '.xcodeproj')])
    if target == self.ALL:
      args.append('-alltargets',)
    elif target not in (None, self.DEFAULT):
      args.extend(['-target', target])
    if self.configuration:
      args.extend(['-configuration', self.configuration])
    symroot = kw.get('SYMROOT', '$SRCROOT/build')
    if symroot:
      args.append('SYMROOT='+symroot)
    kw['arguments'] = args
    return self.run(program=self.build_tool, **kw)

  def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that an Xcode build of the specified target was a null build.
    """
    result = self.build(gyp_file, target, **kw)
    if not result:
      output = self.stdout()
      # Strip phases that legitimately run even on a null build, then
      # require the canonical "nothing happened" tail.
      for strip_re in self.strip_up_to_date_expressions:
        output = strip_re.sub('', output)
      if not output.endswith(self.up_to_date_ending):
        self.report_not_up_to_date()
        self.fail_test()
    return result

  def run_built_executable(self, name, *args, **kw):
    """
    Runs an executable built by xcodebuild.
    """
    config_dir = self.configuration_dirname()
    os.environ['DYLD_LIBRARY_PATH'] = os.path.join('build', config_dir)
    # Enclosing the name in a list avoids prepending the original dir.
    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
    return self.run(program=program, *args, **kw)

  def built_file_path(self, name, type=None, **kw):
    """
    Returns a path to the specified file name, of the specified type,
    as built by Xcode.

    Built files are in the subdirectory 'build/{configuration}';
    the default is 'build/Default'.

    A chdir= keyword argument specifies the source directory
    relative to which the output subdirectory can be found.

    "type" values of STATIC_LIB or SHARED_LIB append the necessary
    prefixes and suffixes to a platform-independent library base name.
    """
    parts = []
    chdir = kw.get('chdir')
    if chdir:
      parts.append(chdir)
    parts.extend(['build', self.configuration_dirname()])
    parts.append(self.built_file_basename(name, type, **kw))
    return self.workpath(*parts)
663
664
665 format_class_list = [
666 TestGypGypd,
667 TestGypMake,
668 TestGypMSVS,
669 TestGypSCons,
670 TestGypXcode,
671 ]
672
673 def TestGyp(*args, **kw):
674 """
675 Returns an appropriate TestGyp* instance for a specified GYP format.
676 """
677 format = kw.get('format')
678 if format:
679 del kw['format']
680 else:
681 format = os.environ.get('TESTGYP_FORMAT')
682 for format_class in format_class_list:
683 if format == format_class.format:
684 return format_class(*args, **kw)
685 raise Exception, "unknown format %r" % format
+0
-84
third_party/gyp/test/library/gyptest-shared.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple build of a "Hello, world!" program with shared libraries,
8 including verifying that libraries are rebuilt correctly when functions
9 move between libraries.
10 """
11
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 test.run_gyp('library.gyp',
17 '-Dlibrary=shared_library',
18 '-Dmoveable_function=lib1',
19 chdir='src')
20
21 test.relocate('src', 'relocate/src')
22
23 test.build('library.gyp', test.ALL, chdir='relocate/src')
24
25 expect = """\
26 Hello from program.c
27 Hello from lib1.c
28 Hello from lib2.c
29 Hello from lib1_moveable.c
30 """
31 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
32
33
34 test.run_gyp('library.gyp',
35 '-Dlibrary=shared_library',
36 '-Dmoveable_function=lib2',
37 chdir='relocate/src')
38
39 # Update program.c to force a rebuild.
40 test.sleep()
41 contents = test.read('relocate/src/program.c')
42 contents = contents.replace('Hello', 'Hello again')
43 test.write('relocate/src/program.c', contents)
44
45 test.build('library.gyp', test.ALL, chdir='relocate/src')
46
47 expect = """\
48 Hello again from program.c
49 Hello from lib1.c
50 Hello from lib2.c
51 Hello from lib2_moveable.c
52 """
53 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
54
55
56 test.run_gyp('library.gyp',
57 '-Dlibrary=shared_library',
58 '-Dmoveable_function=lib1',
59 chdir='relocate/src')
60
61 # Update program.c to force a rebuild.
62 test.sleep()
63 contents = test.read('relocate/src/program.c')
64 contents = contents.replace('again', 'again again')
65 test.write('relocate/src/program.c', contents)
66
67 # TODO(sgk): we have to force a rebuild of lib2 so that it weeds out
68 # the "moved" module. This should be done in gyp by adding a dependency
69 # on the generated .vcproj file itself.
70 test.touch('relocate/src/lib2.c')
71
72 test.build('library.gyp', test.ALL, chdir='relocate/src')
73
74 expect = """\
75 Hello again again from program.c
76 Hello from lib1.c
77 Hello from lib2.c
78 Hello from lib1_moveable.c
79 """
80 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
81
82
83 test.pass_test()
+0
-84
third_party/gyp/test/library/gyptest-static.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple build of a "Hello, world!" program with static libraries,
8 including verifying that libraries are rebuilt correctly when functions
9 move between libraries.
10 """
11
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 test.run_gyp('library.gyp',
17 '-Dlibrary=static_library',
18 '-Dmoveable_function=lib1',
19 chdir='src')
20
21 test.relocate('src', 'relocate/src')
22
23 test.build('library.gyp', test.ALL, chdir='relocate/src')
24
25 expect = """\
26 Hello from program.c
27 Hello from lib1.c
28 Hello from lib2.c
29 Hello from lib1_moveable.c
30 """
31 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
32
33
34 test.run_gyp('library.gyp',
35 '-Dlibrary=static_library',
36 '-Dmoveable_function=lib2',
37 chdir='relocate/src')
38
39 # Update program.c to force a rebuild.
40 test.sleep()
41 contents = test.read('relocate/src/program.c')
42 contents = contents.replace('Hello', 'Hello again')
43 test.write('relocate/src/program.c', contents)
44
45 test.build('library.gyp', test.ALL, chdir='relocate/src')
46
47 expect = """\
48 Hello again from program.c
49 Hello from lib1.c
50 Hello from lib2.c
51 Hello from lib2_moveable.c
52 """
53 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
54
55
56 test.run_gyp('library.gyp',
57 '-Dlibrary=static_library',
58 '-Dmoveable_function=lib1',
59 chdir='relocate/src')
60
61 # Update program.c and lib2.c to force a rebuild.
62 test.sleep()
63 contents = test.read('relocate/src/program.c')
64 contents = contents.replace('again', 'again again')
65 test.write('relocate/src/program.c', contents)
66
67 # TODO(sgk): we have to force a rebuild of lib2 so that it weeds out
68 # the "moved" module. This should be done in gyp by adding a dependency
69 # on the generated .vcproj file itself.
70 test.touch('relocate/src/lib2.c')
71
72 test.build('library.gyp', test.ALL, chdir='relocate/src')
73
74 expect = """\
75 Hello again again from program.c
76 Hello from lib1.c
77 Hello from lib2.c
78 Hello from lib1_moveable.c
79 """
80 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
81
82
83 test.pass_test()
+0
-10
third_party/gyp/test/library/src/lib1.c less more
0 #include <stdio.h>
1
2 #ifdef _WIN32
3 __declspec(dllexport)
4 #endif
5 void lib1_function(void)
6 {
7 fprintf(stdout, "Hello from lib1.c\n");
8 fflush(stdout);
9 }
+0
-10
third_party/gyp/test/library/src/lib1_moveable.c less more
0 #include <stdio.h>
1
2 #ifdef _WIN32
3 __declspec(dllexport)
4 #endif
5 void moveable_function(void)
6 {
7 fprintf(stdout, "Hello from lib1_moveable.c\n");
8 fflush(stdout);
9 }
+0
-10
third_party/gyp/test/library/src/lib2.c less more
0 #include <stdio.h>
1
2 #ifdef _WIN32
3 __declspec(dllexport)
4 #endif
5 void lib2_function(void)
6 {
7 fprintf(stdout, "Hello from lib2.c\n");
8 fflush(stdout);
9 }
+0
-10
third_party/gyp/test/library/src/lib2_moveable.c less more
0 #include <stdio.h>
1
2 #ifdef _WIN32
3 __declspec(dllexport)
4 #endif
5 void moveable_function(void)
6 {
7 fprintf(stdout, "Hello from lib2_moveable.c\n");
8 fflush(stdout);
9 }
+0
-58
third_party/gyp/test/library/src/library.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'variables': {
6 'moveable_function%': 0,
7 },
8 'targets': [
9 {
10 'target_name': 'program',
11 'type': 'executable',
12 'dependencies': [
13 'lib1',
14 'lib2',
15 ],
16 'sources': [
17 'program.c',
18 ],
19 },
20 {
21 'target_name': 'lib1',
22 'type': '<(library)',
23 'sources': [
24 'lib1.c',
25 ],
26 'conditions': [
27 ['moveable_function=="lib1"', {
28 'sources': [
29 'lib1_moveable.c',
30 ],
31 }],
32 ],
33 },
34 {
35 'target_name': 'lib2',
36 'type': '<(library)',
37 'sources': [
38 'lib2.c',
39 ],
40 'conditions': [
41 ['moveable_function=="lib2"', {
42 'sources': [
43 'lib2_moveable.c',
44 ],
45 }],
46 ],
47 },
48 ],
49 'conditions': [
50 ['OS=="linux"', {
51 'target_defaults': {
52 # Support 64-bit shared libs (also works fine for 32-bit).
53 'cflags': ['-fPIC'],
54 },
55 }],
56 ],
57 }
+0
-15
third_party/gyp/test/library/src/program.c less more
0 #include <stdio.h>
1
2 extern void lib1_function(void);
3 extern void lib2_function(void);
4 extern void moveable_function(void);
5
6 int main(int argc, char *argv[])
7 {
8 fprintf(stdout, "Hello from program.c\n");
9 fflush(stdout);
10 lib1_function();
11 lib2_function();
12 moveable_function();
13 return 0;
14 }
+0
-28
third_party/gyp/test/module/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple build of a "Hello, world!" program with loadable modules. The
8 default for all platforms should be to output the loadable modules to the same
9 path as the executable.
10 """
11
12 import TestGyp
13
14 test = TestGyp.TestGyp()
15
16 test.run_gyp('module.gyp', chdir='src')
17
18 test.build('module.gyp', test.ALL, chdir='src')
19
20 expect = """\
21 Hello from program.c
22 Hello from lib1.c
23 Hello from lib2.c
24 """
25 test.run_built_executable('program', chdir='src', stdout=expect)
26
27 test.pass_test()
+0
-10
third_party/gyp/test/module/src/lib1.c less more
0 #include <stdio.h>
1
2 #ifdef _WIN32
3 __declspec(dllexport)
4 #endif
5 void module_main(void)
6 {
7 fprintf(stdout, "Hello from lib1.c\n");
8 fflush(stdout);
9 }
+0
-10
third_party/gyp/test/module/src/lib2.c less more
0 #include <stdio.h>
1
2 #ifdef _WIN32
3 __declspec(dllexport)
4 #endif
5 void module_main(void)
6 {
7 fprintf(stdout, "Hello from lib2.c\n");
8 fflush(stdout);
9 }
+0
-55
third_party/gyp/test/module/src/module.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'conditions': [
7 ['OS=="win"', {
8 'defines': ['PLATFORM_WIN'],
9 }],
10 ['OS=="mac"', {
11 'defines': ['PLATFORM_MAC'],
12 }],
13 ['OS=="linux"', {
14 'defines': ['PLATFORM_LINUX'],
15 # Support 64-bit shared libs (also works fine for 32-bit).
16 'cflags': ['-fPIC'],
17 'ldflags': ['-ldl'],
18 }],
19 ],
20 },
21 'targets': [
22 {
23 'target_name': 'program',
24 'type': 'executable',
25 'dependencies': [
26 'lib1',
27 'lib2',
28 ],
29 'sources': [
30 'program.c',
31 ],
32 },
33 {
34 'target_name': 'lib1',
35 'type': 'loadable_module',
36 'product_name': 'lib1',
37 'product_prefix': '',
38 'xcode_settings': {'OTHER_LDFLAGS': ['-dynamiclib'], 'MACH_O_TYPE': ''},
39 'sources': [
40 'lib1.c',
41 ],
42 },
43 {
44 'target_name': 'lib2',
45 'product_name': 'lib2',
46 'product_prefix': '',
47 'type': 'loadable_module',
48 'xcode_settings': {'OTHER_LDFLAGS': ['-dynamiclib'], 'MACH_O_TYPE': ''},
49 'sources': [
50 'lib2.c',
51 ],
52 },
53 ],
54 }
+0
-111
third_party/gyp/test/module/src/program.c less more
0 #include <stdio.h>
1 #include <stdlib.h>
2
3 #if defined(PLATFORM_WIN)
4 #include <windows.h>
5 #elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
6 #include <dlfcn.h>
7 #include <libgen.h>
8 #include <string.h>
9 #include <sys/param.h>
10 #define MAX_PATH PATH_MAX
11 #endif
12
13 #if defined(PLATFORM_WIN)
14 #define MODULE_SUFFIX ".dll"
15 #elif defined(PLATFORM_MAC)
16 #define MODULE_SUFFIX ".dylib"
17 #elif defined(PLATFORM_LINUX)
18 #define MODULE_SUFFIX ".so"
19 #endif
20
21 typedef void (*module_symbol)(void);
22 char bin_path[MAX_PATH + 1];
23
24
25 void CallModule(const char* module) {
26 char module_path[MAX_PATH + 1];
27 const char* module_function = "module_main";
28 module_symbol funcptr;
29 #if defined(PLATFORM_WIN)
30 HMODULE dl;
31 char drive[_MAX_DRIVE];
32 char dir[_MAX_DIR];
33
34 if (_splitpath_s(bin_path, drive, _MAX_DRIVE, dir, _MAX_DIR,
35 NULL, 0, NULL, 0)) {
36 fprintf(stderr, "Failed to split executable path.\n");
37 return;
38 }
39 if (_makepath_s(module_path, MAX_PATH, drive, dir, module, MODULE_SUFFIX)) {
40 fprintf(stderr, "Failed to calculate module path.\n");
41 return;
42 }
43
44 dl = LoadLibrary(module_path);
45 if (!dl) {
46 fprintf(stderr, "Failed to open module: %s\n", module_path);
47 return;
48 }
49
50 funcptr = (module_symbol) GetProcAddress(dl, module_function);
51 if (!funcptr) {
52 fprintf(stderr, "Failed to find symbol: %s\n", module_function);
53 return;
54 }
55 funcptr();
56
57 FreeLibrary(dl);
58 #elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
59 void* dl;
60 char* path_copy = strdup(bin_path);
61 char* bin_dir = dirname(path_copy);
62 int path_size = snprintf(module_path, MAX_PATH, "%s/%s%s", bin_dir, module,
63 MODULE_SUFFIX);
64 free(path_copy);
65 if (path_size < 0 || path_size > MAX_PATH) {
66 fprintf(stderr, "Failed to calculate module path.\n");
67 return;
68 }
69 module_path[path_size] = 0;
70
71 dl = dlopen(module_path, RTLD_LAZY);
72 if (!dl) {
73 fprintf(stderr, "Failed to open module: %s\n", module_path);
74 return;
75 }
76
77 funcptr = dlsym(dl, module_function);
78 if (!funcptr) {
79 fprintf(stderr, "Failed to find symbol: %s\n", module_function);
80 return;
81 }
82 funcptr();
83
84 dlclose(dl);
85 #endif
86 }
87
88 int main(int argc, char *argv[])
89 {
90 fprintf(stdout, "Hello from program.c\n");
91 fflush(stdout);
92
93 #if defined(PLATFORM_WIN)
94 if (!GetModuleFileName(NULL, bin_path, MAX_PATH)) {
95 fprintf(stderr, "Failed to determine executable path.\n");
96 return;
97 }
98 #elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
99 // Using argv[0] should be OK here since we control how the tests run, and
100 // can avoid exec and such issues that make it unreliable.
101 if (!realpath(argv[0], bin_path)) {
102 fprintf(stderr, "Failed to determine executable path (%s).\n", argv[0]);
103 return;
104 }
105 #endif
106
107 CallModule("lib1");
108 CallModule("lib2");
109 return 0;
110 }
+0
-22
third_party/gyp/test/msvs/express/base/base.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'a',
8 'type': 'static_library',
9 'sources': [
10 'a.c',
11 ],
12 },
13 {
14 'target_name': 'b',
15 'type': 'static_library',
16 'sources': [
17 'b.c',
18 ],
19 },
20 ],
21 }
+0
-19
third_party/gyp/test/msvs/express/express.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'express',
8 'type': 'executable',
9 'dependencies': [
10 'base/base.gyp:a',
11 'base/base.gyp:b',
12 ],
13 'sources': [
14 'main.c',
15 ],
16 },
17 ],
18 }
+0
-29
third_party/gyp/test/msvs/express/gyptest-express.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that flat solutions get generated for Express versions of
8 Visual Studio.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp(formats=['msvs'])
14
15 test.run_gyp('express.gyp', '-G', 'msvs_version=2005')
16 test.must_contain('express.sln', '(base)')
17
18 test.run_gyp('express.gyp', '-G', 'msvs_version=2008')
19 test.must_contain('express.sln', '(base)')
20
21 test.run_gyp('express.gyp', '-G', 'msvs_version=2005e')
22 test.must_not_contain('express.sln', '(base)')
23
24 test.run_gyp('express.gyp', '-G', 'msvs_version=2008e')
25 test.must_not_contain('express.sln', '(base)')
26
27
28 test.pass_test()
+0
-35
third_party/gyp/test/multiple-targets/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 """
8
9 import TestGyp
10
11 test = TestGyp.TestGyp()
12
13 test.run_gyp('multiple.gyp', chdir='src')
14
15 test.relocate('src', 'relocate/src')
16
17 # TODO(sgk): remove stderr=None when the --generator-output= support
18 # gets rid of the scons warning
19 test.build('multiple.gyp', test.ALL, chdir='relocate/src', stderr=None)
20
21 expect1 = """\
22 hello from prog1.c
23 hello from common.c
24 """
25
26 expect2 = """\
27 hello from prog2.c
28 hello from common.c
29 """
30
31 test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src')
32 test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src')
33
34 test.pass_test()
+0
-35
third_party/gyp/test/multiple-targets/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 """
8
9 import TestGyp
10
11 test = TestGyp.TestGyp()
12
13 test.run_gyp('multiple.gyp', chdir='src')
14
15 test.relocate('src', 'relocate/src')
16
17 # TODO(sgk): remove stderr=None when the --generator-output= support
18 # gets rid of the scons warning
19 test.build('multiple.gyp', chdir='relocate/src', stderr=None)
20
21 expect1 = """\
22 hello from prog1.c
23 hello from common.c
24 """
25
26 expect2 = """\
27 hello from prog2.c
28 hello from common.c
29 """
30
31 test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src')
32 test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src')
33
34 test.pass_test()
+0
-7
third_party/gyp/test/multiple-targets/src/common.c less more
0 #include <stdio.h>
1
2 void common(void)
3 {
4 printf("hello from common.c\n");
5 return;
6 }
+0
-24
third_party/gyp/test/multiple-targets/src/multiple.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'prog1',
8 'type': 'executable',
9 'sources': [
10 'prog1.c',
11 'common.c',
12 ],
13 },
14 {
15 'target_name': 'prog2',
16 'type': 'executable',
17 'sources': [
18 'prog2.c',
19 'common.c',
20 ],
21 },
22 ],
23 }
+0
-10
third_party/gyp/test/multiple-targets/src/prog1.c less more
0 #include <stdio.h>
1
2 extern void common(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("hello from prog1.c\n");
7 common();
8 return 0;
9 }
+0
-10
third_party/gyp/test/multiple-targets/src/prog2.c less more
0 #include <stdio.h>
1
2 extern void common(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("hello from prog2.c\n");
7 common();
8 return 0;
9 }
+0
-19
third_party/gyp/test/no-output/gyptest-no-output.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verified things don't explode when there are targets without outputs.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('nooutput.gyp', chdir='src')
15 test.relocate('src', 'relocate/src')
16 test.build('nooutput.gyp', chdir='relocate/src')
17
18 test.pass_test()
+0
-17
third_party/gyp/test/no-output/src/nooutput.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'no_output',
8 'type': 'none',
9 'direct_dependent_settings': {
10 'defines': [
11 'NADA',
12 ],
13 },
14 },
15 ],
16 }
+0
-43
third_party/gyp/test/product/gyptest-product.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simplest-possible build of a "Hello, world!" program
8 using the default build target.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('product.gyp')
16 test.build('product.gyp')
17
18 # executables
19 test.built_file_must_exist('alt1' + test._exe, test.EXECUTABLE, bare=True)
20 test.built_file_must_exist('hello2.stuff', test.EXECUTABLE, bare=True)
21 test.built_file_must_exist('yoalt3.stuff', test.EXECUTABLE, bare=True)
22
23 # shared libraries
24 test.built_file_must_exist(test.dll_ + 'alt4' + test._dll,
25 test.SHARED_LIB, bare=True)
26 test.built_file_must_exist(test.dll_ + 'hello5.stuff',
27 test.SHARED_LIB, bare=True)
28 test.built_file_must_exist('yoalt6.stuff', test.SHARED_LIB, bare=True)
29
30 # static libraries
31 test.built_file_must_exist(test.lib_ + 'alt7' + test._lib,
32 test.STATIC_LIB, bare=True)
33 test.built_file_must_exist(test.lib_ + 'hello8.stuff',
34 test.STATIC_LIB, bare=True)
35 test.built_file_must_exist('yoalt9.stuff', test.STATIC_LIB, bare=True)
36
37 # alternate product_dir
38 test.built_file_must_exist('bob/yoalt10.stuff', test.EXECUTABLE, bare=True)
39 test.built_file_must_exist('bob/yoalt11.stuff', test.EXECUTABLE, bare=True)
40 test.built_file_must_exist('bob/yoalt12.stuff', test.EXECUTABLE, bare=True)
41
42 test.pass_test()
+0
-15
third_party/gyp/test/product/hello.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 int func1(void) {
7 return 42;
8 }
9
10 int main(int argc, char *argv[]) {
11 printf("Hello, world!\n");
12 printf("%d\n", func1());
13 return 0;
14 }
+0
-128
third_party/gyp/test/product/product.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'hello1',
8 'product_name': 'alt1',
9 'type': 'executable',
10 'sources': [
11 'hello.c',
12 ],
13 },
14 {
15 'target_name': 'hello2',
16 'product_extension': 'stuff',
17 'type': 'executable',
18 'sources': [
19 'hello.c',
20 ],
21 },
22 {
23 'target_name': 'hello3',
24 'product_name': 'alt3',
25 'product_extension': 'stuff',
26 'product_prefix': 'yo',
27 'type': 'executable',
28 'sources': [
29 'hello.c',
30 ],
31 },
32
33 {
34 'target_name': 'hello4',
35 'product_name': 'alt4',
36 'type': 'shared_library',
37 'sources': [
38 'hello.c',
39 ],
40 },
41 {
42 'target_name': 'hello5',
43 'product_extension': 'stuff',
44 'type': 'shared_library',
45 'sources': [
46 'hello.c',
47 ],
48 },
49 {
50 'target_name': 'hello6',
51 'product_name': 'alt6',
52 'product_extension': 'stuff',
53 'product_prefix': 'yo',
54 'type': 'shared_library',
55 'sources': [
56 'hello.c',
57 ],
58 },
59
60 {
61 'target_name': 'hello7',
62 'product_name': 'alt7',
63 'type': 'static_library',
64 'sources': [
65 'hello.c',
66 ],
67 },
68 {
69 'target_name': 'hello8',
70 'product_extension': 'stuff',
71 'type': 'static_library',
72 'sources': [
73 'hello.c',
74 ],
75 },
76 {
77 'target_name': 'hello9',
78 'product_name': 'alt9',
79 'product_extension': 'stuff',
80 'product_prefix': 'yo',
81 'type': 'static_library',
82 'sources': [
83 'hello.c',
84 ],
85 },
86 {
87 'target_name': 'hello10',
88 'product_name': 'alt10',
89 'product_extension': 'stuff',
90 'product_prefix': 'yo',
91 'product_dir': '<(PRODUCT_DIR)/bob',
92 'type': 'executable',
93 'sources': [
94 'hello.c',
95 ],
96 },
97 {
98 'target_name': 'hello11',
99 'product_name': 'alt11',
100 'product_extension': 'stuff',
101 'product_prefix': 'yo',
102 'product_dir': '<(PRODUCT_DIR)/bob',
103 'type': 'shared_library',
104 'sources': [
105 'hello.c',
106 ],
107 },
108 {
109 'target_name': 'hello12',
110 'product_name': 'alt12',
111 'product_extension': 'stuff',
112 'product_prefix': 'yo',
113 'product_dir': '<(PRODUCT_DIR)/bob',
114 'type': 'static_library',
115 'sources': [
116 'hello.c',
117 ],
118 },
119 ],
120 'conditions': [
121 ['OS=="linux"', {
122 'target_defaults': {
123 'cflags': ['-fPIC'],
124 },
125 }],
126 ],
127 }
+0
-47
third_party/gyp/test/rules/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple rules when using an explicit build target of 'all'.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('actions.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('actions.gyp', test.ALL, chdir='relocate/src')
19
20 expect = """\
21 Hello from program.c
22 Hello from function1.in
23 Hello from function2.in
24 """
25
26 if test.format == 'xcode':
27 chdir = 'relocate/src/subdir1'
28 else:
29 chdir = 'relocate/src'
30 test.run_built_executable('program', chdir=chdir, stdout=expect)
31
32 expect = """\
33 Hello from program.c
34 Hello from function3.in
35 """
36
37 if test.format == 'xcode':
38 chdir = 'relocate/src/subdir3'
39 else:
40 chdir = 'relocate/src'
41 test.run_built_executable('program2', chdir=chdir, stdout=expect)
42
43 test.must_match('relocate/src/subdir2/file1.out', "Hello from file1.in\n")
44 test.must_match('relocate/src/subdir2/file2.out', "Hello from file2.in\n")
45
46 test.pass_test()
+0
-47
third_party/gyp/test/rules/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies simple rules when using an explicit build target of 'all'.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('actions.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('actions.gyp', chdir='relocate/src')
19
20 expect = """\
21 Hello from program.c
22 Hello from function1.in
23 Hello from function2.in
24 """
25
26 if test.format == 'xcode':
27 chdir = 'relocate/src/subdir1'
28 else:
29 chdir = 'relocate/src'
30 test.run_built_executable('program', chdir=chdir, stdout=expect)
31
32 expect = """\
33 Hello from program.c
34 Hello from function3.in
35 """
36
37 if test.format == 'xcode':
38 chdir = 'relocate/src/subdir3'
39 else:
40 chdir = 'relocate/src'
41 test.run_built_executable('program2', chdir=chdir, stdout=expect)
42
43 test.must_match('relocate/src/subdir2/file1.out', "Hello from file1.in\n")
44 test.must_match('relocate/src/subdir2/file2.out', "Hello from file2.in\n")
45
46 test.pass_test()
+0
-17
third_party/gyp/test/rules/src/actions.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'pull_in_all_actions',
8 'type': 'none',
9 'dependencies': [
10 'subdir1/executable.gyp:*',
11 'subdir2/none.gyp:*',
12 'subdir3/executable2.gyp:*',
13 ],
14 },
15 ],
16 }
+0
-11
third_party/gyp/test/rules/src/copy-file.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5 import sys
6
7 contents = open(sys.argv[1], 'r').read()
8 open(sys.argv[2], 'wb').write(contents)
9
10 sys.exit(0)
+0
-37
third_party/gyp/test/rules/src/subdir1/executable.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program',
8 'type': 'executable',
9 'msvs_cygwin_shell': 0,
10 'sources': [
11 'program.c',
12 'function1.in',
13 'function2.in',
14 ],
15 'rules': [
16 {
17 'rule_name': 'copy_file',
18 'extension': 'in',
19 'inputs': [
20 '../copy-file.py',
21 ],
22 'outputs': [
23 # TODO: fix SCons and Make to support generated files not
24 # in a variable-named path like <(INTERMEDIATE_DIR)
25 #'<(RULE_INPUT_ROOT).c',
26 '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
27 ],
28 'action': [
29 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
30 ],
31 'process_outputs_as_sources': 1,
32 },
33 ],
34 },
35 ],
36 }
+0
-6
third_party/gyp/test/rules/src/subdir1/function1.in less more
0 #include <stdio.h>
1
2 void function1(void)
3 {
4 printf("Hello from function1.in\n");
5 }
+0
-6
third_party/gyp/test/rules/src/subdir1/function2.in less more
0 #include <stdio.h>
1
2 void function2(void)
3 {
4 printf("Hello from function2.in\n");
5 }
+0
-12
third_party/gyp/test/rules/src/subdir1/program.c less more
0 #include <stdio.h>
1
2 extern void function1(void);
3 extern void function2(void);
4
5 int main(int argc, char *argv[])
6 {
7 printf("Hello from program.c\n");
8 function1();
9 function2();
10 return 0;
11 }
+0
-1
third_party/gyp/test/rules/src/subdir2/file1.in less more
0 Hello from file1.in
+0
-1
third_party/gyp/test/rules/src/subdir2/file2.in less more
0 Hello from file2.in
+0
-33
third_party/gyp/test/rules/src/subdir2/none.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'files',
8 'type': 'none',
9 'msvs_cygwin_shell': 0,
10 'sources': [
11 'file1.in',
12 'file2.in',
13 ],
14 'rules': [
15 {
16 'rule_name': 'copy_file',
17 'extension': 'in',
18 'inputs': [
19 '../copy-file.py',
20 ],
21 'outputs': [
22 '<(RULE_INPUT_ROOT).out',
23 ],
24 'action': [
25 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
26 ],
27 'process_outputs_as_sources': 1,
28 },
29 ],
30 },
31 ],
32 }
+0
-37
third_party/gyp/test/rules/src/subdir3/executable2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 # This one tests that rules are properly written if extensions are different
5 # between the target's sources (program.c) and the generated files
6 # (function3.cc)
7
8 {
9 'targets': [
10 {
11 'target_name': 'program2',
12 'type': 'executable',
13 'msvs_cygwin_shell': 0,
14 'sources': [
15 'program.c',
16 'function3.in',
17 ],
18 'rules': [
19 {
20 'rule_name': 'copy_file',
21 'extension': 'in',
22 'inputs': [
23 '../copy-file.py',
24 ],
25 'outputs': [
26 '<(SHARED_INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).cc',
27 ],
28 'action': [
29 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
30 ],
31 'process_outputs_as_sources': 1,
32 },
33 ],
34 },
35 ],
36 }
+0
-6
third_party/gyp/test/rules/src/subdir3/function3.in less more
0 #include <stdio.h>
1
2 extern "C" void function3(void)
3 {
4 printf("Hello from function3.in\n");
5 }
+0
-10
third_party/gyp/test/rules/src/subdir3/program.c less more
0 #include <stdio.h>
1
2 extern void function3(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from program.c\n");
7 function3();
8 return 0;
9 }
+0
-74
third_party/gyp/test/rules-rebuild/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that a rule that generates multiple outputs rebuilds
8 correctly when the inputs change.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 if test.format == 'msvs':
16 msg = 'TODO: issue 120: disabled on MSVS due to test execution problems.\n'
17 test.skip_test(msg)
18
19 test.run_gyp('same_target.gyp', chdir='src')
20
21 test.relocate('src', 'relocate/src')
22
23
24 test.build('same_target.gyp', test.ALL, chdir='relocate/src')
25
26 expect = """\
27 Hello from main.c
28 Hello from prog1.in!
29 Hello from prog2.in!
30 """
31
32 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
33
34 test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
35
36
37 test.sleep()
38 contents = test.read(['relocate', 'src', 'prog1.in'])
39 contents = contents.replace('!', ' AGAIN!')
40 test.write(['relocate', 'src', 'prog1.in'], contents)
41
42 test.build('same_target.gyp', test.ALL, chdir='relocate/src')
43
44 expect = """\
45 Hello from main.c
46 Hello from prog1.in AGAIN!
47 Hello from prog2.in!
48 """
49
50 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
51
52 test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
53
54
55 test.sleep()
56 contents = test.read(['relocate', 'src', 'prog2.in'])
57 contents = contents.replace('!', ' AGAIN!')
58 test.write(['relocate', 'src', 'prog2.in'], contents)
59
60 test.build('same_target.gyp', test.ALL, chdir='relocate/src')
61
62 expect = """\
63 Hello from main.c
64 Hello from prog1.in AGAIN!
65 Hello from prog2.in AGAIN!
66 """
67
68 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
69
70 test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
71
72
73 test.pass_test()
+0
-74
third_party/gyp/test/rules-rebuild/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that a rule that generates multiple outputs rebuilds
8 correctly when the inputs change.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 if test.format == 'msvs':
16 msg = 'TODO: issue 120: disabled on MSVS due to test execution problems.\n'
17 test.skip_test(msg)
18
19 test.run_gyp('same_target.gyp', chdir='src')
20
21 test.relocate('src', 'relocate/src')
22
23
24 test.build('same_target.gyp', chdir='relocate/src')
25
26 expect = """\
27 Hello from main.c
28 Hello from prog1.in!
29 Hello from prog2.in!
30 """
31
32 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
33
34 test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
35
36
37 test.sleep()
38 contents = test.read(['relocate', 'src', 'prog1.in'])
39 contents = contents.replace('!', ' AGAIN!')
40 test.write(['relocate', 'src', 'prog1.in'], contents)
41
42 test.build('same_target.gyp', chdir='relocate/src')
43
44 expect = """\
45 Hello from main.c
46 Hello from prog1.in AGAIN!
47 Hello from prog2.in!
48 """
49
50 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
51
52 test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
53
54
55 test.sleep()
56 contents = test.read(['relocate', 'src', 'prog2.in'])
57 contents = contents.replace('!', ' AGAIN!')
58 test.write(['relocate', 'src', 'prog2.in'], contents)
59
60 test.build('same_target.gyp', chdir='relocate/src')
61
62 expect = """\
63 Hello from main.c
64 Hello from prog1.in AGAIN!
65 Hello from prog2.in AGAIN!
66 """
67
68 test.run_built_executable('program', chdir='relocate/src', stdout=expect)
69
70 test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
71
72
73 test.pass_test()
+0
-12
third_party/gyp/test/rules-rebuild/src/main.c less more
0 #include <stdio.h>
1
2 extern void prog1(void);
3 extern void prog2(void);
4
5 int main(int argc, char *argv[])
6 {
7 printf("Hello from main.c\n");
8 prog1();
9 prog2();
10 return 0;
11 }
+0
-15
third_party/gyp/test/rules-rebuild/src/make-sources.py less more
0 #!/usr/bin/env python
1 import sys
2
3 assert len(sys.argv) == 4, sys.argv
4
5 (in_file, c_file, h_file) = sys.argv[1:]
6
7 def write_file(filename, contents):
8 open(filename, 'wb').write(contents)
9
10 write_file(c_file, open(in_file, 'rb').read())
11
12 write_file(h_file, '#define NAME "%s"\n' % in_file)
13
14 sys.exit(0)
+0
-7
third_party/gyp/test/rules-rebuild/src/prog1.in less more
0 #include <stdio.h>
1 #include "prog1.h"
2
3 void prog1(void)
4 {
5 printf("Hello from %s!\n", NAME);
6 }
+0
-7
third_party/gyp/test/rules-rebuild/src/prog2.in less more
0 #include <stdio.h>
1 #include "prog2.h"
2
3 void prog2(void)
4 {
5 printf("Hello from %s!\n", NAME);
6 }
+0
-32
third_party/gyp/test/rules-rebuild/src/same_target.gyp less more
0 {
1 'targets': [
2 {
3 'target_name': 'program',
4 'type': 'executable',
5 'msvs_cygwin_shell': 0,
6 'sources': [
7 'main.c',
8 'prog1.in',
9 'prog2.in',
10 ],
11 'rules': [
12 {
13 'rule_name': 'make_sources',
14 'extension': 'in',
15 'msvs_external_rule': 1,
16 'inputs': [
17 'make-sources.py',
18 ],
19 'outputs': [
20 '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
21 '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h',
22 ],
23 'action': [
24 'python', '<(_inputs)', '<(RULE_INPUT_NAME)', '<@(_outputs)',
25 ],
26 'process_outputs_as_sources': 1,
27 },
28 ],
29 },
30 ],
31 }
+0
-34
third_party/gyp/test/same-gyp-name/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Build a .gyp that depends on 2 gyp files with the same name.
8 """
9
10 import TestGyp
11
12 # This causes a problem on XCode (duplicate ID).
13 # See http://code.google.com/p/gyp/issues/detail?id=114
14 test = TestGyp.TestGyp(formats=['msvs', 'scons', 'make'])
15
16 test.run_gyp('all.gyp', chdir='src')
17
18 test.relocate('src', 'relocate/src')
19
20 test.build('all.gyp', test.ALL, chdir='relocate/src')
21
22 expect1 = """\
23 Hello from main1.cc
24 """
25
26 expect2 = """\
27 Hello from main2.cc
28 """
29
30 test.run_built_executable('program1', chdir='relocate/src', stdout=expect1)
31 test.run_built_executable('program2', chdir='relocate/src', stdout=expect2)
32
33 test.pass_test()
+0
-34
third_party/gyp/test/same-gyp-name/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Build a .gyp that depends on 2 gyp files with the same name.
8 """
9
10 import TestGyp
11
12 # This causes a problem on XCode (duplicate ID).
13 # See http://code.google.com/p/gyp/issues/detail?id=114
14 test = TestGyp.TestGyp(formats=['msvs', 'scons', 'make'])
15
16 test.run_gyp('all.gyp', chdir='src')
17
18 test.relocate('src', 'relocate/src')
19
20 test.build('all.gyp', chdir='relocate/src')
21
22 expect1 = """\
23 Hello from main1.cc
24 """
25
26 expect2 = """\
27 Hello from main2.cc
28 """
29
30 test.run_built_executable('program1', chdir='relocate/src', stdout=expect1)
31 test.run_built_executable('program2', chdir='relocate/src', stdout=expect2)
32
33 test.pass_test()
+0
-16
third_party/gyp/test/same-gyp-name/src/all.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'all_exes',
8 'type': 'none',
9 'dependencies': [
10 'subdir1/executable.gyp:*',
11 'subdir2/executable.gyp:*',
12 ],
13 },
14 ],
15 }
+0
-15
third_party/gyp/test/same-gyp-name/src/subdir1/executable.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program1',
8 'type': 'executable',
9 'sources': [
10 'main1.cc',
11 ],
12 },
13 ],
14 }
+0
-6
third_party/gyp/test/same-gyp-name/src/subdir1/main1.cc less more
0 #include <stdio.h>
1
2 int main() {
3 printf("Hello from main1.cc\n");
4 return 0;
5 }
+0
-15
third_party/gyp/test/same-gyp-name/src/subdir2/executable.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'program2',
8 'type': 'executable',
9 'sources': [
10 'main2.cc',
11 ],
12 },
13 ],
14 }
+0
-6
third_party/gyp/test/same-gyp-name/src/subdir2/main2.cc less more
0 #include <stdio.h>
1
2 int main() {
3 printf("Hello from main2.cc\n");
4 return 0;
5 }
+0
-34
third_party/gyp/test/same-name/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Build a .gyp with two targets that share a common .c source file.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('all.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('all.gyp', test.ALL, chdir='relocate/src')
19
20 expect1 = """\
21 Hello from prog1.c
22 Hello prog1 from func.c
23 """
24
25 expect2 = """\
26 Hello from prog2.c
27 Hello prog2 from func.c
28 """
29
30 test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1)
31 test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2)
32
33 test.pass_test()
+0
-34
third_party/gyp/test/same-name/gyptest-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Build a .gyp with two targets that share a common .c source file.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('all.gyp', chdir='src')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('all.gyp', chdir='relocate/src')
19
20 expect1 = """\
21 Hello from prog1.c
22 Hello prog1 from func.c
23 """
24
25 expect2 = """\
26 Hello from prog2.c
27 Hello prog2 from func.c
28 """
29
30 test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1)
31 test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2)
32
33 test.pass_test()
+0
-38
third_party/gyp/test/same-name/src/all.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'prog1',
8 'type': 'executable',
9 'defines': [
10 'PROG="prog1"',
11 ],
12 'sources': [
13 'prog1.c',
14 'func.c',
15 # Uncomment to test same-named files in different directories,
16 # which Visual Studio doesn't support.
17 #'subdir1/func.c',
18 #'subdir2/func.c',
19 ],
20 },
21 {
22 'target_name': 'prog2',
23 'type': 'executable',
24 'defines': [
25 'PROG="prog2"',
26 ],
27 'sources': [
28 'prog2.c',
29 'func.c',
30 # Uncomment to test same-named files in different directories,
31 # which Visual Studio doesn't support.
32 #'subdir1/func.c',
33 #'subdir2/func.c',
34 ],
35 },
36 ],
37 }
+0
-6
third_party/gyp/test/same-name/src/func.c less more
0 #include <stdio.h>
1
2 void func(void)
3 {
4 printf("Hello %s from func.c\n", PROG);
5 }
+0
-16
third_party/gyp/test/same-name/src/prog1.c less more
0 #include <stdio.h>
1
2 extern void func(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from prog1.c\n");
7 func();
8 /*
9 * Uncomment to test same-named files in different directories,
10 * which Visual Studio doesn't support.
11 subdir1_func();
12 subdir2_func();
13 */
14 return 0;
15 }
+0
-16
third_party/gyp/test/same-name/src/prog2.c less more
0 #include <stdio.h>
1
2 extern void func(void);
3
4 int main(int argc, char *argv[])
5 {
6 printf("Hello from prog2.c\n");
7 func();
8 /*
9 * Uncomment to test same-named files in different directories,
10 * which Visual Studio doesn't support.
11 subdir1_func();
12 subdir2_func();
13 */
14 return 0;
15 }
+0
-6
third_party/gyp/test/same-name/src/subdir1/func.c less more
0 #include <stdio.h>
1
2 void subdir1_func(void)
3 {
4 printf("Hello %s from subdir1/func.c\n", PROG);
5 }
+0
-6
third_party/gyp/test/same-name/src/subdir2/func.c less more
0 #include <stdio.h>
1
2 void subdir2_func(void)
3 {
4 printf("Hello %s from subdir2/func.c\n", PROG);
5 }
+0
-26
third_party/gyp/test/scons_tools/gyptest-tools.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that a scons build picks up tools modules specified
8 via 'scons_tools' in the 'scons_settings' dictionary.
9 """
10
11 import TestGyp
12
13 test = TestGyp.TestGyp()
14
15 test.run_gyp('tools.gyp')
16
17 test.build('tools.gyp', test.ALL)
18
19 if test.format == 'scons':
20 expect = "Hello, world!\n"
21 else:
22 expect = ""
23 test.run_built_executable('tools', stdout=expect)
24
25 test.pass_test()
+0
-10
third_party/gyp/test/scons_tools/site_scons/site_tools/this_tool.py less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 # SCons "tool" module that simply sets a -D value.
5 def generate(env):
6 env['CPPDEFINES'] = ['THIS_TOOL']
7
8 def exists(env):
9 pass
+0
-13
third_party/gyp/test/scons_tools/tools.c less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 int main(int argc, char *argv[])
7 {
8 #ifdef THIS_TOOL
9 printf("Hello, world!\n");
10 #endif
11 return 0;
12 }
+0
-18
third_party/gyp/test/scons_tools/tools.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'tools',
8 'type': 'executable',
9 'sources': [
10 'tools.c',
11 ],
12 },
13 ],
14 'scons_settings': {
15 'tools': ['default', 'this_tool'],
16 },
17 }
+0
-39
third_party/gyp/test/sibling/gyptest-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 """
8
9 import TestGyp
10
11 test = TestGyp.TestGyp()
12
13 test.run_gyp('build/all.gyp', chdir='src')
14
15 test.build('build/all.gyp', test.ALL, chdir='src')
16
17 chdir = 'src/build'
18
19 # The top-level Makefile is in the directory where gyp was run.
20 # TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
21 # file? What about when passing in multiple .gyp files? Would sub-project
22 # Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
23 if test.format == 'make':
24 chdir = 'src'
25
26 if test.format == 'xcode':
27 chdir = 'src/prog1'
28 test.run_built_executable('prog1',
29 chdir=chdir,
30 stdout="Hello from prog1.c\n")
31
32 if test.format == 'xcode':
33 chdir = 'src/prog2'
34 test.run_built_executable('prog2',
35 chdir=chdir,
36 stdout="Hello from prog2.c\n")
37
38 test.pass_test()
+0
-41
third_party/gyp/test/sibling/gyptest-relocate.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 """
8
9 import TestGyp
10
11 test = TestGyp.TestGyp()
12
13 test.run_gyp('build/all.gyp', chdir='src')
14
15 test.relocate('src', 'relocate/src')
16
17 test.build('build/all.gyp', test.ALL, chdir='relocate/src')
18
19 chdir = 'relocate/src/build'
20
21 # The top-level Makefile is in the directory where gyp was run.
22 # TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
23 # file? What about when passing in multiple .gyp files? Would sub-project
24 # Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
25 if test.format == 'make':
26 chdir = 'relocate/src'
27
28 if test.format == 'xcode':
29 chdir = 'relocate/src/prog1'
30 test.run_built_executable('prog1',
31 chdir=chdir,
32 stdout="Hello from prog1.c\n")
33
34 if test.format == 'xcode':
35 chdir = 'relocate/src/prog2'
36 test.run_built_executable('prog2',
37 chdir=chdir,
38 stdout="Hello from prog2.c\n")
39
40 test.pass_test()
+0
-17
third_party/gyp/test/sibling/src/build/all.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 # TODO(sgk): a target name of 'all' leads to a scons dependency cycle
8 'target_name': 'All',
9 'type': 'none',
10 'dependencies': [
11 '../prog1/prog1.gyp:*',
12 '../prog2/prog2.gyp:*',
13 ],
14 },
15 ],
16 }
+0
-7
third_party/gyp/test/sibling/src/prog1/prog1.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from prog1.c\n");
5 return 0;
6 }
+0
-15
third_party/gyp/test/sibling/src/prog1/prog1.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'prog1',
8 'type': 'executable',
9 'sources': [
10 'prog1.c',
11 ],
12 },
13 ],
14 }
+0
-7
third_party/gyp/test/sibling/src/prog2/prog2.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from prog2.c\n");
5 return 0;
6 }
+0
-15
third_party/gyp/test/sibling/src/prog2/prog2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'prog2',
8 'type': 'executable',
9 'sources': [
10 'prog2.c',
11 ],
12 },
13 ],
14 }
+0
-36
third_party/gyp/test/subdirectory/gyptest-SYMROOT-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a target and a subsidiary dependent target from a
8 .gyp file in a subdirectory, without specifying an explicit output build
9 directory, and using the generated solution or project file at the top
10 of the tree as the entry point.
11
12 The configuration sets the Xcode SYMROOT variable and uses --depth=
13 to make Xcode behave like the other build tools--that is, put all
14 built targets in a single output build directory at the top of the tree.
15 """
16
17 import TestGyp
18
19 test = TestGyp.TestGyp()
20
21 test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')
22
23 test.relocate('src', 'relocate/src')
24
25 # Suppress the test infrastructure's setting SYMROOT on the command line.
26 test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')
27
28 test.run_built_executable('prog1',
29 stdout="Hello from prog1.c\n",
30 chdir='relocate/src')
31 test.run_built_executable('prog2',
32 stdout="Hello from prog2.c\n",
33 chdir='relocate/src')
34
35 test.pass_test()
+0
-37
third_party/gyp/test/subdirectory/gyptest-SYMROOT-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a target and a subsidiary dependent target from a
8 .gyp file in a subdirectory, without specifying an explicit output build
9 directory, and using the generated solution or project file at the top
10 of the tree as the entry point.
11
12 The configuration sets the Xcode SYMROOT variable and uses --depth=
13 to make Xcode behave like the other build tools--that is, put all
14 built targets in a single output build directory at the top of the tree.
15 """
16
17 import TestGyp
18
19 test = TestGyp.TestGyp()
20
21 test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')
22
23 test.relocate('src', 'relocate/src')
24
25 # Suppress the test infrastructure's setting SYMROOT on the command line.
26 test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')
27
28 test.run_built_executable('prog1',
29 stdout="Hello from prog1.c\n",
30 chdir='relocate/src')
31
32 test.run_built_executable('prog2',
33 stdout="Hello from prog2.c\n",
34 chdir='relocate/src')
35
36 test.pass_test()
+0
-33
third_party/gyp/test/subdirectory/gyptest-subdir-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a subsidiary dependent target from a .gyp file in a
8 subdirectory, without specifying an explicit output build directory,
9 and using the subdirectory's solution or project file as the entry point.
10 """
11
12 import TestGyp
13 import errno
14
15 test = TestGyp.TestGyp()
16
17 test.run_gyp('prog1.gyp', chdir='src')
18
19 test.relocate('src', 'relocate/src')
20
21 chdir = 'relocate/src/subdir'
22 target = test.ALL
23
24 test.build('prog2.gyp', target, chdir=chdir)
25
26 test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir)
27
28 test.run_built_executable('prog2',
29 chdir=chdir,
30 stdout="Hello from prog2.c\n")
31
32 test.pass_test()
+0
-32
third_party/gyp/test/subdirectory/gyptest-subdir-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a subsidiary dependent target from a .gyp file in a
8 subdirectory, without specifying an explicit output build directory,
9 and using the subdirectory's solution or project file as the entry point.
10 """
11
12 import TestGyp
13 import errno
14
15 test = TestGyp.TestGyp()
16
17 test.run_gyp('prog1.gyp', chdir='src')
18
19 test.relocate('src', 'relocate/src')
20
21 chdir = 'relocate/src/subdir'
22
23 test.build('prog2.gyp', chdir=chdir)
24
25 test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir)
26
27 test.run_built_executable('prog2',
28 chdir=chdir,
29 stdout="Hello from prog2.c\n")
30
31 test.pass_test()
+0
-25
third_party/gyp/test/subdirectory/gyptest-subdir2-deep.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a project rooted several layers under src_dir works.
8 """
9
10 import TestGyp
11
12 test = TestGyp.TestGyp()
13
14 test.run_gyp('prog3.gyp', chdir='src/subdir/subdir2')
15
16 test.relocate('src', 'relocate/src')
17
18 test.build('prog3.gyp', test.ALL, chdir='relocate/src/subdir/subdir2')
19
20 test.run_built_executable('prog3',
21 chdir='relocate/src/subdir/subdir2',
22 stdout="Hello from prog3.c\n")
23
24 test.pass_test()
+0
-43
third_party/gyp/test/subdirectory/gyptest-top-all.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a target and a subsidiary dependent target from a
8 .gyp file in a subdirectory, without specifying an explicit output build
9 directory, and using the generated solution or project file at the top
10 of the tree as the entry point.
11
12 There is a difference here in the default behavior of the underlying
13 build tools. Specifically, when building the entire "solution", Xcode
14 puts the output of each project relative to the .xcodeproj directory,
15 while Visual Studio (and our implementations of SCons and Make) put it
16 in a build directory relative to the "solution"--that is, the entry-point
17 from which you built the entire tree.
18 """
19
20 import TestGyp
21
22 test = TestGyp.TestGyp()
23
24 test.run_gyp('prog1.gyp', chdir='src')
25
26 test.relocate('src', 'relocate/src')
27
28 test.build('prog1.gyp', test.ALL, chdir='relocate/src')
29
30 test.run_built_executable('prog1',
31 stdout="Hello from prog1.c\n",
32 chdir='relocate/src')
33
34 if test.format == 'xcode':
35 chdir = 'relocate/src/subdir'
36 else:
37 chdir = 'relocate/src'
38 test.run_built_executable('prog2',
39 chdir=chdir,
40 stdout="Hello from prog2.c\n")
41
42 test.pass_test()
+0
-43
third_party/gyp/test/subdirectory/gyptest-top-default.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a target and a subsidiary dependent target from a
8 .gyp file in a subdirectory, without specifying an explicit output build
9 directory, and using the generated solution or project file at the top
10 of the tree as the entry point.
11
12 There is a difference here in the default behavior of the underlying
13 build tools. Specifically, when building the entire "solution", Xcode
14 puts the output of each project relative to the .xcodeproj directory,
15 while Visual Studio (and our implementations of SCons and Make) put it
16 in a build directory relative to the "solution"--that is, the entry-point
17 from which you built the entire tree.
18 """
19
20 import TestGyp
21
22 test = TestGyp.TestGyp()
23
24 test.run_gyp('prog1.gyp', chdir='src')
25
26 test.relocate('src', 'relocate/src')
27
28 test.build('prog1.gyp', chdir='relocate/src')
29
30 test.run_built_executable('prog1',
31 stdout="Hello from prog1.c\n",
32 chdir='relocate/src')
33
34 if test.format == 'xcode':
35 chdir = 'relocate/src/subdir'
36 else:
37 chdir = 'relocate/src'
38 test.run_built_executable('prog2',
39 chdir=chdir,
40 stdout="Hello from prog2.c\n")
41
42 test.pass_test()
+0
-7
third_party/gyp/test/subdirectory/src/prog1.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from prog1.c\n");
5 return 0;
6 }
+0
-21
third_party/gyp/test/subdirectory/src/prog1.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 'symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog1',
11 'type': 'executable',
12 'dependencies': [
13 'subdir/prog2.gyp:prog2',
14 ],
15 'sources': [
16 'prog1.c',
17 ],
18 },
19 ],
20 }
+0
-7
third_party/gyp/test/subdirectory/src/subdir/prog2.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from prog2.c\n");
5 return 0;
6 }
+0
-18
third_party/gyp/test/subdirectory/src/subdir/prog2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog2',
11 'type': 'executable',
12 'sources': [
13 'prog2.c',
14 ],
15 },
16 ],
17 }
+0
-7
third_party/gyp/test/subdirectory/src/subdir/subdir2/prog3.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from prog3.c\n");
5 return 0;
6 }
+0
-18
third_party/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'includes': [
6 '../../symroot.gypi',
7 ],
8 'targets': [
9 {
10 'target_name': 'prog3',
11 'type': 'executable',
12 'sources': [
13 'prog3.c',
14 ],
15 },
16 ],
17 }
+0
-16
third_party/gyp/test/subdirectory/src/symroot.gypi less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'variables': {
6 'set_symroot%': 0,
7 },
8 'conditions': [
9 ['set_symroot == 1', {
10 'xcode_settings': {
11 'SYMROOT': '<(DEPTH)/build',
12 },
13 }],
14 ],
15 }
+0
-23
third_party/gyp/test/toolsets/gyptest-toolsets.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies that toolsets are correctly applied
8 """
9
10 import TestGyp
11
12 # Multiple toolsets are currently only supported by the make generator.
13 test = TestGyp.TestGyp(formats=['make'])
14
15 test.run_gyp('toolsets.gyp')
16
17 test.build('toolsets.gyp', test.ALL)
18
19 test.run_built_executable('host-main', stdout="Host\n")
20 test.run_built_executable('target-main', stdout="Target\n")
21
22 test.pass_test()
+0
-11
third_party/gyp/test/toolsets/main.cc less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 #include <stdio.h>
5
6 const char *GetToolset();
7
8 int main(int argc, char *argv[]) {
9 printf("%s\n", GetToolset());
10 }
+0
-11
third_party/gyp/test/toolsets/toolsets.cc less more
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 const char *GetToolset() {
5 #ifdef TARGET
6 return "Target";
7 #else
8 return "Host";
9 #endif
10 }
+0
-38
third_party/gyp/test/toolsets/toolsets.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'target_defaults': {
6 'target_conditions': [
7 ['_toolset=="target"', {'defines': ['TARGET']}]
8 ]
9 },
10 'targets': [
11 {
12 'target_name': 'toolsets',
13 'type': 'static_library',
14 'toolsets': ['target', 'host'],
15 'sources': [
16 'toolsets.cc',
17 ],
18 },
19 {
20 'target_name': 'host-main',
21 'type': 'executable',
22 'toolsets': ['host'],
23 'dependencies': ['toolsets'],
24 'sources': [
25 'main.cc',
26 ],
27 },
28 {
29 'target_name': 'target-main',
30 'type': 'executable',
31 'dependencies': ['toolsets'],
32 'sources': [
33 'main.cc',
34 ],
35 },
36 ],
37 }
+0
-31
third_party/gyp/test/toplevel-dir/gyptest-toplevel-dir.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verifies building a subsidiary dependent target from a .gyp file in a
8 subdirectory, without specifying an explicit output build directory,
9 and using the subdirectory's solution or project file as the entry point.
10 """
11
12 import TestGyp
13 import errno
14
15 test = TestGyp.TestGyp(formats=['make'])
16
17 # We want our Makefile to be one dir up from main.gyp.
18 test.run_gyp('main.gyp', '--toplevel-dir=..', chdir='src/sub1')
19
20 toplevel_dir = 'src'
21
22 test.build('all', chdir=toplevel_dir)
23
24 test.built_file_must_exist('prog1', type=test.EXECUTABLE, chdir=toplevel_dir)
25
26 test.run_built_executable('prog1',
27 chdir=toplevel_dir,
28 stdout="Hello from prog1.c\n")
29
30 test.pass_test()
+0
-18
third_party/gyp/test/toplevel-dir/src/sub1/main.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'prog1',
8 'type': 'executable',
9 'dependencies': [
10 '<(DEPTH)/../sub2/prog2.gyp:prog2',
11 ],
12 'sources': [
13 'prog1.c',
14 ],
15 },
16 ],
17 }
+0
-7
third_party/gyp/test/toplevel-dir/src/sub1/prog1.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from prog1.c\n");
5 return 0;
6 }
+0
-7
third_party/gyp/test/toplevel-dir/src/sub2/prog2.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 printf("Hello from prog2.c\n");
5 return 0;
6 }
+0
-15
third_party/gyp/test/toplevel-dir/src/sub2/prog2.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'prog2',
8 'type': 'executable',
9 'sources': [
10 'prog2.c',
11 ],
12 },
13 ],
14 }
+0
-128
third_party/gyp/test/variables/commands/commands-repeated.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 # This is a simple test file to make sure that variable substitution
5 # happens correctly. Run "run_tests.py" using python to generate the
6 # output from this gyp file.
7
8 {
9 'variables': {
10 'pi': 'import math; print math.pi',
11 'third_letters': "<(other_letters)HIJK",
12 'letters_list': 'ABCD',
13 'other_letters': '<(letters_list)EFG',
14 'check_included': '<(included_variable)',
15 'check_lists': [
16 '<(included_variable)',
17 '<(third_letters)',
18 ],
19 'check_int': 5,
20 'check_str_int': '6',
21 'check_list_int': [
22 7,
23 '8',
24 9,
25 ],
26 'not_int_1': ' 10',
27 'not_int_2': '11 ',
28 'not_int_3': '012',
29 'not_int_4': '13.0',
30 'not_int_5': '+14',
31 'negative_int': '-15',
32 'zero_int': '0',
33 },
34 'includes': [
35 'commands.gypi',
36 ],
37 'targets': [
38 {
39 'target_name': 'foo',
40 'type': 'none',
41 'variables': {
42 'var1': '<!(["python", "-c", "<(pi)"])',
43 'var2': '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")',
44 'var3': '<!(python -c "print \'<(letters_list)\'")',
45 'var4': '<(<!(python -c "print \'letters_list\'"))',
46 'var5': 'letters_',
47 'var6': 'list',
48 'var7': '<(check_int)',
49 'var8': '<(check_int)blah',
50 'var9': '<(check_str_int)',
51 'var10': '<(check_list_int)',
52 'var11': ['<@(check_list_int)'],
53 'var12': '<(not_int_1)',
54 'var13': '<(not_int_2)',
55 'var14': '<(not_int_3)',
56 'var15': '<(not_int_4)',
57 'var16': '<(not_int_5)',
58 'var17': '<(negative_int)',
59 'var18': '<(zero_int)',
60 # A second set with different names to make sure they only execute the
61 # commands once.
62 'var1prime': '<!(["python", "-c", "<(pi)"])',
63 'var2prime': '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")',
64 'var3prime': '<!(python -c "print \'<(letters_list)\'")',
65 'var4prime': '<(<!(python -c "print \'letters_list\'"))',
66 },
67 'actions': [
68 {
69 'action_name': 'test_action',
70 'variables': {
71 'var7': '<!(echo <(var5)<(var6))',
72 },
73 'inputs' : [
74 '<(var2)',
75 ],
76 'outputs': [
77 '<(var4)',
78 '<(var7)',
79 ],
80 'action': [
81 'echo',
82 '<(_inputs)',
83 '<(_outputs)',
84 ],
85 },
86 # Again with the same vars to make sure the right things happened.
87 {
88 'action_name': 'test_action_prime',
89 'variables': {
90 'var7': '<!(echo <(var5)<(var6))',
91 },
92 'inputs' : [
93 '<(var2)',
94 ],
95 'outputs': [
96 '<(var4)',
97 '<(var7)',
98 ],
99 'action': [
100 'echo',
101 '<(_inputs)',
102 '<(_outputs)',
103 ],
104 },
105 # And one more time with the other vars...
106 {
107 'action_name': 'test_action_prime_prime',
108 'variables': {
109 'var7': '<!(echo <(var5)<(var6))',
110 },
111 'inputs' : [
112 '<(var2prime)',
113 ],
114 'outputs': [
115 '<(var4prime)',
116 '<(var7)',
117 ],
118 'action': [
119 'echo',
120 '<(_inputs)',
121 '<(_outputs)',
122 ],
123 },
124 ],
125 },
126 ],
127 }
+0
-405
third_party/gyp/test/variables/commands/commands-repeated.gyp.stdout less more
0 GENERAL: running with these options:
1 GENERAL: check: None
2 GENERAL: circular_check: True
3 GENERAL: debug: ['variables', 'general']
4 GENERAL: defines: None
5 GENERAL: depth: '.'
6 GENERAL: formats: ['gypd']
7 GENERAL: generator_flags: []
8 GENERAL: generator_output: None
9 GENERAL: includes: None
10 GENERAL: msvs_version: None
11 GENERAL: suffix: ''
12 GENERAL: toplevel_dir: None
13 GENERAL: use_environment: True
14 GENERAL: cmdline_default_variables: {}
15 GENERAL: generator_flags: {}
16 VARIABLES: Expanding '0' to 0
17 VARIABLES: Expanding '11 ' to '11 '
18 VARIABLES: Expanding '+14' to '+14'
19 VARIABLES: Expanding '-15' to -15
20 VARIABLES: Expanding ' 10' to ' 10'
21 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
22 VARIABLES: Expanding 'letters_list' to 'letters_list'
23 VARIABLES: Found output 'ABCDEFG', recursing.
24 VARIABLES: Expanding 'ABCDEFG' to 'ABCDEFG'
25 VARIABLES: Expanding '<(letters_list)EFG' to 'ABCDEFG'
26 VARIABLES: Expanding '012' to '012'
27 VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
28 VARIABLES: Expanding 'other_letters' to 'other_letters'
29 VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
30 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
31 VARIABLES: Expanding 'letters_list' to 'letters_list'
32 VARIABLES: Found output 'ABCDEFGHIJK', recursing.
33 VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
34 VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
35 VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
36 VARIABLES: Expanding 'XYZ' to 'XYZ'
37 VARIABLES: Expanding 'ABCD' to 'ABCD'
38 VARIABLES: Expanding '13.0' to '13.0'
39 VARIABLES: Expanding 'import math; print math.pi' to 'import math; print math.pi'
40 VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
41 VARIABLES: Expanding 'included_variable' to 'included_variable'
42 VARIABLES: Found output 'XYZ', recursing.
43 VARIABLES: Expanding 'XYZ' to 'XYZ'
44 VARIABLES: Expanding '<(included_variable)' to 'XYZ'
45 VARIABLES: Expanding '6' to 6
46 VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
47 VARIABLES: Expanding 'included_variable' to 'included_variable'
48 VARIABLES: Found output 'XYZ', recursing.
49 VARIABLES: Expanding 'XYZ' to 'XYZ'
50 VARIABLES: Expanding '<(included_variable)' to 'XYZ'
51 VARIABLES: Matches: {'content': 'third_letters', 'is_array': '', 'type': '<', 'replace': '<(third_letters)'}
52 VARIABLES: Expanding 'third_letters' to 'third_letters'
53 VARIABLES: Found output '<(other_letters)HIJK', recursing.
54 VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
55 VARIABLES: Expanding 'other_letters' to 'other_letters'
56 VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
57 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
58 VARIABLES: Expanding 'letters_list' to 'letters_list'
59 VARIABLES: Found output 'ABCDEFGHIJK', recursing.
60 VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
61 VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
62 VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
63 VARIABLES: Expanding '<(third_letters)' to 'ABCDEFGHIJK'
64 VARIABLES: Expanding '8' to 8
65 VARIABLES: Expanding '.' to '.'
66 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
67 VARIABLES: Expanding 'letters_list' to 'letters_list'
68 VARIABLES: Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<!(python -c "<(pi)'}
69 VARIABLES: Matches: {'content': 'python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "<(pi)'}
70 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
71 VARIABLES: Expanding 'pi' to 'pi'
72 VARIABLES: Found output 'python -c "import math; print math.pi"', recursing.
73 VARIABLES: Expanding 'python -c "import math; print math.pi"' to 'python -c "import math; print math.pi"'
74 VARIABLES: Expanding 'python -c "<(pi)"' to 'python -c "import math; print math.pi"'
75 VARIABLES: Executing command 'python -c "import math; print math.pi"' in directory 'None'
76 VARIABLES: Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
77 VARIABLES: Expanding 'python -c "print \'3.14159265359 ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
78 VARIABLES: Expanding 'python -c "print \'<!(python -c "<(pi)") ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
79 VARIABLES: Executing command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
80 VARIABLES: Found output '3.14159265359 ABCD', recursing.
81 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
82 VARIABLES: Expanding '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")' to '3.14159265359 ABCD'
83 VARIABLES: Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'type': '<!', 'replace': '<!(["python", "-c", "<(pi)'}
84 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
85 VARIABLES: Expanding 'pi' to 'pi'
86 VARIABLES: Found output '["python", "-c", "import math; print math.pi"]', recursing.
87 VARIABLES: Expanding '["python", "-c", "import math; print math.pi"]' to '["python", "-c", "import math; print math.pi"]'
88 VARIABLES: Expanding '["python", "-c", "<(pi)"]' to '["python", "-c", "import math; print math.pi"]'
89 VARIABLES: Executing command '['python', '-c', 'import math; print math.pi']' in directory 'None'
90 VARIABLES: Found output '3.14159265359', recursing.
91 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
92 VARIABLES: Expanding '<!(["python", "-c", "<(pi)"])' to '3.14159265359'
93 VARIABLES: Expanding 'letters_' to 'letters_'
94 VARIABLES: Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'type': '<', 'replace': '<(<!(python -c "print \'letters_list\'")'}
95 VARIABLES: Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'letters_list\'")'}
96 VARIABLES: Expanding 'python -c "print \'letters_list\'"' to 'python -c "print \'letters_list\'"'
97 VARIABLES: Executing command 'python -c "print 'letters_list'"' in directory 'None'
98 VARIABLES: Found output 'letters_list', recursing.
99 VARIABLES: Expanding 'letters_list' to 'letters_list'
100 VARIABLES: Expanding '<!(python -c "print \'letters_list\'")' to 'letters_list'
101 VARIABLES: Found output 'ABCD', recursing.
102 VARIABLES: Expanding 'ABCD' to 'ABCD'
103 VARIABLES: Expanding '<(<!(python -c "print \'letters_list\'"))' to 'ABCD'
104 VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
105 VARIABLES: Expanding 'check_int' to 'check_int'
106 VARIABLES: Found output '5', recursing.
107 VARIABLES: Expanding '5' to 5
108 VARIABLES: Expanding '<(check_int)' to 5
109 VARIABLES: Expanding 'list' to 'list'
110 VARIABLES: Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'type': '<!', 'replace': '<!(["python", "-c", "<(pi)'}
111 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
112 VARIABLES: Expanding 'pi' to 'pi'
113 VARIABLES: Found output '["python", "-c", "import math; print math.pi"]', recursing.
114 VARIABLES: Expanding '["python", "-c", "import math; print math.pi"]' to '["python", "-c", "import math; print math.pi"]'
115 VARIABLES: Expanding '["python", "-c", "<(pi)"]' to '["python", "-c", "import math; print math.pi"]'
116 VARIABLES: Had cache value for command '['python', '-c', 'import math; print math.pi']' in directory 'None'
117 VARIABLES: Found output '3.14159265359', recursing.
118 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
119 VARIABLES: Expanding '<!(["python", "-c", "<(pi)"])' to '3.14159265359'
120 VARIABLES: Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<(letters_list)'}
121 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
122 VARIABLES: Expanding 'letters_list' to 'letters_list'
123 VARIABLES: Found output 'python -c "print \'ABCD\'"', recursing.
124 VARIABLES: Expanding 'python -c "print \'ABCD\'"' to 'python -c "print \'ABCD\'"'
125 VARIABLES: Expanding 'python -c "print \'<(letters_list)\'"' to 'python -c "print \'ABCD\'"'
126 VARIABLES: Executing command 'python -c "print 'ABCD'"' in directory 'None'
127 VARIABLES: Found output 'ABCD', recursing.
128 VARIABLES: Expanding 'ABCD' to 'ABCD'
129 VARIABLES: Expanding '<!(python -c "print \'<(letters_list)\'")' to 'ABCD'
130 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
131 VARIABLES: Expanding 'letters_list' to 'letters_list'
132 VARIABLES: Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<!(python -c "<(pi)'}
133 VARIABLES: Matches: {'content': 'python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "<(pi)'}
134 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
135 VARIABLES: Expanding 'pi' to 'pi'
136 VARIABLES: Found output 'python -c "import math; print math.pi"', recursing.
137 VARIABLES: Expanding 'python -c "import math; print math.pi"' to 'python -c "import math; print math.pi"'
138 VARIABLES: Expanding 'python -c "<(pi)"' to 'python -c "import math; print math.pi"'
139 VARIABLES: Had cache value for command 'python -c "import math; print math.pi"' in directory 'None'
140 VARIABLES: Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
141 VARIABLES: Expanding 'python -c "print \'3.14159265359 ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
142 VARIABLES: Expanding 'python -c "print \'<!(python -c "<(pi)") ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
143 VARIABLES: Had cache value for command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
144 VARIABLES: Found output '3.14159265359 ABCD', recursing.
145 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
146 VARIABLES: Expanding '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")' to '3.14159265359 ABCD'
147 VARIABLES: Matches: {'content': 'check_str_int', 'is_array': '', 'type': '<', 'replace': '<(check_str_int)'}
148 VARIABLES: Expanding 'check_str_int' to 'check_str_int'
149 VARIABLES: Found output '6', recursing.
150 VARIABLES: Expanding '6' to 6
151 VARIABLES: Expanding '<(check_str_int)' to 6
152 VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
153 VARIABLES: Expanding 'check_int' to 'check_int'
154 VARIABLES: Found output '5blah', recursing.
155 VARIABLES: Expanding '5blah' to '5blah'
156 VARIABLES: Expanding '<(check_int)blah' to '5blah'
157 VARIABLES: Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'type': '<', 'replace': '<(<!(python -c "print \'letters_list\'")'}
158 VARIABLES: Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'letters_list\'")'}
159 VARIABLES: Expanding 'python -c "print \'letters_list\'"' to 'python -c "print \'letters_list\'"'
160 VARIABLES: Had cache value for command 'python -c "print 'letters_list'"' in directory 'None'
161 VARIABLES: Found output 'letters_list', recursing.
162 VARIABLES: Expanding 'letters_list' to 'letters_list'
163 VARIABLES: Expanding '<!(python -c "print \'letters_list\'")' to 'letters_list'
164 VARIABLES: Found output 'ABCD', recursing.
165 VARIABLES: Expanding 'ABCD' to 'ABCD'
166 VARIABLES: Expanding '<(<!(python -c "print \'letters_list\'"))' to 'ABCD'
167 VARIABLES: Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<(letters_list)'}
168 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
169 VARIABLES: Expanding 'letters_list' to 'letters_list'
170 VARIABLES: Found output 'python -c "print \'ABCD\'"', recursing.
171 VARIABLES: Expanding 'python -c "print \'ABCD\'"' to 'python -c "print \'ABCD\'"'
172 VARIABLES: Expanding 'python -c "print \'<(letters_list)\'"' to 'python -c "print \'ABCD\'"'
173 VARIABLES: Had cache value for command 'python -c "print 'ABCD'"' in directory 'None'
174 VARIABLES: Found output 'ABCD', recursing.
175 VARIABLES: Expanding 'ABCD' to 'ABCD'
176 VARIABLES: Expanding '<!(python -c "print \'<(letters_list)\'")' to 'ABCD'
177 VARIABLES: Matches: {'content': 'not_int_4', 'is_array': '', 'type': '<', 'replace': '<(not_int_4)'}
178 VARIABLES: Expanding 'not_int_4' to 'not_int_4'
179 VARIABLES: Found output '13.0', recursing.
180 VARIABLES: Expanding '13.0' to '13.0'
181 VARIABLES: Expanding '<(not_int_4)' to '13.0'
182 VARIABLES: Matches: {'content': 'not_int_3', 'is_array': '', 'type': '<', 'replace': '<(not_int_3)'}
183 VARIABLES: Expanding 'not_int_3' to 'not_int_3'
184 VARIABLES: Found output '012', recursing.
185 VARIABLES: Expanding '012' to '012'
186 VARIABLES: Expanding '<(not_int_3)' to '012'
187 VARIABLES: Matches: {'content': 'negative_int', 'is_array': '', 'type': '<', 'replace': '<(negative_int)'}
188 VARIABLES: Expanding 'negative_int' to 'negative_int'
189 VARIABLES: Found output '-15', recursing.
190 VARIABLES: Expanding '-15' to -15
191 VARIABLES: Expanding '<(negative_int)' to -15
192 VARIABLES: Matches: {'content': 'not_int_5', 'is_array': '', 'type': '<', 'replace': '<(not_int_5)'}
193 VARIABLES: Expanding 'not_int_5' to 'not_int_5'
194 VARIABLES: Found output '+14', recursing.
195 VARIABLES: Expanding '+14' to '+14'
196 VARIABLES: Expanding '<(not_int_5)' to '+14'
197 VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<', 'replace': '<(check_list_int)'}
198 VARIABLES: Expanding 'check_list_int' to 'check_list_int'
199 VARIABLES: Found output '7 8 9', recursing.
200 VARIABLES: Expanding '7 8 9' to '7 8 9'
201 VARIABLES: Expanding '<(check_list_int)' to '7 8 9'
202 VARIABLES: Matches: {'content': 'not_int_2', 'is_array': '', 'type': '<', 'replace': '<(not_int_2)'}
203 VARIABLES: Expanding 'not_int_2' to 'not_int_2'
204 VARIABLES: Found output '11 ', recursing.
205 VARIABLES: Expanding '11 ' to '11 '
206 VARIABLES: Expanding '<(not_int_2)' to '11 '
207 VARIABLES: Matches: {'content': 'not_int_1', 'is_array': '', 'type': '<', 'replace': '<(not_int_1)'}
208 VARIABLES: Expanding 'not_int_1' to 'not_int_1'
209 VARIABLES: Found output ' 10', recursing.
210 VARIABLES: Expanding ' 10' to ' 10'
211 VARIABLES: Expanding '<(not_int_1)' to ' 10'
212 VARIABLES: Matches: {'content': 'zero_int', 'is_array': '', 'type': '<', 'replace': '<(zero_int)'}
213 VARIABLES: Expanding 'zero_int' to 'zero_int'
214 VARIABLES: Found output '0', recursing.
215 VARIABLES: Expanding '0' to 0
216 VARIABLES: Expanding '<(zero_int)' to 0
217 VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<@', 'replace': '<@(check_list_int)'}
218 VARIABLES: Expanding 'check_list_int' to 'check_list_int'
219 VARIABLES: Found output [7, 8, 9], recursing.
220 VARIABLES: Expanding 7 to 7
221 VARIABLES: Expanding 8 to 8
222 VARIABLES: Expanding 9 to 9
223 VARIABLES: Expanding '<@(check_list_int)' to [7, 8, 9]
224 VARIABLES: Expanding 'foo' to 'foo'
225 VARIABLES: Expanding 'target' to 'target'
226 VARIABLES: Expanding 'none' to 'none'
227 VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
228 VARIABLES: Expanding 'var6' to 'var6'
229 VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
230 VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
231 VARIABLES: Expanding 'var5' to 'var5'
232 VARIABLES: Found output 'echo letters_list', recursing.
233 VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
234 VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
235 VARIABLES: Executing command 'echo letters_list' in directory 'None'
236 VARIABLES: Found output 'letters_list', recursing.
237 VARIABLES: Expanding 'letters_list' to 'letters_list'
238 VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
239 VARIABLES: Expanding 'test_action' to 'test_action'
240 VARIABLES: Expanding 'echo' to 'echo'
241 VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
242 VARIABLES: Expanding '_inputs' to '_inputs'
243 VARIABLES: Matches: {'content': 'var2', 'is_array': '', 'type': '<', 'replace': '<(var2)'}
244 VARIABLES: Expanding 'var2' to 'var2'
245 VARIABLES: Found output '3.14159265359 ABCD', recursing.
246 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
247 VARIABLES: Expanding '<(var2)' to '3.14159265359 ABCD'
248 VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
249 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
250 VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
251 VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
252 VARIABLES: Expanding '_outputs' to '_outputs'
253 VARIABLES: Matches: {'content': 'var4', 'is_array': '', 'type': '<', 'replace': '<(var4)'}
254 VARIABLES: Expanding 'var4' to 'var4'
255 VARIABLES: Found output 'ABCD', recursing.
256 VARIABLES: Expanding 'ABCD' to 'ABCD'
257 VARIABLES: Expanding '<(var4)' to 'ABCD'
258 VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
259 VARIABLES: Expanding 'var7' to 'var7'
260 VARIABLES: Found output 'letters_list', recursing.
261 VARIABLES: Expanding 'letters_list' to 'letters_list'
262 VARIABLES: Expanding '<(var7)' to 'letters_list'
263 VARIABLES: Found output 'ABCD letters_list', recursing.
264 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
265 VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
266 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
267 VARIABLES: Expanding 'ABCD' to 'ABCD'
268 VARIABLES: Expanding 'letters_list' to 'letters_list'
269 VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
270 VARIABLES: Expanding 'var6' to 'var6'
271 VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
272 VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
273 VARIABLES: Expanding 'var5' to 'var5'
274 VARIABLES: Found output 'echo letters_list', recursing.
275 VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
276 VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
277 VARIABLES: Had cache value for command 'echo letters_list' in directory 'None'
278 VARIABLES: Found output 'letters_list', recursing.
279 VARIABLES: Expanding 'letters_list' to 'letters_list'
280 VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
281 VARIABLES: Expanding 'test_action_prime' to 'test_action_prime'
282 VARIABLES: Expanding 'echo' to 'echo'
283 VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
284 VARIABLES: Expanding '_inputs' to '_inputs'
285 VARIABLES: Matches: {'content': 'var2', 'is_array': '', 'type': '<', 'replace': '<(var2)'}
286 VARIABLES: Expanding 'var2' to 'var2'
287 VARIABLES: Found output '3.14159265359 ABCD', recursing.
288 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
289 VARIABLES: Expanding '<(var2)' to '3.14159265359 ABCD'
290 VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
291 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
292 VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
293 VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
294 VARIABLES: Expanding '_outputs' to '_outputs'
295 VARIABLES: Matches: {'content': 'var4', 'is_array': '', 'type': '<', 'replace': '<(var4)'}
296 VARIABLES: Expanding 'var4' to 'var4'
297 VARIABLES: Found output 'ABCD', recursing.
298 VARIABLES: Expanding 'ABCD' to 'ABCD'
299 VARIABLES: Expanding '<(var4)' to 'ABCD'
300 VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
301 VARIABLES: Expanding 'var7' to 'var7'
302 VARIABLES: Found output 'letters_list', recursing.
303 VARIABLES: Expanding 'letters_list' to 'letters_list'
304 VARIABLES: Expanding '<(var7)' to 'letters_list'
305 VARIABLES: Found output 'ABCD letters_list', recursing.
306 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
307 VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
308 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
309 VARIABLES: Expanding 'ABCD' to 'ABCD'
310 VARIABLES: Expanding 'letters_list' to 'letters_list'
311 VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
312 VARIABLES: Expanding 'var6' to 'var6'
313 VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
314 VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
315 VARIABLES: Expanding 'var5' to 'var5'
316 VARIABLES: Found output 'echo letters_list', recursing.
317 VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
318 VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
319 VARIABLES: Had cache value for command 'echo letters_list' in directory 'None'
320 VARIABLES: Found output 'letters_list', recursing.
321 VARIABLES: Expanding 'letters_list' to 'letters_list'
322 VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
323 VARIABLES: Expanding 'test_action_prime_prime' to 'test_action_prime_prime'
324 VARIABLES: Expanding 'echo' to 'echo'
325 VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
326 VARIABLES: Expanding '_inputs' to '_inputs'
327 VARIABLES: Matches: {'content': 'var2prime', 'is_array': '', 'type': '<', 'replace': '<(var2prime)'}
328 VARIABLES: Expanding 'var2prime' to 'var2prime'
329 VARIABLES: Found output '3.14159265359 ABCD', recursing.
330 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
331 VARIABLES: Expanding '<(var2prime)' to '3.14159265359 ABCD'
332 VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
333 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
334 VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
335 VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
336 VARIABLES: Expanding '_outputs' to '_outputs'
337 VARIABLES: Matches: {'content': 'var4prime', 'is_array': '', 'type': '<', 'replace': '<(var4prime)'}
338 VARIABLES: Expanding 'var4prime' to 'var4prime'
339 VARIABLES: Found output 'ABCD', recursing.
340 VARIABLES: Expanding 'ABCD' to 'ABCD'
341 VARIABLES: Expanding '<(var4prime)' to 'ABCD'
342 VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
343 VARIABLES: Expanding 'var7' to 'var7'
344 VARIABLES: Found output 'letters_list', recursing.
345 VARIABLES: Expanding 'letters_list' to 'letters_list'
346 VARIABLES: Expanding '<(var7)' to 'letters_list'
347 VARIABLES: Found output 'ABCD letters_list', recursing.
348 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
349 VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
350 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
351 VARIABLES: Expanding 'ABCD' to 'ABCD'
352 VARIABLES: Expanding 'letters_list' to 'letters_list'
353 VARIABLES: Expanding 'dummy' to 'dummy'
354 VARIABLES: Expanding 'target' to 'target'
355 VARIABLES: Expanding 'none' to 'none'
356 VARIABLES: Expanding 'commands-repeated.gyp' to 'commands-repeated.gyp'
357 VARIABLES: Expanding 'commands.gypi' to 'commands.gypi'
358 VARIABLES: Expanding 'dummy' to 'dummy'
359 VARIABLES: Expanding 'target' to 'target'
360 VARIABLES: Expanding 'none' to 'none'
361 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
362 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
363 VARIABLES: Expanding 'letters_' to 'letters_'
364 VARIABLES: Expanding 'ABCD' to 'ABCD'
365 VARIABLES: Expanding 'list' to 'list'
366 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
367 VARIABLES: Expanding 'ABCD' to 'ABCD'
368 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
369 VARIABLES: Expanding '5blah' to '5blah'
370 VARIABLES: Expanding 'ABCD' to 'ABCD'
371 VARIABLES: Expanding 'ABCD' to 'ABCD'
372 VARIABLES: Expanding '13.0' to '13.0'
373 VARIABLES: Expanding '012' to '012'
374 VARIABLES: Expanding '+14' to '+14'
375 VARIABLES: Expanding '7 8 9' to '7 8 9'
376 VARIABLES: Expanding '11 ' to '11 '
377 VARIABLES: Expanding ' 10' to ' 10'
378 VARIABLES: Expanding 'foo' to 'foo'
379 VARIABLES: Expanding 'target' to 'target'
380 VARIABLES: Expanding 'none' to 'none'
381 VARIABLES: Expanding 'letters_list' to 'letters_list'
382 VARIABLES: Expanding 'test_action' to 'test_action'
383 VARIABLES: Expanding 'echo' to 'echo'
384 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
385 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
386 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
387 VARIABLES: Expanding 'ABCD' to 'ABCD'
388 VARIABLES: Expanding 'letters_list' to 'letters_list'
389 VARIABLES: Expanding 'letters_list' to 'letters_list'
390 VARIABLES: Expanding 'test_action_prime' to 'test_action_prime'
391 VARIABLES: Expanding 'echo' to 'echo'
392 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
393 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
394 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
395 VARIABLES: Expanding 'ABCD' to 'ABCD'
396 VARIABLES: Expanding 'letters_list' to 'letters_list'
397 VARIABLES: Expanding 'letters_list' to 'letters_list'
398 VARIABLES: Expanding 'test_action_prime_prime' to 'test_action_prime_prime'
399 VARIABLES: Expanding 'echo' to 'echo'
400 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
401 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
402 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
403 VARIABLES: Expanding 'ABCD' to 'ABCD'
404 VARIABLES: Expanding 'letters_list' to 'letters_list'
+0
-72
third_party/gyp/test/variables/commands/commands-repeated.gypd.golden less more
0 {'_DEPTH': '.',
1 'included_files': ['commands-repeated.gyp', 'commands.gypi'],
2 'targets': [{'actions': [{'action': ['echo',
3 '"3.14159265359 ABCD"',
4 'ABCD letters_list'],
5 'action_name': 'test_action',
6 'inputs': ['3.14159265359 ABCD'],
7 'outputs': ['ABCD', 'letters_list'],
8 'variables': {'var7': 'letters_list'}},
9 {'action': ['echo',
10 '"3.14159265359 ABCD"',
11 'ABCD letters_list'],
12 'action_name': 'test_action_prime',
13 'inputs': ['3.14159265359 ABCD'],
14 'outputs': ['ABCD', 'letters_list'],
15 'variables': {'var7': 'letters_list'}},
16 {'action': ['echo',
17 '"3.14159265359 ABCD"',
18 'ABCD letters_list'],
19 'action_name': 'test_action_prime_prime',
20 'inputs': ['3.14159265359 ABCD'],
21 'outputs': ['ABCD', 'letters_list'],
22 'variables': {'var7': 'letters_list'}}],
23 'configurations': {'Default': {}},
24 'default_configuration': 'Default',
25 'target_name': 'foo',
26 'toolset': 'target',
27 'type': 'none',
28 'variables': {'var1': '3.14159265359',
29 'var10': '7 8 9',
30 'var11': ['7', '8', '9'],
31 'var12': ' 10',
32 'var13': '11 ',
33 'var14': '012',
34 'var15': '13.0',
35 'var16': '+14',
36 'var17': '-15',
37 'var18': '0',
38 'var1prime': '3.14159265359',
39 'var2': '3.14159265359 ABCD',
40 'var2prime': '3.14159265359 ABCD',
41 'var3': 'ABCD',
42 'var3prime': 'ABCD',
43 'var4': 'ABCD',
44 'var4prime': 'ABCD',
45 'var5': 'letters_',
46 'var6': 'list',
47 'var7': '5',
48 'var8': '5blah',
49 'var9': '6'}},
50 {'configurations': {'Default': {}},
51 'default_configuration': 'Default',
52 'target_name': 'dummy',
53 'toolset': 'target',
54 'type': 'none'}],
55 'variables': {'check_included': 'XYZ',
56 'check_int': '5',
57 'check_list_int': ['7', '8', '9'],
58 'check_lists': ['XYZ', 'ABCDEFGHIJK'],
59 'check_str_int': '6',
60 'included_variable': 'XYZ',
61 'letters_list': 'ABCD',
62 'negative_int': '-15',
63 'not_int_1': ' 10',
64 'not_int_2': '11 ',
65 'not_int_3': '012',
66 'not_int_4': '13.0',
67 'not_int_5': '+14',
68 'other_letters': 'ABCDEFG',
69 'pi': 'import math; print math.pi',
70 'third_letters': 'ABCDEFGHIJK',
71 'zero_int': '0'}}
+0
-84
third_party/gyp/test/variables/commands/commands.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 # This is a simple test file to make sure that variable substitution
5 # happens correctly. Run "run_tests.py" using python to generate the
6 # output from this gyp file.
7
8 {
9 'variables': {
10 'pi': 'import math; print math.pi',
11 'third_letters': "<(other_letters)HIJK",
12 'letters_list': 'ABCD',
13 'other_letters': '<(letters_list)EFG',
14 'check_included': '<(included_variable)',
15 'check_lists': [
16 '<(included_variable)',
17 '<(third_letters)',
18 ],
19 'check_int': 5,
20 'check_str_int': '6',
21 'check_list_int': [
22 7,
23 '8',
24 9,
25 ],
26 'not_int_1': ' 10',
27 'not_int_2': '11 ',
28 'not_int_3': '012',
29 'not_int_4': '13.0',
30 'not_int_5': '+14',
31 'negative_int': '-15',
32 'zero_int': '0',
33 },
34 'includes': [
35 'commands.gypi',
36 ],
37 'targets': [
38 {
39 'target_name': 'foo',
40 'type': 'none',
41 'variables': {
42 'var1': '<!(["python", "-c", "<(pi)"])',
43 'var2': '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")',
44 'var3': '<!(python -c "print \'<(letters_list)\'")',
45 'var4': '<(<!(python -c "print \'letters_list\'"))',
46 'var5': 'letters_',
47 'var6': 'list',
48 'var7': '<(check_int)',
49 'var8': '<(check_int)blah',
50 'var9': '<(check_str_int)',
51 'var10': '<(check_list_int)',
52 'var11': ['<@(check_list_int)'],
53 'var12': '<(not_int_1)',
54 'var13': '<(not_int_2)',
55 'var14': '<(not_int_3)',
56 'var15': '<(not_int_4)',
57 'var16': '<(not_int_5)',
58 'var17': '<(negative_int)',
59 'var18': '<(zero_int)',
60 },
61 'actions': [
62 {
63 'action_name': 'test_action',
64 'variables': {
65 'var7': '<!(echo <(var5)<(var6))',
66 },
67 'inputs' : [
68 '<(var2)',
69 ],
70 'outputs': [
71 '<(var4)',
72 '<(var7)',
73 ],
74 'action': [
75 'echo',
76 '<(_inputs)',
77 '<(_outputs)',
78 ],
79 },
80 ],
81 },
82 ],
83 }
+0
-254
third_party/gyp/test/variables/commands/commands.gyp.ignore-env.stdout less more
0 GENERAL: running with these options:
1 GENERAL: check: None
2 GENERAL: circular_check: True
3 GENERAL: debug: ['variables', 'general']
4 GENERAL: defines: None
5 GENERAL: depth: '.'
6 GENERAL: formats: ['gypd']
7 GENERAL: generator_flags: []
8 GENERAL: generator_output: None
9 GENERAL: includes: None
10 GENERAL: msvs_version: None
11 GENERAL: suffix: ''
12 GENERAL: toplevel_dir: None
13 GENERAL: use_environment: False
14 GENERAL: cmdline_default_variables: {}
15 GENERAL: generator_flags: {}
16 VARIABLES: Expanding '0' to 0
17 VARIABLES: Expanding '11 ' to '11 '
18 VARIABLES: Expanding '+14' to '+14'
19 VARIABLES: Expanding '-15' to -15
20 VARIABLES: Expanding ' 10' to ' 10'
21 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
22 VARIABLES: Expanding 'letters_list' to 'letters_list'
23 VARIABLES: Found output 'ABCDEFG', recursing.
24 VARIABLES: Expanding 'ABCDEFG' to 'ABCDEFG'
25 VARIABLES: Expanding '<(letters_list)EFG' to 'ABCDEFG'
26 VARIABLES: Expanding '012' to '012'
27 VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
28 VARIABLES: Expanding 'other_letters' to 'other_letters'
29 VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
30 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
31 VARIABLES: Expanding 'letters_list' to 'letters_list'
32 VARIABLES: Found output 'ABCDEFGHIJK', recursing.
33 VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
34 VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
35 VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
36 VARIABLES: Expanding 'XYZ' to 'XYZ'
37 VARIABLES: Expanding 'ABCD' to 'ABCD'
38 VARIABLES: Expanding '13.0' to '13.0'
39 VARIABLES: Expanding 'import math; print math.pi' to 'import math; print math.pi'
40 VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
41 VARIABLES: Expanding 'included_variable' to 'included_variable'
42 VARIABLES: Found output 'XYZ', recursing.
43 VARIABLES: Expanding 'XYZ' to 'XYZ'
44 VARIABLES: Expanding '<(included_variable)' to 'XYZ'
45 VARIABLES: Expanding '6' to 6
46 VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
47 VARIABLES: Expanding 'included_variable' to 'included_variable'
48 VARIABLES: Found output 'XYZ', recursing.
49 VARIABLES: Expanding 'XYZ' to 'XYZ'
50 VARIABLES: Expanding '<(included_variable)' to 'XYZ'
51 VARIABLES: Matches: {'content': 'third_letters', 'is_array': '', 'type': '<', 'replace': '<(third_letters)'}
52 VARIABLES: Expanding 'third_letters' to 'third_letters'
53 VARIABLES: Found output '<(other_letters)HIJK', recursing.
54 VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
55 VARIABLES: Expanding 'other_letters' to 'other_letters'
56 VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
57 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
58 VARIABLES: Expanding 'letters_list' to 'letters_list'
59 VARIABLES: Found output 'ABCDEFGHIJK', recursing.
60 VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
61 VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
62 VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
63 VARIABLES: Expanding '<(third_letters)' to 'ABCDEFGHIJK'
64 VARIABLES: Expanding '8' to 8
65 VARIABLES: Expanding '.' to '.'
66 VARIABLES: Expanding 'letters_' to 'letters_'
67 VARIABLES: Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'type': '<', 'replace': '<(<!(python -c "print \'letters_list\'")'}
68 VARIABLES: Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'letters_list\'")'}
69 VARIABLES: Expanding 'python -c "print \'letters_list\'"' to 'python -c "print \'letters_list\'"'
70 VARIABLES: Executing command 'python -c "print 'letters_list'"' in directory 'None'
71 VARIABLES: Found output 'letters_list', recursing.
72 VARIABLES: Expanding 'letters_list' to 'letters_list'
73 VARIABLES: Expanding '<!(python -c "print \'letters_list\'")' to 'letters_list'
74 VARIABLES: Found output 'ABCD', recursing.
75 VARIABLES: Expanding 'ABCD' to 'ABCD'
76 VARIABLES: Expanding '<(<!(python -c "print \'letters_list\'"))' to 'ABCD'
77 VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
78 VARIABLES: Expanding 'check_int' to 'check_int'
79 VARIABLES: Found output '5', recursing.
80 VARIABLES: Expanding '5' to 5
81 VARIABLES: Expanding '<(check_int)' to 5
82 VARIABLES: Expanding 'list' to 'list'
83 VARIABLES: Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'type': '<!', 'replace': '<!(["python", "-c", "<(pi)'}
84 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
85 VARIABLES: Expanding 'pi' to 'pi'
86 VARIABLES: Found output '["python", "-c", "import math; print math.pi"]', recursing.
87 VARIABLES: Expanding '["python", "-c", "import math; print math.pi"]' to '["python", "-c", "import math; print math.pi"]'
88 VARIABLES: Expanding '["python", "-c", "<(pi)"]' to '["python", "-c", "import math; print math.pi"]'
89 VARIABLES: Executing command '['python', '-c', 'import math; print math.pi']' in directory 'None'
90 VARIABLES: Found output '3.14159265359', recursing.
91 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
92 VARIABLES: Expanding '<!(["python", "-c", "<(pi)"])' to '3.14159265359'
93 VARIABLES: Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<(letters_list)'}
94 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
95 VARIABLES: Expanding 'letters_list' to 'letters_list'
96 VARIABLES: Found output 'python -c "print \'ABCD\'"', recursing.
97 VARIABLES: Expanding 'python -c "print \'ABCD\'"' to 'python -c "print \'ABCD\'"'
98 VARIABLES: Expanding 'python -c "print \'<(letters_list)\'"' to 'python -c "print \'ABCD\'"'
99 VARIABLES: Executing command 'python -c "print 'ABCD'"' in directory 'None'
100 VARIABLES: Found output 'ABCD', recursing.
101 VARIABLES: Expanding 'ABCD' to 'ABCD'
102 VARIABLES: Expanding '<!(python -c "print \'<(letters_list)\'")' to 'ABCD'
103 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
104 VARIABLES: Expanding 'letters_list' to 'letters_list'
105 VARIABLES: Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<!(python -c "<(pi)'}
106 VARIABLES: Matches: {'content': 'python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "<(pi)'}
107 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
108 VARIABLES: Expanding 'pi' to 'pi'
109 VARIABLES: Found output 'python -c "import math; print math.pi"', recursing.
110 VARIABLES: Expanding 'python -c "import math; print math.pi"' to 'python -c "import math; print math.pi"'
111 VARIABLES: Expanding 'python -c "<(pi)"' to 'python -c "import math; print math.pi"'
112 VARIABLES: Executing command 'python -c "import math; print math.pi"' in directory 'None'
113 VARIABLES: Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
114 VARIABLES: Expanding 'python -c "print \'3.14159265359 ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
115 VARIABLES: Expanding 'python -c "print \'<!(python -c "<(pi)") ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
116 VARIABLES: Executing command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
117 VARIABLES: Found output '3.14159265359 ABCD', recursing.
118 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
119 VARIABLES: Expanding '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")' to '3.14159265359 ABCD'
120 VARIABLES: Matches: {'content': 'check_str_int', 'is_array': '', 'type': '<', 'replace': '<(check_str_int)'}
121 VARIABLES: Expanding 'check_str_int' to 'check_str_int'
122 VARIABLES: Found output '6', recursing.
123 VARIABLES: Expanding '6' to 6
124 VARIABLES: Expanding '<(check_str_int)' to 6
125 VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
126 VARIABLES: Expanding 'check_int' to 'check_int'
127 VARIABLES: Found output '5blah', recursing.
128 VARIABLES: Expanding '5blah' to '5blah'
129 VARIABLES: Expanding '<(check_int)blah' to '5blah'
130 VARIABLES: Matches: {'content': 'not_int_4', 'is_array': '', 'type': '<', 'replace': '<(not_int_4)'}
131 VARIABLES: Expanding 'not_int_4' to 'not_int_4'
132 VARIABLES: Found output '13.0', recursing.
133 VARIABLES: Expanding '13.0' to '13.0'
134 VARIABLES: Expanding '<(not_int_4)' to '13.0'
135 VARIABLES: Matches: {'content': 'not_int_3', 'is_array': '', 'type': '<', 'replace': '<(not_int_3)'}
136 VARIABLES: Expanding 'not_int_3' to 'not_int_3'
137 VARIABLES: Found output '012', recursing.
138 VARIABLES: Expanding '012' to '012'
139 VARIABLES: Expanding '<(not_int_3)' to '012'
140 VARIABLES: Matches: {'content': 'negative_int', 'is_array': '', 'type': '<', 'replace': '<(negative_int)'}
141 VARIABLES: Expanding 'negative_int' to 'negative_int'
142 VARIABLES: Found output '-15', recursing.
143 VARIABLES: Expanding '-15' to -15
144 VARIABLES: Expanding '<(negative_int)' to -15
145 VARIABLES: Matches: {'content': 'not_int_5', 'is_array': '', 'type': '<', 'replace': '<(not_int_5)'}
146 VARIABLES: Expanding 'not_int_5' to 'not_int_5'
147 VARIABLES: Found output '+14', recursing.
148 VARIABLES: Expanding '+14' to '+14'
149 VARIABLES: Expanding '<(not_int_5)' to '+14'
150 VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<', 'replace': '<(check_list_int)'}
151 VARIABLES: Expanding 'check_list_int' to 'check_list_int'
152 VARIABLES: Found output '7 8 9', recursing.
153 VARIABLES: Expanding '7 8 9' to '7 8 9'
154 VARIABLES: Expanding '<(check_list_int)' to '7 8 9'
155 VARIABLES: Matches: {'content': 'not_int_2', 'is_array': '', 'type': '<', 'replace': '<(not_int_2)'}
156 VARIABLES: Expanding 'not_int_2' to 'not_int_2'
157 VARIABLES: Found output '11 ', recursing.
158 VARIABLES: Expanding '11 ' to '11 '
159 VARIABLES: Expanding '<(not_int_2)' to '11 '
160 VARIABLES: Matches: {'content': 'not_int_1', 'is_array': '', 'type': '<', 'replace': '<(not_int_1)'}
161 VARIABLES: Expanding 'not_int_1' to 'not_int_1'
162 VARIABLES: Found output ' 10', recursing.
163 VARIABLES: Expanding ' 10' to ' 10'
164 VARIABLES: Expanding '<(not_int_1)' to ' 10'
165 VARIABLES: Matches: {'content': 'zero_int', 'is_array': '', 'type': '<', 'replace': '<(zero_int)'}
166 VARIABLES: Expanding 'zero_int' to 'zero_int'
167 VARIABLES: Found output '0', recursing.
168 VARIABLES: Expanding '0' to 0
169 VARIABLES: Expanding '<(zero_int)' to 0
170 VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<@', 'replace': '<@(check_list_int)'}
171 VARIABLES: Expanding 'check_list_int' to 'check_list_int'
172 VARIABLES: Found output [7, 8, 9], recursing.
173 VARIABLES: Expanding 7 to 7
174 VARIABLES: Expanding 8 to 8
175 VARIABLES: Expanding 9 to 9
176 VARIABLES: Expanding '<@(check_list_int)' to [7, 8, 9]
177 VARIABLES: Expanding 'foo' to 'foo'
178 VARIABLES: Expanding 'target' to 'target'
179 VARIABLES: Expanding 'none' to 'none'
180 VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
181 VARIABLES: Expanding 'var6' to 'var6'
182 VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
183 VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
184 VARIABLES: Expanding 'var5' to 'var5'
185 VARIABLES: Found output 'echo letters_list', recursing.
186 VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
187 VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
188 VARIABLES: Executing command 'echo letters_list' in directory 'None'
189 VARIABLES: Found output 'letters_list', recursing.
190 VARIABLES: Expanding 'letters_list' to 'letters_list'
191 VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
192 VARIABLES: Expanding 'test_action' to 'test_action'
193 VARIABLES: Expanding 'echo' to 'echo'
194 VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
195 VARIABLES: Expanding '_inputs' to '_inputs'
196 VARIABLES: Matches: {'content': 'var2', 'is_array': '', 'type': '<', 'replace': '<(var2)'}
197 VARIABLES: Expanding 'var2' to 'var2'
198 VARIABLES: Found output '3.14159265359 ABCD', recursing.
199 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
200 VARIABLES: Expanding '<(var2)' to '3.14159265359 ABCD'
201 VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
202 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
203 VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
204 VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
205 VARIABLES: Expanding '_outputs' to '_outputs'
206 VARIABLES: Matches: {'content': 'var4', 'is_array': '', 'type': '<', 'replace': '<(var4)'}
207 VARIABLES: Expanding 'var4' to 'var4'
208 VARIABLES: Found output 'ABCD', recursing.
209 VARIABLES: Expanding 'ABCD' to 'ABCD'
210 VARIABLES: Expanding '<(var4)' to 'ABCD'
211 VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
212 VARIABLES: Expanding 'var7' to 'var7'
213 VARIABLES: Found output 'letters_list', recursing.
214 VARIABLES: Expanding 'letters_list' to 'letters_list'
215 VARIABLES: Expanding '<(var7)' to 'letters_list'
216 VARIABLES: Found output 'ABCD letters_list', recursing.
217 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
218 VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
219 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
220 VARIABLES: Expanding 'ABCD' to 'ABCD'
221 VARIABLES: Expanding 'letters_list' to 'letters_list'
222 VARIABLES: Expanding 'dummy' to 'dummy'
223 VARIABLES: Expanding 'target' to 'target'
224 VARIABLES: Expanding 'none' to 'none'
225 VARIABLES: Expanding 'commands.gyp' to 'commands.gyp'
226 VARIABLES: Expanding 'commands.gypi' to 'commands.gypi'
227 VARIABLES: Expanding 'dummy' to 'dummy'
228 VARIABLES: Expanding 'target' to 'target'
229 VARIABLES: Expanding 'none' to 'none'
230 VARIABLES: Expanding 'letters_' to 'letters_'
231 VARIABLES: Expanding 'ABCD' to 'ABCD'
232 VARIABLES: Expanding 'list' to 'list'
233 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
234 VARIABLES: Expanding 'ABCD' to 'ABCD'
235 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
236 VARIABLES: Expanding '5blah' to '5blah'
237 VARIABLES: Expanding '13.0' to '13.0'
238 VARIABLES: Expanding '012' to '012'
239 VARIABLES: Expanding '+14' to '+14'
240 VARIABLES: Expanding '7 8 9' to '7 8 9'
241 VARIABLES: Expanding '11 ' to '11 '
242 VARIABLES: Expanding ' 10' to ' 10'
243 VARIABLES: Expanding 'foo' to 'foo'
244 VARIABLES: Expanding 'target' to 'target'
245 VARIABLES: Expanding 'none' to 'none'
246 VARIABLES: Expanding 'letters_list' to 'letters_list'
247 VARIABLES: Expanding 'test_action' to 'test_action'
248 VARIABLES: Expanding 'echo' to 'echo'
249 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
250 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
251 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
252 VARIABLES: Expanding 'ABCD' to 'ABCD'
253 VARIABLES: Expanding 'letters_list' to 'letters_list'
+0
-254
third_party/gyp/test/variables/commands/commands.gyp.stdout less more
0 GENERAL: running with these options:
1 GENERAL: check: None
2 GENERAL: circular_check: True
3 GENERAL: debug: ['variables', 'general']
4 GENERAL: defines: None
5 GENERAL: depth: '.'
6 GENERAL: formats: ['gypd']
7 GENERAL: generator_flags: []
8 GENERAL: generator_output: None
9 GENERAL: includes: None
10 GENERAL: msvs_version: None
11 GENERAL: suffix: ''
12 GENERAL: toplevel_dir: None
13 GENERAL: use_environment: True
14 GENERAL: cmdline_default_variables: {}
15 GENERAL: generator_flags: {}
16 VARIABLES: Expanding '0' to 0
17 VARIABLES: Expanding '11 ' to '11 '
18 VARIABLES: Expanding '+14' to '+14'
19 VARIABLES: Expanding '-15' to -15
20 VARIABLES: Expanding ' 10' to ' 10'
21 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
22 VARIABLES: Expanding 'letters_list' to 'letters_list'
23 VARIABLES: Found output 'ABCDEFG', recursing.
24 VARIABLES: Expanding 'ABCDEFG' to 'ABCDEFG'
25 VARIABLES: Expanding '<(letters_list)EFG' to 'ABCDEFG'
26 VARIABLES: Expanding '012' to '012'
27 VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
28 VARIABLES: Expanding 'other_letters' to 'other_letters'
29 VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
30 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
31 VARIABLES: Expanding 'letters_list' to 'letters_list'
32 VARIABLES: Found output 'ABCDEFGHIJK', recursing.
33 VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
34 VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
35 VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
36 VARIABLES: Expanding 'XYZ' to 'XYZ'
37 VARIABLES: Expanding 'ABCD' to 'ABCD'
38 VARIABLES: Expanding '13.0' to '13.0'
39 VARIABLES: Expanding 'import math; print math.pi' to 'import math; print math.pi'
40 VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
41 VARIABLES: Expanding 'included_variable' to 'included_variable'
42 VARIABLES: Found output 'XYZ', recursing.
43 VARIABLES: Expanding 'XYZ' to 'XYZ'
44 VARIABLES: Expanding '<(included_variable)' to 'XYZ'
45 VARIABLES: Expanding '6' to 6
46 VARIABLES: Matches: {'content': 'included_variable', 'is_array': '', 'type': '<', 'replace': '<(included_variable)'}
47 VARIABLES: Expanding 'included_variable' to 'included_variable'
48 VARIABLES: Found output 'XYZ', recursing.
49 VARIABLES: Expanding 'XYZ' to 'XYZ'
50 VARIABLES: Expanding '<(included_variable)' to 'XYZ'
51 VARIABLES: Matches: {'content': 'third_letters', 'is_array': '', 'type': '<', 'replace': '<(third_letters)'}
52 VARIABLES: Expanding 'third_letters' to 'third_letters'
53 VARIABLES: Found output '<(other_letters)HIJK', recursing.
54 VARIABLES: Matches: {'content': 'other_letters', 'is_array': '', 'type': '<', 'replace': '<(other_letters)'}
55 VARIABLES: Expanding 'other_letters' to 'other_letters'
56 VARIABLES: Found output '<(letters_list)EFGHIJK', recursing.
57 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
58 VARIABLES: Expanding 'letters_list' to 'letters_list'
59 VARIABLES: Found output 'ABCDEFGHIJK', recursing.
60 VARIABLES: Expanding 'ABCDEFGHIJK' to 'ABCDEFGHIJK'
61 VARIABLES: Expanding '<(letters_list)EFGHIJK' to 'ABCDEFGHIJK'
62 VARIABLES: Expanding '<(other_letters)HIJK' to 'ABCDEFGHIJK'
63 VARIABLES: Expanding '<(third_letters)' to 'ABCDEFGHIJK'
64 VARIABLES: Expanding '8' to 8
65 VARIABLES: Expanding '.' to '.'
66 VARIABLES: Expanding 'letters_' to 'letters_'
67 VARIABLES: Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'type': '<', 'replace': '<(<!(python -c "print \'letters_list\'")'}
68 VARIABLES: Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'letters_list\'")'}
69 VARIABLES: Expanding 'python -c "print \'letters_list\'"' to 'python -c "print \'letters_list\'"'
70 VARIABLES: Executing command 'python -c "print 'letters_list'"' in directory 'None'
71 VARIABLES: Found output 'letters_list', recursing.
72 VARIABLES: Expanding 'letters_list' to 'letters_list'
73 VARIABLES: Expanding '<!(python -c "print \'letters_list\'")' to 'letters_list'
74 VARIABLES: Found output 'ABCD', recursing.
75 VARIABLES: Expanding 'ABCD' to 'ABCD'
76 VARIABLES: Expanding '<(<!(python -c "print \'letters_list\'"))' to 'ABCD'
77 VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
78 VARIABLES: Expanding 'check_int' to 'check_int'
79 VARIABLES: Found output '5', recursing.
80 VARIABLES: Expanding '5' to 5
81 VARIABLES: Expanding '<(check_int)' to 5
82 VARIABLES: Expanding 'list' to 'list'
83 VARIABLES: Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'type': '<!', 'replace': '<!(["python", "-c", "<(pi)'}
84 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
85 VARIABLES: Expanding 'pi' to 'pi'
86 VARIABLES: Found output '["python", "-c", "import math; print math.pi"]', recursing.
87 VARIABLES: Expanding '["python", "-c", "import math; print math.pi"]' to '["python", "-c", "import math; print math.pi"]'
88 VARIABLES: Expanding '["python", "-c", "<(pi)"]' to '["python", "-c", "import math; print math.pi"]'
89 VARIABLES: Executing command '['python', '-c', 'import math; print math.pi']' in directory 'None'
90 VARIABLES: Found output '3.14159265359', recursing.
91 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
92 VARIABLES: Expanding '<!(["python", "-c", "<(pi)"])' to '3.14159265359'
93 VARIABLES: Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<(letters_list)'}
94 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
95 VARIABLES: Expanding 'letters_list' to 'letters_list'
96 VARIABLES: Found output 'python -c "print \'ABCD\'"', recursing.
97 VARIABLES: Expanding 'python -c "print \'ABCD\'"' to 'python -c "print \'ABCD\'"'
98 VARIABLES: Expanding 'python -c "print \'<(letters_list)\'"' to 'python -c "print \'ABCD\'"'
99 VARIABLES: Executing command 'python -c "print 'ABCD'"' in directory 'None'
100 VARIABLES: Found output 'ABCD', recursing.
101 VARIABLES: Expanding 'ABCD' to 'ABCD'
102 VARIABLES: Expanding '<!(python -c "print \'<(letters_list)\'")' to 'ABCD'
103 VARIABLES: Matches: {'content': 'letters_list', 'is_array': '', 'type': '<', 'replace': '<(letters_list)'}
104 VARIABLES: Expanding 'letters_list' to 'letters_list'
105 VARIABLES: Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "print \'<!(python -c "<(pi)'}
106 VARIABLES: Matches: {'content': 'python -c "<(pi', 'is_array': '', 'type': '<!', 'replace': '<!(python -c "<(pi)'}
107 VARIABLES: Matches: {'content': 'pi', 'is_array': '', 'type': '<', 'replace': '<(pi)'}
108 VARIABLES: Expanding 'pi' to 'pi'
109 VARIABLES: Found output 'python -c "import math; print math.pi"', recursing.
110 VARIABLES: Expanding 'python -c "import math; print math.pi"' to 'python -c "import math; print math.pi"'
111 VARIABLES: Expanding 'python -c "<(pi)"' to 'python -c "import math; print math.pi"'
112 VARIABLES: Executing command 'python -c "import math; print math.pi"' in directory 'None'
113 VARIABLES: Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
114 VARIABLES: Expanding 'python -c "print \'3.14159265359 ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
115 VARIABLES: Expanding 'python -c "print \'<!(python -c "<(pi)") ABCD\'"' to 'python -c "print \'3.14159265359 ABCD\'"'
116 VARIABLES: Executing command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
117 VARIABLES: Found output '3.14159265359 ABCD', recursing.
118 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
119 VARIABLES: Expanding '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")' to '3.14159265359 ABCD'
120 VARIABLES: Matches: {'content': 'check_str_int', 'is_array': '', 'type': '<', 'replace': '<(check_str_int)'}
121 VARIABLES: Expanding 'check_str_int' to 'check_str_int'
122 VARIABLES: Found output '6', recursing.
123 VARIABLES: Expanding '6' to 6
124 VARIABLES: Expanding '<(check_str_int)' to 6
125 VARIABLES: Matches: {'content': 'check_int', 'is_array': '', 'type': '<', 'replace': '<(check_int)'}
126 VARIABLES: Expanding 'check_int' to 'check_int'
127 VARIABLES: Found output '5blah', recursing.
128 VARIABLES: Expanding '5blah' to '5blah'
129 VARIABLES: Expanding '<(check_int)blah' to '5blah'
130 VARIABLES: Matches: {'content': 'not_int_4', 'is_array': '', 'type': '<', 'replace': '<(not_int_4)'}
131 VARIABLES: Expanding 'not_int_4' to 'not_int_4'
132 VARIABLES: Found output '13.0', recursing.
133 VARIABLES: Expanding '13.0' to '13.0'
134 VARIABLES: Expanding '<(not_int_4)' to '13.0'
135 VARIABLES: Matches: {'content': 'not_int_3', 'is_array': '', 'type': '<', 'replace': '<(not_int_3)'}
136 VARIABLES: Expanding 'not_int_3' to 'not_int_3'
137 VARIABLES: Found output '012', recursing.
138 VARIABLES: Expanding '012' to '012'
139 VARIABLES: Expanding '<(not_int_3)' to '012'
140 VARIABLES: Matches: {'content': 'negative_int', 'is_array': '', 'type': '<', 'replace': '<(negative_int)'}
141 VARIABLES: Expanding 'negative_int' to 'negative_int'
142 VARIABLES: Found output '-15', recursing.
143 VARIABLES: Expanding '-15' to -15
144 VARIABLES: Expanding '<(negative_int)' to -15
145 VARIABLES: Matches: {'content': 'not_int_5', 'is_array': '', 'type': '<', 'replace': '<(not_int_5)'}
146 VARIABLES: Expanding 'not_int_5' to 'not_int_5'
147 VARIABLES: Found output '+14', recursing.
148 VARIABLES: Expanding '+14' to '+14'
149 VARIABLES: Expanding '<(not_int_5)' to '+14'
150 VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<', 'replace': '<(check_list_int)'}
151 VARIABLES: Expanding 'check_list_int' to 'check_list_int'
152 VARIABLES: Found output '7 8 9', recursing.
153 VARIABLES: Expanding '7 8 9' to '7 8 9'
154 VARIABLES: Expanding '<(check_list_int)' to '7 8 9'
155 VARIABLES: Matches: {'content': 'not_int_2', 'is_array': '', 'type': '<', 'replace': '<(not_int_2)'}
156 VARIABLES: Expanding 'not_int_2' to 'not_int_2'
157 VARIABLES: Found output '11 ', recursing.
158 VARIABLES: Expanding '11 ' to '11 '
159 VARIABLES: Expanding '<(not_int_2)' to '11 '
160 VARIABLES: Matches: {'content': 'not_int_1', 'is_array': '', 'type': '<', 'replace': '<(not_int_1)'}
161 VARIABLES: Expanding 'not_int_1' to 'not_int_1'
162 VARIABLES: Found output ' 10', recursing.
163 VARIABLES: Expanding ' 10' to ' 10'
164 VARIABLES: Expanding '<(not_int_1)' to ' 10'
165 VARIABLES: Matches: {'content': 'zero_int', 'is_array': '', 'type': '<', 'replace': '<(zero_int)'}
166 VARIABLES: Expanding 'zero_int' to 'zero_int'
167 VARIABLES: Found output '0', recursing.
168 VARIABLES: Expanding '0' to 0
169 VARIABLES: Expanding '<(zero_int)' to 0
170 VARIABLES: Matches: {'content': 'check_list_int', 'is_array': '', 'type': '<@', 'replace': '<@(check_list_int)'}
171 VARIABLES: Expanding 'check_list_int' to 'check_list_int'
172 VARIABLES: Found output [7, 8, 9], recursing.
173 VARIABLES: Expanding 7 to 7
174 VARIABLES: Expanding 8 to 8
175 VARIABLES: Expanding 9 to 9
176 VARIABLES: Expanding '<@(check_list_int)' to [7, 8, 9]
177 VARIABLES: Expanding 'foo' to 'foo'
178 VARIABLES: Expanding 'target' to 'target'
179 VARIABLES: Expanding 'none' to 'none'
180 VARIABLES: Matches: {'content': 'var6', 'is_array': '', 'type': '<', 'replace': '<(var6)'}
181 VARIABLES: Expanding 'var6' to 'var6'
182 VARIABLES: Matches: {'content': 'echo <(var5', 'is_array': '', 'type': '<!', 'replace': '<!(echo <(var5)'}
183 VARIABLES: Matches: {'content': 'var5', 'is_array': '', 'type': '<', 'replace': '<(var5)'}
184 VARIABLES: Expanding 'var5' to 'var5'
185 VARIABLES: Found output 'echo letters_list', recursing.
186 VARIABLES: Expanding 'echo letters_list' to 'echo letters_list'
187 VARIABLES: Expanding 'echo <(var5)list' to 'echo letters_list'
188 VARIABLES: Executing command 'echo letters_list' in directory 'None'
189 VARIABLES: Found output 'letters_list', recursing.
190 VARIABLES: Expanding 'letters_list' to 'letters_list'
191 VARIABLES: Expanding '<!(echo <(var5)<(var6))' to 'letters_list'
192 VARIABLES: Expanding 'test_action' to 'test_action'
193 VARIABLES: Expanding 'echo' to 'echo'
194 VARIABLES: Matches: {'content': '_inputs', 'is_array': '', 'type': '<', 'replace': '<(_inputs)'}
195 VARIABLES: Expanding '_inputs' to '_inputs'
196 VARIABLES: Matches: {'content': 'var2', 'is_array': '', 'type': '<', 'replace': '<(var2)'}
197 VARIABLES: Expanding 'var2' to 'var2'
198 VARIABLES: Found output '3.14159265359 ABCD', recursing.
199 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
200 VARIABLES: Expanding '<(var2)' to '3.14159265359 ABCD'
201 VARIABLES: Found output '"3.14159265359 ABCD"', recursing.
202 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
203 VARIABLES: Expanding '<(_inputs)' to '"3.14159265359 ABCD"'
204 VARIABLES: Matches: {'content': '_outputs', 'is_array': '', 'type': '<', 'replace': '<(_outputs)'}
205 VARIABLES: Expanding '_outputs' to '_outputs'
206 VARIABLES: Matches: {'content': 'var4', 'is_array': '', 'type': '<', 'replace': '<(var4)'}
207 VARIABLES: Expanding 'var4' to 'var4'
208 VARIABLES: Found output 'ABCD', recursing.
209 VARIABLES: Expanding 'ABCD' to 'ABCD'
210 VARIABLES: Expanding '<(var4)' to 'ABCD'
211 VARIABLES: Matches: {'content': 'var7', 'is_array': '', 'type': '<', 'replace': '<(var7)'}
212 VARIABLES: Expanding 'var7' to 'var7'
213 VARIABLES: Found output 'letters_list', recursing.
214 VARIABLES: Expanding 'letters_list' to 'letters_list'
215 VARIABLES: Expanding '<(var7)' to 'letters_list'
216 VARIABLES: Found output 'ABCD letters_list', recursing.
217 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
218 VARIABLES: Expanding '<(_outputs)' to 'ABCD letters_list'
219 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
220 VARIABLES: Expanding 'ABCD' to 'ABCD'
221 VARIABLES: Expanding 'letters_list' to 'letters_list'
222 VARIABLES: Expanding 'dummy' to 'dummy'
223 VARIABLES: Expanding 'target' to 'target'
224 VARIABLES: Expanding 'none' to 'none'
225 VARIABLES: Expanding 'commands.gyp' to 'commands.gyp'
226 VARIABLES: Expanding 'commands.gypi' to 'commands.gypi'
227 VARIABLES: Expanding 'dummy' to 'dummy'
228 VARIABLES: Expanding 'target' to 'target'
229 VARIABLES: Expanding 'none' to 'none'
230 VARIABLES: Expanding 'letters_' to 'letters_'
231 VARIABLES: Expanding 'ABCD' to 'ABCD'
232 VARIABLES: Expanding 'list' to 'list'
233 VARIABLES: Expanding '3.14159265359' to '3.14159265359'
234 VARIABLES: Expanding 'ABCD' to 'ABCD'
235 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
236 VARIABLES: Expanding '5blah' to '5blah'
237 VARIABLES: Expanding '13.0' to '13.0'
238 VARIABLES: Expanding '012' to '012'
239 VARIABLES: Expanding '+14' to '+14'
240 VARIABLES: Expanding '7 8 9' to '7 8 9'
241 VARIABLES: Expanding '11 ' to '11 '
242 VARIABLES: Expanding ' 10' to ' 10'
243 VARIABLES: Expanding 'foo' to 'foo'
244 VARIABLES: Expanding 'target' to 'target'
245 VARIABLES: Expanding 'none' to 'none'
246 VARIABLES: Expanding 'letters_list' to 'letters_list'
247 VARIABLES: Expanding 'test_action' to 'test_action'
248 VARIABLES: Expanding 'echo' to 'echo'
249 VARIABLES: Expanding '"3.14159265359 ABCD"' to '"3.14159265359 ABCD"'
250 VARIABLES: Expanding 'ABCD letters_list' to 'ABCD letters_list'
251 VARIABLES: Expanding '3.14159265359 ABCD' to '3.14159265359 ABCD'
252 VARIABLES: Expanding 'ABCD' to 'ABCD'
253 VARIABLES: Expanding 'letters_list' to 'letters_list'
+0
-54
third_party/gyp/test/variables/commands/commands.gypd.golden less more
0 {'_DEPTH': '.',
1 'included_files': ['commands.gyp', 'commands.gypi'],
2 'targets': [{'actions': [{'action': ['echo',
3 '"3.14159265359 ABCD"',
4 'ABCD letters_list'],
5 'action_name': 'test_action',
6 'inputs': ['3.14159265359 ABCD'],
7 'outputs': ['ABCD', 'letters_list'],
8 'variables': {'var7': 'letters_list'}}],
9 'configurations': {'Default': {}},
10 'default_configuration': 'Default',
11 'target_name': 'foo',
12 'toolset': 'target',
13 'type': 'none',
14 'variables': {'var1': '3.14159265359',
15 'var10': '7 8 9',
16 'var11': ['7', '8', '9'],
17 'var12': ' 10',
18 'var13': '11 ',
19 'var14': '012',
20 'var15': '13.0',
21 'var16': '+14',
22 'var17': '-15',
23 'var18': '0',
24 'var2': '3.14159265359 ABCD',
25 'var3': 'ABCD',
26 'var4': 'ABCD',
27 'var5': 'letters_',
28 'var6': 'list',
29 'var7': '5',
30 'var8': '5blah',
31 'var9': '6'}},
32 {'configurations': {'Default': {}},
33 'default_configuration': 'Default',
34 'target_name': 'dummy',
35 'toolset': 'target',
36 'type': 'none'}],
37 'variables': {'check_included': 'XYZ',
38 'check_int': '5',
39 'check_list_int': ['7', '8', '9'],
40 'check_lists': ['XYZ', 'ABCDEFGHIJK'],
41 'check_str_int': '6',
42 'included_variable': 'XYZ',
43 'letters_list': 'ABCD',
44 'negative_int': '-15',
45 'not_int_1': ' 10',
46 'not_int_2': '11 ',
47 'not_int_3': '012',
48 'not_int_4': '13.0',
49 'not_int_5': '+14',
50 'other_letters': 'ABCDEFG',
51 'pi': 'import math; print math.pi',
52 'third_letters': 'ABCDEFGHIJK',
53 'zero_int': '0'}}
+0
-16
third_party/gyp/test/variables/commands/commands.gypi less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 # This file is included from commands.gyp to test evaluation order of includes.
5 {
6 'variables': {
7 'included_variable': 'XYZ',
8 },
9 'targets': [
10 {
11 'target_name': 'dummy',
12 'type': 'none',
13 },
14 ],
15 }
+0
-51
third_party/gyp/test/variables/commands/gyptest-commands-ignore-env.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Test that environment variables are ignored when --ignore-environment is
8 specified.
9 """
10
11 import os
12
13 import TestGyp
14
15 os.environ['GYP_DEFINES'] = 'FOO=BAR'
16 os.environ['GYP_GENERATORS'] = 'foo'
17 os.environ['GYP_GENERATOR_FLAGS'] = 'genflag=foo'
18 os.environ['GYP_GENERATOR_OUTPUT'] = 'somedir'
19
20 test = TestGyp.TestGyp(format='gypd')
21
22 expect = test.read('commands.gyp.ignore-env.stdout').replace('\r', '')
23
24 # Set $HOME so that gyp doesn't read the user's actual
25 # ~/.gyp/include.gypi file, which may contain variables
26 # and other settings that would change the output.
27 os.environ['HOME'] = test.workpath()
28
29 test.run_gyp('commands.gyp',
30 '--debug', 'variables', '--debug', 'general',
31 '--ignore-environment',
32 stdout=expect)
33
34 # Verify the commands.gypd against the checked-in expected contents.
35 #
36 # Normally, we should canonicalize line endings in the expected
37 # contents file setting the Subversion svn:eol-style to native,
38 # but that would still fail if multiple systems are sharing a single
39 # workspace on a network-mounted file system. Consequently, we
40 # massage the Windows line endings ('\r\n') in the output to the
41 # checked-in UNIX endings ('\n').
42
43 contents = test.read('commands.gypd').replace('\r', '')
44 expect = test.read('commands.gypd.golden').replace('\r', '')
45 if not test.match(contents, expect):
46 print "Unexpected contents of `commands.gypd'"
47 test.diff(expect, contents, 'commands.gypd ')
48 test.fail_test()
49
50 test.pass_test()
+0
-45
third_party/gyp/test/variables/commands/gyptest-commands-repeated.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Test variable expansion of '<!()' syntax commands where they are evaluated
8 more then once..
9 """
10
11 import os
12
13 import TestGyp
14
15 test = TestGyp.TestGyp(format='gypd')
16
17 expect = test.read('commands-repeated.gyp.stdout').replace('\r', '')
18
19 # Set $HOME so that gyp doesn't read the user's actual
20 # ~/.gyp/include.gypi file, which may contain variables
21 # and other settings that would change the output.
22 os.environ['HOME'] = test.workpath()
23
24 test.run_gyp('commands-repeated.gyp',
25 '--debug', 'variables', '--debug', 'general',
26 stdout=expect)
27
28 # Verify the commands-repeated.gypd against the checked-in expected contents.
29 #
30 # Normally, we should canonicalize line endings in the expected
31 # contents file setting the Subversion svn:eol-style to native,
32 # but that would still fail if multiple systems are sharing a single
33 # workspace on a network-mounted file system. Consequently, we
34 # massage the Windows line endings ('\r\n') in the output to the
35 # checked-in UNIX endings ('\n').
36
37 contents = test.read('commands-repeated.gypd').replace('\r', '')
38 expect = test.read('commands-repeated.gypd.golden').replace('\r', '')
39 if not test.match(contents, expect):
40 print "Unexpected contents of `commands-repeated.gypd'"
41 test.diff(expect, contents, 'commands-repeated.gypd ')
42 test.fail_test()
43
44 test.pass_test()
+0
-44
third_party/gyp/test/variables/commands/gyptest-commands.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Test variable expansion of '<!()' syntax commands.
8 """
9
10 import os
11
12 import TestGyp
13
14 test = TestGyp.TestGyp(format='gypd')
15
16 expect = test.read('commands.gyp.stdout').replace('\r', '')
17
18 # Set $HOME so that gyp doesn't read the user's actual
19 # ~/.gyp/include.gypi file, which may contain variables
20 # and other settings that would change the output.
21 os.environ['HOME'] = test.workpath()
22
23 test.run_gyp('commands.gyp',
24 '--debug', 'variables', '--debug', 'general',
25 stdout=expect)
26
27 # Verify the commands.gypd against the checked-in expected contents.
28 #
29 # Normally, we should canonicalize line endings in the expected
30 # contents file setting the Subversion svn:eol-style to native,
31 # but that would still fail if multiple systems are sharing a single
32 # workspace on a network-mounted file system. Consequently, we
33 # massage the Windows line endings ('\r\n') in the output to the
34 # checked-in UNIX endings ('\n').
35
36 contents = test.read('commands.gypd').replace('\r', '')
37 expect = test.read('commands.gypd.golden').replace('\r', '')
38 if not test.match(contents, expect):
39 print "Unexpected contents of `commands.gypd'"
40 test.diff(expect, contents, 'commands.gypd ')
41 test.fail_test()
42
43 test.pass_test()
+0
-11
third_party/gyp/test/variables/commands/update_golden less more
0 #!/bin/bash
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 python ../../../gyp --debug variables --debug general --format gypd --depth . commands.gyp > commands.gyp.stdout
7 python ../../../gyp --ignore-environment --debug variables --debug general --format gypd --depth . commands.gyp > commands.gyp.ignore-env.stdout
8 cp -f commands.gypd commands.gypd.golden
9 python ../../../gyp --debug variables --debug general --format gypd --depth . commands-repeated.gyp > commands-repeated.gyp.stdout
10 cp -f commands-repeated.gypd commands-repeated.gypd.golden
+0
-174
third_party/gyp/test/variables/filelist/filelist.gyp.stdout less more
0 GENERAL: running with these options:
1 GENERAL: check: None
2 GENERAL: circular_check: True
3 GENERAL: debug: ['variables', 'general']
4 GENERAL: defines: None
5 GENERAL: depth: '.'
6 GENERAL: formats: ['gypd']
7 GENERAL: generator_flags: []
8 GENERAL: generator_output: None
9 GENERAL: includes: None
10 GENERAL: msvs_version: None
11 GENERAL: suffix: ''
12 GENERAL: toplevel_dir: None
13 GENERAL: use_environment: True
14 GENERAL: cmdline_default_variables: {}
15 GENERAL: generator_flags: {}
16 VARIABLES: Expanding 'exclude' to 'exclude'
17 VARIABLES: Expanding 'Sch.*' to 'Sch.*'
18 VARIABLES: Expanding 'include' to 'include'
19 VARIABLES: Expanding '.*dt' to '.*dt'
20 VARIABLES: Expanding 'exclude' to 'exclude'
21 VARIABLES: Expanding 'Jer.*' to 'Jer.*'
22 VARIABLES: Expanding 'John' to 'John'
23 VARIABLES: Expanding 'Jacob' to 'Jacob'
24 VARIABLES: Expanding 'Astor' to 'Astor'
25 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
26 VARIABLES: Expanding 'Jerome' to 'Jerome'
27 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
28 VARIABLES: Expanding 'Schultz' to 'Schultz'
29 VARIABLES: Expanding 'Astor' to 'Astor'
30 VARIABLES: Expanding '.' to '.'
31 VARIABLES: Matches: {'content': 'names.txt <@(names', 'is_array': '', 'type': '<|', 'replace': '<|(names.txt <@(names)'}
32 VARIABLES: Matches: {'content': 'names', 'is_array': '', 'type': '<@', 'replace': '<@(names)'}
33 VARIABLES: Expanding 'names' to 'names'
34 VARIABLES: Expanding 'John' to 'John'
35 VARIABLES: Expanding 'Jacob' to 'Jacob'
36 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
37 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
38 VARIABLES: Found output 'names.txt John Jacob Jingleheimer Schmidt', recursing.
39 VARIABLES: Expanding 'names.txt John Jacob Jingleheimer Schmidt' to 'names.txt John Jacob Jingleheimer Schmidt'
40 VARIABLES: Expanding 'names.txt <@(names)' to 'names.txt John Jacob Jingleheimer Schmidt'
41 VARIABLES: Found output 'names.txt', recursing.
42 VARIABLES: Expanding 'names.txt' to 'names.txt'
43 VARIABLES: Expanding '<|(names.txt <@(names))' to 'names.txt'
44 VARIABLES: Expanding 'foo' to 'foo'
45 VARIABLES: Expanding 'target' to 'target'
46 VARIABLES: Expanding 'none' to 'none'
47 VARIABLES: Expanding 'test_action' to 'test_action'
48 VARIABLES: Expanding 'python' to 'python'
49 VARIABLES: Expanding 'dummy.py' to 'dummy.py'
50 VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'}
51 VARIABLES: Expanding 'names_listfile' to 'names_listfile'
52 VARIABLES: Found output 'names.txt', recursing.
53 VARIABLES: Expanding 'names.txt' to 'names.txt'
54 VARIABLES: Expanding '<(names_listfile)' to 'names.txt'
55 VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'}
56 VARIABLES: Expanding 'names_listfile' to 'names_listfile'
57 VARIABLES: Found output 'names.txt', recursing.
58 VARIABLES: Expanding 'names.txt' to 'names.txt'
59 VARIABLES: Expanding '<(names_listfile)' to 'names.txt'
60 VARIABLES: Matches: {'content': 'cat <(names_listfile', 'is_array': '', 'type': '<!@', 'replace': '<!@(cat <(names_listfile)'}
61 VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'}
62 VARIABLES: Expanding 'names_listfile' to 'names_listfile'
63 VARIABLES: Found output 'cat names.txt', recursing.
64 VARIABLES: Expanding 'cat names.txt' to 'cat names.txt'
65 VARIABLES: Expanding 'cat <(names_listfile)' to 'cat names.txt'
66 VARIABLES: Executing command 'cat names.txt' in directory 'src'
67 VARIABLES: Found output ['John', 'Jacob', 'Jingleheimer', 'Schmidt'], recursing.
68 VARIABLES: Expanding 'John' to 'John'
69 VARIABLES: Expanding 'Jacob' to 'Jacob'
70 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
71 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
72 VARIABLES: Expanding '<!@(cat <(names_listfile))' to ['John', 'Jacob', 'Jingleheimer', 'Schmidt']
73 VARIABLES: Expanding 'dummy_foo' to 'dummy_foo'
74 VARIABLES: Matches: {'content': 'sources.txt <@(_sources', 'is_array': '', 'type': '<|', 'replace': '<|(sources.txt <@(_sources)'}
75 VARIABLES: Matches: {'content': '_sources', 'is_array': '', 'type': '<@', 'replace': '<@(_sources)'}
76 VARIABLES: Expanding '_sources' to '_sources'
77 VARIABLES: Expanding 'John' to 'John'
78 VARIABLES: Expanding 'Jacob' to 'Jacob'
79 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
80 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
81 VARIABLES: Found output 'sources.txt John Jacob Jingleheimer Schmidt', recursing.
82 VARIABLES: Expanding 'sources.txt John Jacob Jingleheimer Schmidt' to 'sources.txt John Jacob Jingleheimer Schmidt'
83 VARIABLES: Expanding 'sources.txt <@(_sources)' to 'sources.txt John Jacob Jingleheimer Schmidt'
84 VARIABLES: Found output 'sources.txt', recursing.
85 VARIABLES: Expanding 'sources.txt' to 'sources.txt'
86 VARIABLES: Expanding '<|(sources.txt <@(_sources))' to 'sources.txt'
87 VARIABLES: Expanding 'bar' to 'bar'
88 VARIABLES: Expanding 'target' to 'target'
89 VARIABLES: Expanding 'none' to 'none'
90 VARIABLES: Expanding 'exclude' to 'exclude'
91 VARIABLES: Expanding 'Sch.*' to 'Sch.*'
92 VARIABLES: Expanding 'include' to 'include'
93 VARIABLES: Expanding '.*dt' to '.*dt'
94 VARIABLES: Expanding 'exclude' to 'exclude'
95 VARIABLES: Expanding 'Jer.*' to 'Jer.*'
96 VARIABLES: Expanding 'Astor' to 'Astor'
97 VARIABLES: Expanding 'test_action' to 'test_action'
98 VARIABLES: Expanding 'python' to 'python'
99 VARIABLES: Expanding 'dummy.py' to 'dummy.py'
100 VARIABLES: Matches: {'content': 'sources_listfile', 'is_array': '', 'type': '<', 'replace': '<(sources_listfile)'}
101 VARIABLES: Expanding 'sources_listfile' to 'sources_listfile'
102 VARIABLES: Found output 'sources.txt', recursing.
103 VARIABLES: Expanding 'sources.txt' to 'sources.txt'
104 VARIABLES: Expanding '<(sources_listfile)' to 'sources.txt'
105 VARIABLES: Matches: {'content': 'sources_listfile', 'is_array': '', 'type': '<', 'replace': '<(sources_listfile)'}
106 VARIABLES: Expanding 'sources_listfile' to 'sources_listfile'
107 VARIABLES: Found output 'sources.txt', recursing.
108 VARIABLES: Expanding 'sources.txt' to 'sources.txt'
109 VARIABLES: Expanding '<(sources_listfile)' to 'sources.txt'
110 VARIABLES: Matches: {'content': 'cat <(sources_listfile', 'is_array': '', 'type': '<!@', 'replace': '<!@(cat <(sources_listfile)'}
111 VARIABLES: Matches: {'content': 'sources_listfile', 'is_array': '', 'type': '<', 'replace': '<(sources_listfile)'}
112 VARIABLES: Expanding 'sources_listfile' to 'sources_listfile'
113 VARIABLES: Found output 'cat sources.txt', recursing.
114 VARIABLES: Expanding 'cat sources.txt' to 'cat sources.txt'
115 VARIABLES: Expanding 'cat <(sources_listfile)' to 'cat sources.txt'
116 VARIABLES: Executing command 'cat sources.txt' in directory 'src'
117 VARIABLES: Found output ['John', 'Jacob', 'Jingleheimer', 'Schmidt'], recursing.
118 VARIABLES: Expanding 'John' to 'John'
119 VARIABLES: Expanding 'Jacob' to 'Jacob'
120 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
121 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
122 VARIABLES: Expanding '<!@(cat <(sources_listfile))' to ['John', 'Jacob', 'Jingleheimer', 'Schmidt']
123 VARIABLES: Expanding 'dummy_foo' to 'dummy_foo'
124 VARIABLES: Expanding 'John' to 'John'
125 VARIABLES: Expanding 'Jacob' to 'Jacob'
126 VARIABLES: Expanding 'Astor' to 'Astor'
127 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
128 VARIABLES: Expanding 'Jerome' to 'Jerome'
129 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
130 VARIABLES: Expanding 'Schultz' to 'Schultz'
131 VARIABLES: Expanding 'filelist.gyp' to 'filelist.gyp'
132 VARIABLES: Expanding 'names.txt' to 'names.txt'
133 VARIABLES: Expanding 'foo' to 'foo'
134 VARIABLES: Expanding 'target' to 'target'
135 VARIABLES: Expanding 'none' to 'none'
136 VARIABLES: Expanding 'test_action' to 'test_action'
137 VARIABLES: Expanding 'python' to 'python'
138 VARIABLES: Expanding 'dummy.py' to 'dummy.py'
139 VARIABLES: Expanding 'names.txt' to 'names.txt'
140 VARIABLES: Expanding 'names.txt' to 'names.txt'
141 VARIABLES: Expanding 'John' to 'John'
142 VARIABLES: Expanding 'Jacob' to 'Jacob'
143 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
144 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
145 VARIABLES: Expanding 'dummy_foo' to 'dummy_foo'
146 VARIABLES: Expanding 'sources.txt' to 'sources.txt'
147 VARIABLES: Expanding 'bar' to 'bar'
148 VARIABLES: Expanding 'target' to 'target'
149 VARIABLES: Expanding 'none' to 'none'
150 VARIABLES: Expanding 'exclude' to 'exclude'
151 VARIABLES: Expanding 'Sch.*' to 'Sch.*'
152 VARIABLES: Expanding 'include' to 'include'
153 VARIABLES: Expanding '.*dt' to '.*dt'
154 VARIABLES: Expanding 'exclude' to 'exclude'
155 VARIABLES: Expanding 'Jer.*' to 'Jer.*'
156 VARIABLES: Expanding 'Astor' to 'Astor'
157 VARIABLES: Expanding 'test_action' to 'test_action'
158 VARIABLES: Expanding 'python' to 'python'
159 VARIABLES: Expanding 'dummy.py' to 'dummy.py'
160 VARIABLES: Expanding 'sources.txt' to 'sources.txt'
161 VARIABLES: Expanding 'sources.txt' to 'sources.txt'
162 VARIABLES: Expanding 'John' to 'John'
163 VARIABLES: Expanding 'Jacob' to 'Jacob'
164 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
165 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
166 VARIABLES: Expanding 'dummy_foo' to 'dummy_foo'
167 VARIABLES: Expanding 'John' to 'John'
168 VARIABLES: Expanding 'Jacob' to 'Jacob'
169 VARIABLES: Expanding 'Astor' to 'Astor'
170 VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer'
171 VARIABLES: Expanding 'Jerome' to 'Jerome'
172 VARIABLES: Expanding 'Schmidt' to 'Schmidt'
173 VARIABLES: Expanding 'Schultz' to 'Schultz'
+0
-43
third_party/gyp/test/variables/filelist/filelist.gypd.golden less more
0 {'_DEPTH': '.',
1 'included_files': ['filelist.gyp'],
2 'targets': [{'actions': [{'action': ['python', 'dummy.py', 'names.txt'],
3 'action_name': 'test_action',
4 'inputs': ['names.txt',
5 'John',
6 'Jacob',
7 'Jingleheimer',
8 'Schmidt'],
9 'outputs': ['dummy_foo']}],
10 'configurations': {'Default': {}},
11 'default_configuration': 'Default',
12 'target_name': 'foo',
13 'toolset': 'target',
14 'type': 'none',
15 'variables': {'names_listfile': 'names.txt'}},
16 {'actions': [{'action': ['python', 'dummy.py', 'sources.txt'],
17 'action_name': 'test_action',
18 'inputs': ['sources.txt',
19 'John',
20 'Jacob',
21 'Jingleheimer',
22 'Schmidt'],
23 'outputs': ['dummy_foo']}],
24 'configurations': {'Default': {}},
25 'default_configuration': 'Default',
26 'sources': ['John', 'Jacob', 'Jingleheimer', 'Schmidt'],
27 'sources_excluded': ['Astor', 'Jerome', 'Schultz'],
28 'target_name': 'bar',
29 'toolset': 'target',
30 'type': 'none',
31 'variables': {'sources_listfile': 'sources.txt'}}],
32 'variables': {'names': ['John',
33 'Jacob',
34 'Astor',
35 'Jingleheimer',
36 'Jerome',
37 'Schmidt',
38 'Schultz'],
39 'names!': ['Astor'],
40 'names/': [['exclude', 'Sch.*'],
41 ['include', '.*dt'],
42 ['exclude', 'Jer.*']]}}
+0
-55
third_party/gyp/test/variables/filelist/gyptest-filelist.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Test variable expansion of '<|(list.txt ...)' syntax commands.
8 """
9
10 import os
11 import sys
12
13 import TestGyp
14
15 test = TestGyp.TestGyp(format='gypd')
16
17 expect = test.read('filelist.gyp.stdout')
18 if sys.platform == 'win32':
19 expect = expect.replace('/', r'\\').replace('\r', '')
20
21 # Set $HOME so that gyp doesn't read the user's actual
22 # ~/.gyp/include.gypi file, which may contain variables
23 # and other settings that would change the output.
24 os.environ['HOME'] = test.workpath()
25
26 test.run_gyp('src/filelist.gyp',
27 '--debug', 'variables', '--debug', 'general',
28 stdout=expect)
29
30 # Verify the filelist.gypd against the checked-in expected contents.
31 #
32 # Normally, we should canonicalize line endings in the expected
33 # contents file setting the Subversion svn:eol-style to native,
34 # but that would still fail if multiple systems are sharing a single
35 # workspace on a network-mounted file system. Consequently, we
36 # massage the Windows line endings ('\r\n') in the output to the
37 # checked-in UNIX endings ('\n').
38
39 contents = test.read('src/filelist.gypd').replace(
40 '\r', '').replace('\\\\', '/')
41 expect = test.read('filelist.gypd.golden').replace('\r', '')
42 if not test.match(contents, expect):
43 print "Unexpected contents of `src/filelist.gypd'"
44 test.diff(expect, contents, 'src/filelist.gypd ')
45 test.fail_test()
46
47 contents = test.read('src/names.txt')
48 expect = 'John\nJacob\nJingleheimer\nSchmidt\n'
49 if not test.match(contents, expect):
50 print "Unexpected contents of `src/names.txt'"
51 test.diff(expect, contents, 'src/names.txt ')
52 test.fail_test()
53
54 test.pass_test()
+0
-93
third_party/gyp/test/variables/filelist/src/filelist.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 # This is a test to make sure that <|(foo.txt a b c) generates
5 # a pre-calculated file list at gyp time and returns foo.txt.
6 # This feature is useful to work around limits in the number of arguments that
7 # can be passed to rule/action.
8
9 {
10 'variables': {
11 'names': [
12 'John',
13 'Jacob',
14 'Astor',
15 'Jingleheimer',
16 'Jerome',
17 'Schmidt',
18 'Schultz',
19 ],
20 'names!': [
21 'Astor',
22 ],
23 'names/': [
24 ['exclude', 'Sch.*'],
25 ['include', '.*dt'],
26 ['exclude', 'Jer.*'],
27 ],
28 },
29 'targets': [
30 {
31 'target_name': 'foo',
32 'type': 'none',
33 'variables': {
34 'names_listfile': '<|(names.txt <@(names))',
35 },
36 'actions': [
37 {
38 'action_name': 'test_action',
39 'inputs' : [
40 '<(names_listfile)',
41 '<!@(cat <(names_listfile))',
42 ],
43 'outputs': [
44 'dummy_foo',
45 ],
46 'action': [
47 'python', 'dummy.py', '<(names_listfile)',
48 ],
49 },
50 ],
51 },
52 {
53 'target_name': 'bar',
54 'type': 'none',
55 'sources': [
56 'John',
57 'Jacob',
58 'Astor',
59 'Jingleheimer',
60 'Jerome',
61 'Schmidt',
62 'Schultz',
63 ],
64 'sources!': [
65 'Astor',
66 ],
67 'sources/': [
68 ['exclude', 'Sch.*'],
69 ['include', '.*dt'],
70 ['exclude', 'Jer.*'],
71 ],
72 'variables': {
73 'sources_listfile': '<|(sources.txt <@(_sources))',
74 },
75 'actions': [
76 {
77 'action_name': 'test_action',
78 'inputs' : [
79 '<(sources_listfile)',
80 '<!@(cat <(sources_listfile))',
81 ],
82 'outputs': [
83 'dummy_foo',
84 ],
85 'action': [
86 'python', 'dummy.py', '<(sources_listfile)',
87 ],
88 },
89 ],
90 },
91 ],
92 }
+0
-8
third_party/gyp/test/variables/filelist/update_golden less more
0 #!/bin/bash
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 python ../../../gyp --debug variables --debug general --format gypd --depth . src/filelist.gyp > filelist.gyp.stdout
7 cp -f src/filelist.gypd filelist.gypd.golden
+0
-45
third_party/gyp/test/variants/gyptest-variants.py less more
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 Verify handling of build variants.
8
9 TODO: Right now, only the SCons generator supports this, so the
10 test case is SCons-specific. In particular, it relise on SCons'
11 ability to rebuild in response to changes on the command line. It
12 may be simpler to just drop this feature if the other generators
13 can't be made to behave the same way.
14 """
15
16 import TestGyp
17
18 test = TestGyp.TestGyp(formats=['scons'])
19
20 test.run_gyp('variants.gyp', chdir='src')
21
22 test.relocate('src', 'relocate/src')
23
24 test.build('variants.gyp', chdir='relocate/src')
25
26 test.run_built_executable('variants',
27 chdir='relocate/src',
28 stdout="Hello, world!\n")
29
30 test.sleep()
31 test.build('variants.gyp', 'VARIANT1=1', chdir='relocate/src')
32
33 test.run_built_executable('variants',
34 chdir='relocate/src',
35 stdout="Hello from VARIANT1\n")
36
37 test.sleep()
38 test.build('variants.gyp', 'VARIANT2=1', chdir='relocate/src')
39
40 test.run_built_executable('variants',
41 chdir='relocate/src',
42 stdout="Hello from VARIANT2\n")
43
44 test.pass_test()
+0
-13
third_party/gyp/test/variants/src/variants.c less more
0 #include <stdio.h>
1
2 int main(int argc, char *argv[])
3 {
4 #if defined(VARIANT1)
5 printf("Hello from VARIANT1\n");
6 #elif defined(VARIANT2)
7 printf("Hello from VARIANT2\n");
8 #else
9 printf("Hello, world!\n");
10 #endif
11 return 0;
12 }
+0
-27
third_party/gyp/test/variants/src/variants.gyp less more
0 # Copyright (c) 2009 Google Inc. All rights reserved.
1 # Use of this source code is governed by a BSD-style license that can be
2 # found in the LICENSE file.
3
4 {
5 'targets': [
6 {
7 'target_name': 'variants',
8 'type': 'executable',
9 'sources': [
10 'variants.c',
11 ],
12 'variants': {
13 'variant1' : {
14 'defines': [
15 'VARIANT1',
16 ],
17 },
18 'variant2' : {
19 'defines': [
20 'VARIANT2',
21 ],
22 },
23 },
24 },
25 ],
26 }
+0
-15
third_party/gyp/tools/README less more
0 pretty_vcproj:
1 Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2]
2
3 They key/value pair are used to resolve vsprops name.
4
5 For example, if I want to diff the base.vcproj project:
6
7 pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > orignal.txt
8 pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
9
10 And you can use your favorite diff tool to see the changes.
11
12 Note: In the case of base.vcproj, the original vcproj is one level up the generated one.
13 I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt
14 before you perform the diff.
+0
-142
third_party/gyp/tools/pretty_gyp.py less more
0 #!/usr/bin/env python
1 # Copyright (c) 2009 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
4
5 # This file pretty-prints the contents of a GYP file.
6
7 import sys
8 import re
9
10 input = []
11 if len(sys.argv) > 1:
12 input_file = open(sys.argv[1])
13 input = input_file.read().splitlines()
14 input_file.close()
15 else:
16 input = sys.stdin.read().splitlines()
17
18 # This is used to remove comments when we're counting braces.
19 comment_re = re.compile(r'\s*#.*')
20
21 # This is used to remove quoted strings when we're counting braces.
22 # It takes into account quoted quotes, and makes sure that the quotes
23 # match.
24 # NOTE: It does not handle quotes that span more than one line, or
25 # cases where an escaped quote is preceeded by an escaped backslash.
26 quote_re_str = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
27 quote_re = re.compile(quote_re_str)
28
29 def comment_replace(matchobj):
30 return matchobj.group(1) + matchobj.group(2) + '#' * len(matchobj.group(3))
31
32 def mask_comments(input):
33 # This is used to mask the quoted strings so we skip braces inside
34 # quoted strings.
35 search_re = re.compile(r'(.*?)(#)(.*)')
36 return [search_re.sub(comment_replace, line) for line in input]
37
38 def quote_replace(matchobj):
39 return "%s%s%s%s" % (matchobj.group(1),
40 matchobj.group(2),
41 'x'*len(matchobj.group(3)),
42 matchobj.group(2))
43
44 def mask_quotes(input):
45 # This is used to mask the quoted strings so we skip braces inside
46 # quoted strings.
47 search_re = re.compile(r'(.*?)' + quote_re_str)
48 return [search_re.sub(quote_replace, line) for line in input]
49
50 def do_split(input, masked_input, search_re):
51 output = []
52 mask_output = []
53 for (line, masked_line) in zip(input, masked_input):
54 m = search_re.match(masked_line)
55 while m:
56 split = len(m.group(1))
57 line = line[:split] + r'\n' + line[split:]
58 masked_line = masked_line[:split] + r'\n' + masked_line[split:]
59 m = search_re.match(masked_line)
60 output.extend(line.split(r'\n'))
61 mask_output.extend(masked_line.split(r'\n'))
62 return (output, mask_output)
63
64 # This masks out the quotes and comments, and then splits appropriate
65 # lines (lines that matche the double_*_brace re's above) before
66 # indenting them below.
67 def split_double_braces(input):
68 # These are used to split lines which have multiple braces on them, so
69 # that the indentation looks prettier when all laid out (e.g. closing
70 # braces make a nice diagonal line).
71 double_open_brace_re = re.compile(r'(.*?[\[\{\(,])(\s*)([\[\{\(])')
72 double_close_brace_re = re.compile(r'(.*?[\]\}\)],?)(\s*)([\]\}\)])')
73
74 masked_input = mask_quotes(input)
75 masked_input = mask_comments(masked_input)
76
77 (output, mask_output) = do_split(input, masked_input, double_open_brace_re)
78 (output, mask_output) = do_split(output, mask_output, double_close_brace_re)
79
80 return output
81
82 # This keeps track of the number of braces on a given line and returns
83 # the result. It starts at zero and subtracts for closed braces, and
84 # adds for open braces.
85 def count_braces(line):
86 open_braces = ['[', '(', '{']
87 close_braces = [']', ')', '}']
88 closing_prefix_re = re.compile(r'(.*?[^\s\]\}\)]+.*?)([\]\}\)],?)\s*$')
89 cnt = 0
90 stripline = comment_re.sub(r'', line)
91 stripline = quote_re.sub(r"''", stripline)
92 for char in stripline:
93 for brace in open_braces:
94 if char == brace:
95 cnt += 1
96 for brace in close_braces:
97 if char == brace:
98 cnt -= 1
99
100 after = False
101 if cnt > 0:
102 after = True
103
104 # This catches the special case of a closing brace having something
105 # other than just whitespace ahead of it -- we don't want to
106 # unindent that until after this line is printed so it stays with
107 # the previous indentation level.
108 if cnt < 0 and closing_prefix_re.match(stripline):
109 after = True
110 return (cnt, after)
111
112 # This does the main work of indenting the input based on the brace counts.
113 def prettyprint_input(lines):
114 indent = 0
115 basic_offset = 2
116 last_line = ""
117 for line in lines:
118 if comment_re.match(line):
119 print line
120 else:
121 line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
122 if len(line) > 0:
123 (brace_diff, after) = count_braces(line)
124 if brace_diff != 0:
125 if after:
126 print " " * (basic_offset * indent) + line
127 indent += brace_diff
128 else:
129 indent += brace_diff
130 print " " * (basic_offset * indent) + line
131 else:
132 print " " * (basic_offset * indent) + line
133 else:
134 print ""
135 last_line = line
136
137 # Split up the double braces.
138 lines = split_double_braces(input)
139
140 # Indent and print the output.
141 prettyprint_input(lines)
+0
-167
third_party/gyp/tools/pretty_sln.py less more
0 #!/usr/bin/python2.5
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Prints the information in a sln file in a diffable way.
7
8 It first outputs each projects in alphabetical order with their
9 dependencies.
10
11 Then it outputs a possible build order.
12 """
13
14 __author__ = 'nsylvain (Nicolas Sylvain)'
15
16 import os
17 import re
18 import sys
19 import pretty_vcproj
20
21 def BuildProject(project, built, projects, deps):
22 # if all dependencies are done, we can build it, otherwise we try to build the
23 # dependency.
24 # This is not infinite-recursion proof.
25 for dep in deps[project]:
26 if dep not in built:
27 BuildProject(dep, built, projects, deps)
28 print project
29 built.append(project)
30
31 def ParseSolution(solution_file):
32 # All projects, their clsid and paths.
33 projects = dict()
34
35 # A list of dependencies associated with a project.
36 dependencies = dict()
37
38 # Regular expressions that matches the SLN format.
39 # The first line of a project definition.
40 begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
41 '}"\) = "(.*)", "(.*)", "(.*)"$'))
42 # The last line of a project definition.
43 end_project = re.compile('^EndProject$')
44 # The first line of a dependency list.
45 begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$')
46 # The last line of a dependency list.
47 end_dep = re.compile('EndProjectSection$')
48 # A line describing a dependency.
49 dep_line = re.compile(' *({.*}) = ({.*})$')
50
51 in_deps = False
52 solution = open(solution_file)
53 for line in solution:
54 results = begin_project.search(line)
55 if results:
56 # Hack to remove icu because the diff is too different.
57 if results.group(1).find('icu') != -1:
58 continue
59 # We remove "_gyp" from the names because it helps to diff them.
60 current_project = results.group(1).replace('_gyp', '')
61 projects[current_project] = [results.group(2).replace('_gyp', ''),
62 results.group(3),
63 results.group(2)]
64 dependencies[current_project] = []
65 continue
66
67 results = end_project.search(line)
68 if results:
69 current_project = None
70 continue
71
72 results = begin_dep.search(line)
73 if results:
74 in_deps = True
75 continue
76
77 results = end_dep.search(line)
78 if results:
79 in_deps = False
80 continue
81
82 results = dep_line.search(line)
83 if results and in_deps and current_project:
84 dependencies[current_project].append(results.group(1))
85 continue
86
87 # Change all dependencies clsid to name instead.
88 for project in dependencies:
89 # For each dependencies in this project
90 new_dep_array = []
91 for dep in dependencies[project]:
92 # Look for the project name matching this cldis
93 for project_info in projects:
94 if projects[project_info][1] == dep:
95 new_dep_array.append(project_info)
96 dependencies[project] = sorted(new_dep_array)
97
98 return (projects, dependencies)
99
100 def PrintDependencies(projects, deps):
101 print "---------------------------------------"
102 print "Dependencies for all projects"
103 print "---------------------------------------"
104 print "-- --"
105
106 for (project, dep_list) in sorted(deps.items()):
107 print "Project : %s" % project
108 print "Path : %s" % projects[project][0]
109 if dep_list:
110 for dep in dep_list:
111 print " - %s" % dep
112 print ""
113
114 print "-- --"
115
116 def PrintBuildOrder(projects, deps):
117 print "---------------------------------------"
118 print "Build order "
119 print "---------------------------------------"
120 print "-- --"
121
122 built = []
123 for (project, dep_list) in sorted(deps.items()):
124 if project not in built:
125 BuildProject(project, built, projects, deps)
126
127 print "-- --"
128
129 def PrintVCProj(projects):
130
131 for project in projects:
132 print "-------------------------------------"
133 print "-------------------------------------"
134 print project
135 print project
136 print project
137 print "-------------------------------------"
138 print "-------------------------------------"
139
140 project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
141 projects[project][2]))
142
143 pretty = pretty_vcproj
144 argv = [ '',
145 project_path,
146 '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
147 ]
148 argv.extend(sys.argv[3:])
149 pretty.main(argv)
150
151 def main():
152 # check if we have exactly 1 parameter.
153 if len(sys.argv) < 2:
154 print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
155 return
156
157 (projects, deps) = ParseSolution(sys.argv[1])
158 PrintDependencies(projects, deps)
159 PrintBuildOrder(projects, deps)
160
161 if '--recursive' in sys.argv:
162 PrintVCProj(projects)
163
164 if __name__ == '__main__':
165 main()
166
+0
-316
third_party/gyp/tools/pretty_vcproj.py less more
0 #!/usr/bin/python2.5
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Make the format of a vcproj really pretty.
7
8 This script normalize and sort an xml. It also fetches all the properties
9 inside linked vsprops and include them explicitly in the vcproj.
10
11 It outputs the resulting xml to stdout.
12 """
13
14 __author__ = 'nsylvain (Nicolas Sylvain)'
15
16 import os
17 import sys
18
19 from xml.dom.minidom import parse
20 from xml.dom.minidom import Node
21
22 REPLACEMENTS = dict()
23 ARGUMENTS = None
24
25 class CmpTuple:
26 """Compare function between 2 tuple."""
27 def __call__(self, x, y):
28 (key1, value1) = x
29 (key2, value2) = y
30 return cmp(key1, key2)
31
32 class CmpNode:
33 """Compare function between 2 xml nodes."""
34
35 def get_string(self, node):
36 node_string = "node"
37 node_string += node.nodeName
38 if node.nodeValue:
39 node_string += node.nodeValue
40
41 if node.attributes:
42 # We first sort by name, if present.
43 node_string += node.getAttribute("Name")
44
45 all_nodes = []
46 for (name, value) in node.attributes.items():
47 all_nodes.append((name, value))
48
49 all_nodes.sort(CmpTuple())
50 for (name, value) in all_nodes:
51 node_string += name
52 node_string += value
53
54 return node_string
55
56 def __call__(self, x, y):
57 return cmp(self.get_string(x), self.get_string(y))
58
59 def PrettyPrintNode(node, indent=0):
60 if node.nodeType == Node.TEXT_NODE:
61 if node.data.strip():
62 print '%s%s' % (' '*indent, node.data.strip())
63 return
64
65 if node.childNodes:
66 node.normalize()
67 # Get the number of attributes
68 attr_count = 0
69 if node.attributes:
70 attr_count = node.attributes.length
71
72 # Print the main tag
73 if attr_count == 0:
74 print '%s<%s>' % (' '*indent, node.nodeName)
75 else:
76 print '%s<%s' % (' '*indent, node.nodeName)
77
78 all_attributes = []
79 for (name, value) in node.attributes.items():
80 all_attributes.append((name, value))
81 all_attributes.sort(CmpTuple())
82 for (name, value) in all_attributes:
83 print '%s %s="%s"' % (' '*indent, name, value)
84 print '%s>' % (' '*indent)
85 if node.nodeValue:
86 print '%s %s' % (' '*indent, node.nodeValue)
87
88 for sub_node in node.childNodes:
89 PrettyPrintNode(sub_node, indent=indent+2)
90 print '%s</%s>' % (' '*indent, node.nodeName)
91
92 def FlattenFilter(node):
93 """Returns a list of all the node and sub nodes."""
94 node_list = []
95
96 if (node.attributes and
97 node.getAttribute('Name') == '_excluded_files'):
98 # We don't add the "_excluded_files" filter.
99 return []
100
101 for current in node.childNodes:
102 if current.nodeName == 'Filter':
103 node_list.extend(FlattenFilter(current))
104 else:
105 node_list.append(current)
106
107 return node_list
108
109 def FixFilenames(filenames, current_directory):
110 new_list = []
111 for filename in filenames:
112 if filename:
113 for key in REPLACEMENTS:
114 filename = filename.replace(key, REPLACEMENTS[key])
115 os.chdir(current_directory)
116 filename = filename.strip('"\' ')
117 if filename.startswith('$'):
118 new_list.append(filename)
119 else:
120 new_list.append(os.path.abspath(filename))
121 return new_list
122
123 def AbsoluteNode(node):
124 # Make all the properties we know about in this node absolute.
125 if node.attributes:
126 for (name, value) in node.attributes.items():
127 if name in ['InheritedPropertySheets', 'RelativePath',
128 'AdditionalIncludeDirectories',
129 'IntermediateDirectory', 'OutputDirectory',
130 'AdditionalLibraryDirectories']:
131 # We want to fix up these paths
132 path_list = value.split(';')
133 new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
134 node.setAttribute(name, ';'.join(new_list))
135 if not value:
136 node.removeAttribute(name)
137
138 def CleanupVcproj(node):
139 # For each sub node, we call recursively this function.
140 for sub_node in node.childNodes:
141 AbsoluteNode(sub_node)
142 CleanupVcproj(sub_node)
143
144 # Normalize the node, and remove all extranous whitespaces.
145 for sub_node in node.childNodes:
146 if sub_node.nodeType == Node.TEXT_NODE:
147 sub_node.data = sub_node.data.replace("\r", "")
148 sub_node.data = sub_node.data.replace("\n", "")
149 sub_node.data = sub_node.data.rstrip()
150
151 # Fix all the semicolon separated attributes to be sorted, and we also
152 # remove the dups.
153 if node.attributes:
154 for (name, value) in node.attributes.items():
155 sorted_list = sorted(value.split(';'))
156 unique_list = []
157 [unique_list.append(i) for i in sorted_list if not unique_list.count(i)]
158 node.setAttribute(name, ';'.join(unique_list))
159 if not value:
160 node.removeAttribute(name)
161
162 if node.childNodes:
163 node.normalize()
164
165 # For each node, take a copy, and remove it from the list.
166 node_array = []
167 while node.childNodes and node.childNodes[0]:
168 # Take a copy of the node and remove it from the list.
169 current = node.childNodes[0]
170 node.removeChild(current)
171
172 # If the child is a filter, we want to append all its children
173 # to this same list.
174 if current.nodeName == 'Filter':
175 node_array.extend(FlattenFilter(current))
176 else:
177 node_array.append(current)
178
179
180 # Sort the list.
181 node_array.sort(CmpNode())
182
183 # Insert the nodes in the correct order.
184 for new_node in node_array:
185 # But don't append empty tool node.
186 if new_node.nodeName == 'Tool':
187 if new_node.attributes and new_node.attributes.length == 1:
188 # This one was empty.
189 continue
190 if new_node.nodeName == 'UserMacro':
191 continue
192 node.appendChild(new_node)
193
194 def GetConfiguationNodes(vcproj):
195 #TODO(nsylvain): Find a better way to navigate the xml.
196 nodes = []
197 for node in vcproj.childNodes:
198 if node.nodeName == "Configurations":
199 for sub_node in node.childNodes:
200 if sub_node.nodeName == "Configuration":
201 nodes.append(sub_node)
202
203 return nodes
204
205 def GetChildrenVsprops(filename):
206 dom = parse(filename)
207 if dom.documentElement.attributes:
208 vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
209 return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
210 return []
211
212 def SeekToNode(node1, child2):
213 # A text node does not have properties.
214 if child2.nodeType == Node.TEXT_NODE:
215 return None
216
217 # Get the name of the current node.
218 current_name = child2.getAttribute("Name")
219 if not current_name:
220 # There is no name. We don't know how to merge.
221 return None
222
223 # Look through all the nodes to find a match.
224 for sub_node in node1.childNodes:
225 if sub_node.nodeName == child2.nodeName:
226 name = sub_node.getAttribute("Name")
227 if name == current_name:
228 return sub_node
229
230 # No match. We give up.
231 return None
232
233 def MergeAttributes(node1, node2):
234 # No attributes to merge?
235 if not node2.attributes:
236 return
237
238 for (name, value2) in node2.attributes.items():
239 # Don't merge the 'Name' attribute.
240 if name == 'Name':
241 continue
242 value1 = node1.getAttribute(name)
243 if value1:
244 # The attribute exist in the main node. If it's equal, we leave it
245 # untouched, otherwise we concatenate it.
246 if value1 != value2:
247 node1.setAttribute(name, ';'.join([value1, value2]))
248 else:
249 # The attribute does nto exist in the main node. We append this one.
250 node1.setAttribute(name, value2)
251
252 # If the attribute was a property sheet attributes, we remove it, since
253 # they are useless.
254 if name == 'InheritedPropertySheets':
255 node1.removeAttribute(name)
256
257 def MergeProperties(node1, node2):
258 MergeAttributes(node1, node2)
259 for child2 in node2.childNodes:
260 child1 = SeekToNode(node1, child2)
261 if child1:
262 MergeProperties(child1, child2)
263 else:
264 node1.appendChild(child2.cloneNode(True))
265
266 def main(argv):
267 global REPLACEMENTS
268 global ARGUMENTS
269 ARGUMENTS = argv
270 """Main function of this vcproj prettifier."""
271
272 # check if we have exactly 1 parameter.
273 if len(argv) < 2:
274 print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
275 '[key2=value2]' % argv[0])
276 return
277
278 # Parse the keys
279 for i in range(2, len(argv)):
280 (key, value) = argv[i].split('=')
281 REPLACEMENTS[key] = value
282
283 # Open the vcproj and parse the xml.
284 dom = parse(argv[1])
285
286 # First thing we need to do is find the Configuration Node and merge them
287 # with the vsprops they include.
288 for configuration_node in GetConfiguationNodes(dom.documentElement):
289 # Get the property sheets associated with this configuration.
290 vsprops = configuration_node.getAttribute('InheritedPropertySheets')
291
292 # Fix the filenames to be absolute.
293 vsprops_list = FixFilenames(vsprops.strip().split(';'),
294 os.path.dirname(argv[1]))
295
296 # Extend the list of vsprops with all vsprops contained in the current
297 # vsprops.
298 for current_vsprops in vsprops_list:
299 vsprops_list.extend(GetChildrenVsprops(current_vsprops))
300
301 # Now that we have all the vsprops, we need to merge them.
302 for current_vsprops in vsprops_list:
303 MergeProperties(configuration_node,
304 parse(current_vsprops).documentElement)
305
306 # Now that everything is merged, we need to cleanup the xml.
307 CleanupVcproj(dom.documentElement)
308
309 # Finally, we use the prett xml function to print the vcproj back to the
310 # user.
311 #print dom.toprettyxml(newl="\n")
312 PrettyPrintNode(dom.documentElement)
313
314 if __name__ == '__main__':
315 main(sys.argv)
6464 const char kMozcSectionName[] = "engine/Mozc";
6565
6666 // Icon path for MozcTool
67 // TODO(taku): currently, unknown icon is displayed.
68 const char kMozcToolIconPath[] = "unknown.ico";
67 const char kMozcToolIconPath[] = "tool.png";
6968
7069 // for every 5 minutes, call SyncData
7170 const uint64 kSyncDataInterval = 5 * 60;
238237 IBusPropList *sub_prop_list = ibus_prop_list_new();
239238
240239 // Create items for the radio menu.
241 IBusText *label_for_panel = NULL; // e.g. Hiragana letter A.
240 string icon_path_for_panel;
242241 for (size_t i = 0; i < kMozcEnginePropertiesSize; ++i) {
243242 const MozcEngineProperty &entry = kMozcEngineProperties[i];
244243 IBusText *label = ibus_text_new_from_static_string(entry.label);
245244 IBusPropState state = PROP_STATE_UNCHECKED;
246245 if (entry.composition_mode == kMozcEngineInitialCompositionMode) {
247246 state = PROP_STATE_CHECKED;
248 label_for_panel = ibus_text_new_from_static_string(entry.label_for_panel);
247 icon_path_for_panel = GetIconPath(entry.icon);
249248 }
250249 IBusProperty *item = ibus_property_new(entry.key,
251250 PROP_TYPE_RADIO,
260259 ibus_prop_list_append(sub_prop_list, item);
261260 // |sub_prop_list| owns |item| by calling g_object_ref_sink for the |item|.
262261 }
263 DCHECK(label_for_panel) << "All items are disabled by default";
262 DCHECK(!icon_path_for_panel.empty());
264263
265264 // The label of |prop_composition_mode_| is shown in the language panel.
266265 prop_composition_mode_ = ibus_property_new("CompositionMode",
267266 PROP_TYPE_MENU,
268 label_for_panel,
269 NULL /* icon */,
267 NULL /* label */,
268 icon_path_for_panel.c_str(),
270269 NULL /* tooltip */,
271270 TRUE /* sensitive */,
272271 TRUE /* visible */,
286285 for (size_t i = 0; i < kMozcEngineToolPropertiesSize; ++i) {
287286 const MozcEngineToolProperty &entry = kMozcEngineToolProperties[i];
288287 IBusText *label = ibus_text_new_from_static_string(entry.label);
288 // TODO(yusukes): It would be better to use entry.icon here?
289289 IBusProperty *item = ibus_property_new(entry.mode,
290290 PROP_TYPE_NORMAL,
291291 label,
292 NULL,
293 NULL,
292 NULL /* icon */,
293 NULL /* tooltip */,
294294 TRUE,
295295 TRUE,
296296 PROP_STATE_UNCHECKED,
380380 }
381381
382382 void MozcEngine::Disable(IBusEngine *engine) {
383 // TODO(mazda): Implement this.
383 RevertSession(engine);
384384 }
385385
386386 void MozcEngine::Enable(IBusEngine *engine) {
387387 // Launch mozc_server
388388 session_->EnsureConnection();
389389 UpdatePreeditMethod();
390
391 // When ibus-mozc is disabled by the "next input method" hot key, ibus-daemon
392 // does not call MozcEngine::Disable(). Call RevertSession() here so the
393 // mozc_server could discard a preedit string before the hot key is pressed
394 // (crosbug.com/4596).
395 RevertSession(engine);
390396 }
391397
392398 void MozcEngine::FocusIn(IBusEngine *engine) {
394400 }
395401
396402 void MozcEngine::FocusOut(IBusEngine *engine) {
397 commands::SessionCommand command;
398 command.set_type(commands::SessionCommand::REVERT);
399 commands::Output output;
400 session_->SendCommand(command, &output);
401 UpdateAll(engine, output);
403 RevertSession(engine);
402404 SyncData(false);
403405 }
404406
470472 // TODO(mazda): Check if this code is necessary
471473 // if (!consumed) {
472474 // ibus_engine_forward_key_event(engine, keyval, keycode, modifiers);
473 // }
475 // }
474476 return consumed ? TRUE : FALSE;
475477 }
476478
498500 IBusProperty *prop = NULL;
499501
500502 #ifndef OS_CHROMEOS
501 DCHECK(prop_mozc_tool_);
502 while (prop = ibus_prop_list_get(prop_mozc_tool_->sub_props, i++)) {
503 if (!g_strcmp0(property_name, prop->key)) {
504 const MozcEngineToolProperty *entry =
505 reinterpret_cast<const MozcEngineToolProperty*>(
506 g_object_get_data(G_OBJECT(prop), kGObjectDataKey));
507 DCHECK(entry->mode);
508 if (!session_->LaunchTool(entry->mode, "")) {
509 LOG(ERROR) << "cannot launch: " << entry->mode;
503 if (prop_mozc_tool_) {
504 while (prop = ibus_prop_list_get(prop_mozc_tool_->sub_props, i++)) {
505 if (!g_strcmp0(property_name, prop->key)) {
506 const MozcEngineToolProperty *entry =
507 reinterpret_cast<const MozcEngineToolProperty*>(
508 g_object_get_data(G_OBJECT(prop), kGObjectDataKey));
509 DCHECK(entry->mode);
510 if (!session_->LaunchTool(entry->mode, "")) {
511 LOG(ERROR) << "cannot launch: " << entry->mode;
512 }
513 return;
510514 }
511 return;
512515 }
513516 }
514517 #endif
528531 // Update Mozc state.
529532 SetCompositionMode(engine, entry->composition_mode);
530533 // Update the language panel.
531 ibus_property_set_label(
532 prop_composition_mode_,
533 ibus_text_new_from_static_string(entry->label_for_panel));
534 ibus_property_set_icon(prop_composition_mode_,
535 GetIconPath(entry->icon).c_str());
534536 }
535537 // Update the radio menu item.
536538 ibus_property_set_state(prop, PROP_STATE_CHECKED);
553555 }
554556
555557 void MozcEngine::Reset(IBusEngine *engine) {
556 // TODO(mazda): Implement this.
558 RevertSession(engine);
557559 }
558560
559561 void MozcEngine::SetCapabilities(IBusEngine *engine,
849851 }
850852 }
851853
854 void MozcEngine::RevertSession(IBusEngine *engine) {
855 commands::SessionCommand command;
856 command.set_type(commands::SessionCommand::REVERT);
857 commands::Output output;
858 if (!session_->SendCommand(command, &output)) {
859 LOG(ERROR) << "RevertSession() failed";
860 return;
861 }
862 UpdateAll(engine, output);
863 }
864
852865 } // namespace ibus
853866 } // namespace mozc
129129 // always calls SyncData.
130130 void SyncData(bool force);
131131
132 // Reverts internal state of mozc_server by sending SessionCommand::REVERT IPC
133 // message, then hides a preedit string and the candidate window.
134 void RevertSession(IBusEngine *engine);
135
132136 uint64 last_sync_time_;
133137 scoped_ptr<KeyTranslator> key_translator_;
134138 scoped_ptr<client::SessionInterface> session_;
4343 const char *key; // IBus property key for the mode.
4444 const char *label; // text for the radio menu (ibus-anthy compatible).
4545 const char *label_for_panel; // text for the language panel.
46 const char *icon;
4647 } kMozcEngineProperties[] = {
4748 {
4849 commands::DIRECT,
4950 "CompositionMode.Direct",
5051 "Direct input",
5152 "A",
53 "direct.png",
5254 },
5355 {
5456 commands::HIRAGANA,
5557 "CompositionMode.Hiragana",
5658 "Hiragana",
5759 "\xe3\x81\x82", // Hiragana letter A
60 "hiragana.png",
5861 },
5962 {
6063 commands::FULL_KATAKANA,
6164 "CompositionMode.Katakana",
6265 "Katakana",
6366 "\xe3\x82\xa2", // Katakana letter A
67 "katakana_full.png",
6468 },
6569 {
6670 commands::HALF_ASCII,
6771 "CompositionMode.Latin",
6872 "Latin",
6973 "_A",
74 "alpha_half.png",
7075 },
7176 {
7277 commands::FULL_ASCII,
7378 "CompositionMode.WideLatin",
7479 "Wide Latin",
7580 "\xef\xbc\xa1", // Full width ASCII letter A
81 "alpha_full.png",
7682 },
7783 {
7884 commands::HALF_KATAKANA,
7985 "CompositionMode.HalfWidthKatakana",
8086 "Half width katakana",
8187 "_\xef\xbd\xb1", // Half width Katakana letter A
88 "katakana_half.png",
8289 },
8390 };
8491
9299 const char *key; // IBus property key for the MozcTool.
93100 const char *mode; // command line passed as --mode=
94101 const char *label; // text for the menu.
102 const char *icon; // icon
95103 } kMozcEngineToolProperties[] = {
96104 {
97105 "Tool.ConfigDialog",
98106 "config_dialog",
99 "property"
107 "Property",
108 "properties.png",
100109 },
101110 {
102111 "Tool.DictionaryTool",
103112 "dictionary_tool",
104 "dictionary tool"
113 "Dictionary tool",
114 "dictionary.png",
105115 },
106116 {
107117 "Tool.AboutDialog",
108118 "about_dialog",
109 "About Mozc"
119 "About Mozc",
120 NULL,
110121 },
111122 };
112123
4848 const char kPropToolDictionary[] = "/Mozc/Tool/dictionary";
4949 const char kPropToolDictionaryIcon[] = SCIM_ICONDIR "/scim-mozc-dictionary.png";
5050 const char kPropToolProperty[] = "/Mozc/Tool/property";
51 const char kPropToolPropertyIcon[] = SCIM_ICONDIR "/scim-mozc-property.png";
52
53 const char kPropCompositionModeIcon[] = "/Mozc/CompositionMode";
51 const char kPropToolPropertyIcon[] = SCIM_ICONDIR "/scim-mozc-properties.png";
52
53 const char kPropCompositionMode[] = "/Mozc/CompositionMode";
5454
5555 const struct CompositionMode {
5656 const char *icon;
6060 mozc::commands::CompositionMode mode;
6161 } kPropCompositionModes[] = {
6262 {
63 "", // TODO(yusukes): use icons.
63 SCIM_ICONDIR "/scim-mozc-direct.png",
6464 "A",
6565 "/Mozc/CompositionMode/direct",
6666 "Direct",
6767 mozc::commands::DIRECT,
6868 }, {
69 "",
69 SCIM_ICONDIR "/scim-mozc-hiragana.png",
7070 "\xe3\x81\x82", // Hiragana letter A in UTF-8.
7171 "/Mozc/CompositionMode/hiragana",
7272 "Hiragana",
7373 mozc::commands::HIRAGANA,
7474 }, {
75 "",
75 SCIM_ICONDIR "/scim-mozc-katakana_full.png",
7676 "\xe3\x82\xa2", // Katakana letter A.
7777 "/Mozc/CompositionMode/full_katakana",
7878 "Full Katakana",
7979 mozc::commands::FULL_KATAKANA,
8080 }, {
81 "",
81 SCIM_ICONDIR "/scim-mozc-alpha_half.png",
8282 "_A",
8383 "/Mozc/CompositionMode/half_ascii",
8484 "Half ASCII",
8585 mozc::commands::HALF_ASCII,
8686 }, {
87 "",
87 SCIM_ICONDIR "/scim-mozc-alpha_full.png",
8888 "\xef\xbc\xa1", // Full width ASCII letter A.
8989 "/Mozc/CompositionMode/full_ascii",
9090 "Full ASCII",
9191 mozc::commands::FULL_ASCII,
9292 }, {
93 "",
93 SCIM_ICONDIR "/scim-mozc-katakana_half.png",
9494 "_\xef\xbd\xb1", // Half width Katakana letter A.
9595 "/Mozc/CompositionMode/half_katakana",
9696 "Half Katakana",
300300 composition_mode_ = mode;
301301 // Update the bar.
302302 const char *icon = GetCurrentCompositionModeIcon();
303 const char *label = GetCurrentCompositionModeLabel();
304303 scim::Property p = scim::Property(
305 kPropCompositionModeIcon, label, icon, "Composition mode");
304 kPropCompositionMode, "", icon, "Composition mode");
306305 update_property(p);
307306 }
308307
365364
366365 void ScimMozc::InitializeBar() {
367366 VLOG(1) << "Registering properties";
368 // TODO(yusukes): L10N needed for "Tool", "Dictionary", and "Property".
367 // TODO(yusukes): L10N needed for "Tool", "Dictionary tool", and "Property".
369368 scim::PropertyList prop_list;
370369
371370 const char *icon = GetCurrentCompositionModeIcon();
372 const char *label = GetCurrentCompositionModeLabel();
373371 scim::Property p = scim::Property(
374 kPropCompositionModeIcon, label, icon, "Composition mode");
372 kPropCompositionMode, "", icon, "Composition mode");
375373 prop_list.push_back(p);
376374 for (size_t i = 0; i < kNumCompositionModes; ++i) {
377375 p = scim::Property(kPropCompositionModes[i].config_path,
387385 p = scim::Property(kPropTool, "", kPropToolIcon, "Tool");
388386 prop_list.push_back(p);
389387 p = scim::Property(
390 kPropToolDictionary, "Dictionary", kPropToolDictionaryIcon);
388 kPropToolDictionary, "Dictionary tool", kPropToolDictionaryIcon);
391389 prop_list.push_back(p);
392390 p = scim::Property(kPropToolProperty, "Property", kPropToolPropertyIcon);
393391 prop_list.push_back(p);
404402 return "";
405403 }
406404
407 const char *ScimMozc::GetCurrentCompositionModeLabel() const {
408 DCHECK(composition_mode_ < kNumCompositionModes);
409 if (composition_mode_ < kNumCompositionModes) {
410 return kPropCompositionModes[composition_mode_].label;
411 }
412 return "";
413 }
414
415405 } // namespace mozc_unix_scim
0 mozc (0.12.422.102) lucid; urgency=low
1
2 * Ver. 0.12.422.102 on 2010-07-23
3
4 -- Yasuhiro Matsuda <mazda> Fri, 23 Jul 2010 14:33:29 +0900
5
06 mozc (0.12.410.102) lucid; urgency=low
17
28 * Ver. 0.12.410.102 on 2010-07-12
5252 cp -p $(BUILD_DIR)/Release/ibus_mozc $(CURDIR)/debian/ibus-mozc/usr/lib/ibus-mozc/ibus-engine-mozc
5353 mkdir -p $(CURDIR)/debian/ibus-mozc/usr/share/ibus/component/
5454 sed 's|/usr/libexec/ibus-engine-mozc|/usr/lib/ibus-mozc/ibus-engine-mozc|' < $(BUILD_DIR)/Release/obj/gen/unix/ibus/mozc.xml > $(CURDIR)/debian/ibus-mozc/usr/share/ibus/component/mozc.xml
55 mkdir -p $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/
56 cp -p data/images/unix/ime_product_icon_opensource-32.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/product_icon.png
57 cp -p data/images/unix/ui-tool.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/tool.png
58 cp -p data/images/unix/ui-properties.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/properties.png
59 cp -p data/images/unix/ui-dictionary.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/dictionary.png
60 cp -p data/images/unix/ui-direct.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/direct.png
61 cp -p data/images/unix/ui-hiragana.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/hiragana.png
62 cp -p data/images/unix/ui-katakana_half.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/katakana_half.png
63 cp -p data/images/unix/ui-katakana_full.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/katakana_full.png
64 cp -p data/images/unix/ui-alpha_half.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/alpha_half.png
65 cp -p data/images/unix/ui-alpha_full.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/alpha_full.png
5566
5667 mkdir -p $(CURDIR)/debian/scim-mozc`pkg-config --variable=moduledir scim`/IMEngine/
5768 cp -p $(BUILD_DIR)/Release/lib.target/libscim_mozc.so $(CURDIR)/debian/scim-mozc`pkg-config --variable=moduledir scim`/IMEngine/mozc.so
5869 mkdir -p $(CURDIR)/debian/scim-mozc`pkg-config --variable=moduledir scim`/SetupUI/
5970 cp -p $(BUILD_DIR)/Release/lib.target/libscim_mozc_setup.so $(CURDIR)/debian/scim-mozc`pkg-config --variable=moduledir scim`/SetupUI/mozc-setup.so
71 mkdir -p $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/
72 cp -p data/images/unix/ime_product_icon_opensource-32.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc.png
73 cp -p data/images/unix/ui-tool.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-tool.png
74 cp -p data/images/unix/ui-properties.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-properties.png
75 cp -p data/images/unix/ui-dictionary.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-dictionary.png
76 cp -p data/images/unix/ui-direct.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-direct.png
77 cp -p data/images/unix/ui-hiragana.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-hiragana.png
78 cp -p data/images/unix/ui-katakana_half.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-katakana_half.png
79 cp -p data/images/unix/ui-katakana_full.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-katakana_full.png
80 cp -p data/images/unix/ui-alpha_half.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-alpha_half.png
81 cp -p data/images/unix/ui-alpha_full.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-alpha_full.png
6082
6183 mkdir -p $(CURDIR)/debian/mozc-server/usr/lib/mozc
6284 cp -p $(BUILD_DIR)/Release/mozc_server $(CURDIR)/debian/mozc-server/usr/lib/mozc/
3535 namespace usage_stats {
3636 namespace {
3737 const char kStatServerAddress[] =
38 "http://client4.google.com/tbproxy/usagestats";
38 "http://clients4.google.com/tbproxy/usagestats";
3939 const char kStatServerSourceId[] = "sourceid=ime";
4040 const char kStatServerAddedSendHeader[] =
4141 "Content-Type: application/x-www-form-urlencoded";
101101 TEST(UploadUtilTest, UploadTest) {
102102 TestHTTPClient client;
103103 HTTPClient::SetHTTPClientHandler(&client);
104 const string base_url = "http://client4.google.com/tbproxy/usagestats";
104 const string base_url = "http://clients4.google.com/tbproxy/usagestats";
105105 {
106106 TestHTTPClient::Result result;
107107 result.expected_url = base_url + "?sourceid=ime&hl=ja&v=test";
149149 bool val_;
150150 };
151151
152 const char kBaseUrl[] = "http://client4.google.com/tbproxy/usagestats";
152 const char kBaseUrl[] = "http://clients4.google.com/tbproxy/usagestats";
153153
154154 const char kTestClientId[] = "TestClientId";
155155