mozc / ba394a5
Imported Upstream version 0.12.422.102 (Nobuhiro Iwamatsu, 11 years ago)
753 changed files with 303 additions and 48720 deletions.
4848 = "\\\\.\\mailslot\\googlejapaneseinput.character_pad.";
4949 #ifdef GOOGLE_JAPANESE_INPUT_BUILD
5050 const char kEventPathPrefix[] = "Global\\GoogleJapaneseInput.event.";
51 const char kMutexPathPrefix[] = "Global\\GoogleJapaneseInput.mutex.";
5152 #else
5253 const char kEventPathPrefix[] = "Global\\Mozc.event.";
54 const char kMutexPathPrefix[] = "Global\\Mozc.mutex.";
5355 #endif // GOOGLE_JAPANESE_INPUT_BUILD
5456 const char kMozcServerName[] = "GoogleIMEJaConverter.exe";
5557 const char kIMEFile[] = "GoogleIMEJa.ime";
3636 #ifdef OS_WINDOWS
3737 #include <windows.h>
3838 #include <psapi.h> // GetModuleFileNameExW
39 #endif
39 #else
40 // For stat system call
41 #include <sys/types.h>
42 #include <sys/stat.h>
43 #include <unistd.h>
44 #endif // OS_WINDOWS
4045
4146 #ifdef OS_MACOSX
4247 #include <sys/sysctl.h>
175180 : mutex_(new Mutex),
176181 ipc_path_info_(new ipc::IPCPathInfo),
177182 name_(name),
178 server_pid_(0) {}
183 server_pid_(0),
184 last_modified_(-1) {}
179185
180186 IPCPathManager::~IPCPathManager() {}
181187
233239
234240 VLOG(1) << "ServerIPCKey: " << ipc_path_info_->key();
235241
242 last_modified_ = GetIPCFileTimeStamp();
236243 return true;
237244 }
238245
242249 return false;
243250 }
244251
245 if (ipc_path_info_->key().empty() && !LoadPathName()) {
252 if ((ShouldReload() || ipc_path_info_->key().empty()) && !LoadPathName()) {
246253 LOG(ERROR) << "GetPathName failed";
247254 return false;
248255 }
379386 return false;
380387 }
381388
389 bool IPCPathManager::ShouldReload() const {
390 #ifdef OS_WINDOWS
391 // On Windows, no reloading mechanism is necessary because IPC files
392 // are automatically removed.
393 return false;
394 #else
395 scoped_lock l(mutex_.get());
396
397 time_t last_modified = GetIPCFileTimeStamp();
398 if (last_modified == last_modified_) {
399 return false;
400 }
401
402 return true;
403 #endif // OS_WINDOWS
404 }
405
406 time_t IPCPathManager::GetIPCFileTimeStamp() const {
407 #ifdef OS_WINDOWS
408 // On Windows, we don't need to get the exact file timestamp, so
409 // just return -1 at this time.
410 return static_cast<time_t>(-1);
411 #else
412 const string filename = GetIPCKeyFileName(name_);
413 struct stat filestat;
414 if (::stat(filename.c_str(), &filestat) == -1) {
415 VLOG(2) << "stat(2) failed. Skipping reload";
416 return static_cast<time_t>(-1);
417 }
418 return filestat.st_mtime;
419 #endif // OS_WINDOWS
420 }
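
The two helpers above implement a simple staleness check: the key file's st_mtime is cached whenever the path is successfully saved or loaded, and any later mtime change is treated as a signal to reload. A minimal, self-contained sketch of that pattern on POSIX (the class and file handling are illustrative, not mozc's actual code):

#include <sys/stat.h>
#include <ctime>
#include <string>

// Illustrative helper: caches a file's mtime and reports when the file has
// changed since the last successful load.
class FileTimestampGuard {
 public:
  explicit FileTimestampGuard(const std::string &path)
      : path_(path), last_modified_(static_cast<time_t>(-1)) {}

  // Current mtime, or -1 if stat(2) fails (e.g. the file does not exist yet).
  time_t GetTimeStamp() const {
    struct stat filestat;
    if (::stat(path_.c_str(), &filestat) == -1) {
      return static_cast<time_t>(-1);
    }
    return filestat.st_mtime;
  }

  // True when the cached contents may be stale.
  bool ShouldReload() const { return GetTimeStamp() != last_modified_; }

  // Call right after (re)loading the file, mirroring how the change above
  // stores GetIPCFileTimeStamp() into last_modified_ after a save or load.
  void MarkLoaded() { last_modified_ = GetTimeStamp(); }

 private:
  std::string path_;
  time_t last_modified_;
};
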
421
382422 bool IPCPathManager::LoadPathName() {
383423 scoped_lock l(mutex_.get());
384424
459499 VLOG(1) << "ClientIPCKey: " << ipc_path_info_->key();
460500 VLOG(1) << "ProtocolVersion: " << ipc_path_info_->protocol_version();
461501
502 last_modified_ = GetIPCFileTimeStamp();
462503 return true;
463504 }
464505 } // namespace mozc
2929 #ifndef MOZC_IPC_IPC_PATH_MANAGER_H_
3030 #define MOZC_IPC_IPC_PATH_MANAGER_H_
3131
32 #ifdef OS_WINDOWS
33 #include <time.h> // for time_t
34 #else
35 #include <sys/time.h> // for time_t
36 #endif // OS_WINDOWS
3237 #include <string>
3338 #include "base/base.h"
3439 #include "base/mutex.h"
40 // For FRIEND_TEST
41 #include "testing/base/public/gunit_prod.h"
3542
3643 namespace mozc {
3744
9299 virtual ~IPCPathManager();
93100
94101 private:
102 FRIEND_TEST(IPCPathManagerTest, ReloadTest);
95103
96104 // Load ipc name from ~/.mozc/.ipc
97105 // Note that this method overwrites the ipc_key_
98106 bool LoadPathName();
107
108 // Returns true if the IPC file has been updated since it was loaded.
109 bool ShouldReload() const;
110
111 // Returns the last modified timestamp of the IPC file.
112 time_t GetIPCFileTimeStamp() const;
99113
100114 scoped_ptr<ProcessMutex> path_mutex_; // lock ipc path file
101115 scoped_ptr<Mutex> mutex_; // mutex for methods
103117 string name_;
104118 string server_path_; // cache for server_path
105119 uint32 server_pid_; // cache for pid of server_path
120 time_t last_modified_;
106121 };
107122 } // mozc
108123
2727 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
2828
2929 #include "base/base.h"
30 #include "base/file_stream.h"
31 #include "base/process_mutex.h"
3032 #include "base/util.h"
3133 #include "base/thread.h"
3234 #include "ipc/ipc_path_manager.h"
7173 }
7274 }
7375 };
76 } // anonymous namespace
7477
7578 TEST(IPCPathManagerTest, IPCPathManagerTest) {
7679 mozc::Util::SetUserProfileDirectory(FLAGS_test_tmpdir);
101104 threads[i].Join();
102105 }
103106 }
104 } // namespace
107
108 TEST(IPCPathManagerTest, ReloadTest) {
109 // We only have mock implementations on Windows, so the test is skipped there.
110 #ifndef OS_WINDOWS
111 mozc::IPCPathManager *manager =
112 mozc::IPCPathManager::GetIPCPathManager("reload_test");
113
114 EXPECT_TRUE(manager->CreateNewPathName());
115 EXPECT_TRUE(manager->SavePathName());
116
117 // Just after the save, there is no need to reload.
118 EXPECT_FALSE(manager->ShouldReload());
119
120 // Modify the saved file explicitly.
121 EXPECT_TRUE(manager->path_mutex_->UnLock());
122 Util::Sleep(1000 /* msec */);
123 string filename = Util::JoinPath(
124 Util::GetUserProfileDirectory(), ".reload_test.ipc");
125 OutputFileStream outf(filename.c_str());
126 outf << "foobar";
127 outf.close();
128
129 EXPECT_TRUE(manager->ShouldReload());
130 #endif // OS_WINDOWS
131 }
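
The Util::Sleep(1000) call is what makes this test reliable: st_mtime commonly has one-second resolution, so rewriting the file within the same second could leave the timestamp unchanged and EXPECT_TRUE(manager->ShouldReload()) would become flaky. A standalone sketch of the same effect using plain POSIX calls (the path and helper are illustrative):

#include <sys/stat.h>
#include <unistd.h>
#include <cassert>
#include <cstdio>
#include <ctime>

// Returns the file's mtime, or -1 when stat(2) fails.
static time_t MTimeOf(const char *path) {
  struct stat st;
  return (::stat(path, &st) == 0) ? st.st_mtime : static_cast<time_t>(-1);
}

int main() {
  const char *path = "/tmp/reload_test_example.ipc";  // illustrative path
  std::FILE *f = std::fopen(path, "w");
  if (f == NULL) return 1;
  std::fputs("key1", f);
  std::fclose(f);
  const time_t first = MTimeOf(path);

  ::sleep(2);  // st_mtime often has only one-second resolution

  f = std::fopen(path, "w");
  if (f == NULL) return 1;
  std::fputs("key2", f);
  std::fclose(f);

  // The rewrite is now observable through the timestamp alone, which is all
  // ShouldReload() needs in order to notice that the IPC key file changed.
  assert(MTimeOf(path) != first);
  return 0;
}
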
105132 } // mozc
3434 #include <windows.h>
3535 #include <string>
3636 #include "base/base.h"
37 #include "base/const.h"
3738 #include "base/mutex.h"
39 #include "base/singleton.h"
3840 #include "base/util.h"
3941 #include "ipc/ipc_path_manager.h"
4042 #include "third_party/mozc/sandbox/security_attributes.h"
6668 reinterpret_cast<FPGetNamedPipeServerProcessId>
6769 (::GetProcAddress(lib, "GetNamedPipeServerProcessId"));
6870 }
71
72 class IPCClientMutex {
73 public:
74 IPCClientMutex() {
75 // Make a kernel mutex object so that multiple ipc connections are
76 // serialized here. On Windows, there is no useful way to serialize
77 // multiple connections to the single-threaded named pipe server.
78 // WaitForNamedPipe doesn't work for this purpose, as it just lets
79 // clients know that the connection has become "available" right now.
80 // It doesn't mean that the connection is available for the current
81 // thread. The "available" notification is sent to all waiting IPC
82 // clients at the same time and only one client gets the connection.
83 // This causes redundant and wasteful CreateFile calls.
84 string mutex_name = kMutexPathPrefix;
85 mutex_name += Util::GetUserSidAsString();
86 mutex_name += ".ipc";
87 wstring wmutex_name;
88 Util::UTF8ToWide(mutex_name.c_str(), &wmutex_name);
89
90 LPSECURITY_ATTRIBUTES security_attributes_ptr = NULL;
91 SECURITY_ATTRIBUTES security_attributes;
92 if (!sandbox::MakeSecurityAttributes(&security_attributes)) {
93 LOG(ERROR) << "Cannot make SecurityAttributes";
94 } else {
95 security_attributes_ptr = &security_attributes;
96 }
97
98 // http://msdn.microsoft.com/en-us/library/ms682411(VS.85).aspx:
99 // Two or more processes can call CreateMutex to create the same named
100 // mutex. The first process actually creates the mutex, and subsequent
101 // processes with sufficient access rights simply open a handle to
102 // the existing mutex. This enables multiple processes to get handles
103 // of the same mutex, while relieving the user of the responsibility
104 // of ensuring that the creating process is started first.
105 // When using this technique, you should set the
106 // bInitialOwner flag to FALSE; otherwise, it can be difficult to be
107 // certain which process has initial ownership.
108 ipc_mutex_.reset(::CreateMutex(security_attributes_ptr,
109 FALSE, wmutex_name.c_str()));
110
111 if (ipc_mutex_.get() == NULL) {
112 LOG(ERROR) << "CreateMutex failed: " << ::GetLastError();
113 return;
114 }
115
116 // permit access from a process running with a low integrity level
117 if (Util::IsVistaOrLater()) {
118 sandbox::SetMandatoryLabelW(ipc_mutex_.get(),
119 SE_KERNEL_OBJECT, L"NX", L"LW");
120 }
121 }
122
123 virtual ~IPCClientMutex() {}
124
125 HANDLE get() const {
126 return ipc_mutex_.get();
127 }
128
129 private:
130 ScopedHandle ipc_mutex_;
131 };
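
As the MSDN excerpt in the comment explains, every client calls CreateMutex with the same name: the first call creates the kernel object and later calls merely open a handle to it, which is what lets unrelated processes serialize on one mutex without coordinating startup order. A stripped-down sketch of that create-or-open step (the mutex name is illustrative and the sandbox security attributes are omitted):

#include <windows.h>

// Illustrative name; the real code builds it from kMutexPathPrefix,
// the user SID, and an ".ipc" suffix.
static const wchar_t kExampleMutexName[] = L"Global\\example.ipc.mutex";

HANDLE OpenSharedIpcMutex() {
  // bInitialOwner is FALSE: creating or opening the mutex does not acquire
  // it; ownership is only taken later via WaitForSingleObject.
  HANDLE handle = ::CreateMutexW(NULL /* default security */, FALSE,
                                 kExampleMutexName);
  if (handle == NULL) {
    // ::GetLastError() explains the failure (e.g. insufficient rights).
    return NULL;
  }
  // ::GetLastError() == ERROR_ALREADY_EXISTS at this point simply means
  // another process created the mutex first and we opened the same kernel
  // object; that is the expected case for every client after the first.
  return handle;
}
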
132
133 // RAII class for calling ReleaseMutex in destructor.
134 class ScopedReleaseMutex {
135 public:
136 ScopedReleaseMutex(HANDLE handle)
137 : handle_(handle) {}
138
139 virtual ~ScopedReleaseMutex() {
140 if (NULL != handle_) {
141 ::ReleaseMutex(handle_);
142 }
143 }
144
145 HANDLE get() const { return handle_; }
146 private:
147 HANDLE handle_;
148 };
69149
70150 uint32 GetServerProcessId(HANDLE handle) {
71151 CallOnce(&g_once, &InitFPGetNamedPipeServerProcessId);
385465 void IPCClient::Init(const string &name, const string &server_path) {
386466 last_ipc_error_ = IPC_NO_CONNECTION;
387467
468 // TODO(taku): IPCClientMutex doesn't take the IPC path name into consideration.
469 // Currently, it is not a critical problem, as we only have a single
470 // channel (session).
471 ScopedReleaseMutex ipc_mutex(Singleton<IPCClientMutex>::get()->get());
472
473 if (ipc_mutex.get() == NULL) {
474 LOG(ERROR) << "IPC mutex is not available";
475 } else {
476 const int kMutexTimeout = 10 * 1000; // wait at most 10sec.
477 switch (::WaitForSingleObject(ipc_mutex.get(), kMutexTimeout)) {
478 case WAIT_TIMEOUT:
479 // TODO(taku): with suspend/resume, WaitForSingleObject may
480 // return WAIT_TIMEOUT. We have to consider the case
481 // in the future.
482 LOG(ERROR) << "IPC client was not available even after "
483 << kMutexTimeout << " msec.";
484 break;
485 case WAIT_ABANDONED:
486 DLOG(INFO) << "mutex object was removed";
487 break;
488 case WAIT_OBJECT_0:
489 break;
490 default:
491 break;
492 }
493 }
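
The wait above is deliberately forgiving: both WAIT_TIMEOUT and WAIT_ABANDONED fall through to the connection attempt, so a stuck or crashed client can delay other clients but never block them permanently. A hedged sketch of the overall acquire-connect-release flow (the pipe step is a placeholder; in the real code the release is performed by ScopedReleaseMutex):

#include <windows.h>

// Placeholder for the actual named-pipe connection logic.
static void ConnectToNamedPipe() {}

void ConnectSerialized(HANDLE ipc_mutex) {
  if (ipc_mutex != NULL) {
    const DWORD kMutexTimeout = 10 * 1000;  // wait at most 10 sec
    switch (::WaitForSingleObject(ipc_mutex, kMutexTimeout)) {
      case WAIT_OBJECT_0:    // acquired normally; the common case
        break;
      case WAIT_ABANDONED:   // previous owner exited while holding the
        break;               // mutex; ownership still transfers to us
      case WAIT_TIMEOUT:     // another client held it too long;
      default:               // proceed anyway rather than fail hard
        break;
    }
  }

  ConnectToNamedPipe();  // CreateFile / pipe transaction would go here

  if (ipc_mutex != NULL) {
    // ReleaseMutex fails harmlessly with ERROR_NOT_OWNER if the wait timed
    // out and we never owned the mutex, so an unconditional release is safe.
    ::ReleaseMutex(ipc_mutex);
  }
}
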
494
388495 IPCPathManager *manager = IPCPathManager::GetIPCPathManager(name);
389496 if (manager == NULL) {
390497 LOG(ERROR) << "IPCPathManager::GetIPCPathManager failed";
440547 }
441548
442549 // wait for 10 seconds until the server is ready
443 // TODO(taku): control the timout via flag.
550 // TODO(taku): control the timeout via flag.
444551 #ifdef _DEBUG
445552 const int kNamedPipeTimeout = 100000; // 100 sec
446553 #else
+0 -5 mozc_build_tools/gyp/AUTHORS
0 # Names should be added to this file like so:
1 # Name or Organization <email address>
2
3 Google Inc.
4 Steven Knight <knight@baldmt.com>
+0 -8 mozc_build_tools/gyp/DEPS
0 # DEPS file for gclient use in buildbot execution of gyp tests.
1 #
2 # (You don't need to use gclient for normal GYP development work.)
3
4 deps = {
5 "scons":
6 "svn://chrome-svn.corp.google.com/chrome/trunk/src/third_party/scons",
7 }
+0 -27 mozc_build_tools/gyp/LICENSE
0 Copyright (c) 2009 Google Inc. All rights reserved.
1
2 Redistribution and use in source and binary forms, with or without
3 modification, are permitted provided that the following conditions are
4 met:
5
6 * Redistributions of source code must retain the above copyright
7 notice, this list of conditions and the following disclaimer.
8 * Redistributions in binary form must reproduce the above
9 copyright notice, this list of conditions and the following disclaimer
10 in the documentation and/or other materials provided with the
11 distribution.
12 * Neither the name of Google Inc. nor the names of its
13 contributors may be used to endorse or promote products derived from
14 this software without specific prior written permission.
15
16 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+0 -21 mozc_build_tools/gyp/MANIFEST
0 setup.py
1 gyp
2 LICENSE
3 AUTHORS
4 pylib/gyp/MSVSNew.py
5 pylib/gyp/MSVSProject.py
6 pylib/gyp/MSVSToolFile.py
7 pylib/gyp/MSVSUserFile.py
8 pylib/gyp/MSVSVersion.py
9 pylib/gyp/SCons.py
10 pylib/gyp/__init__.py
11 pylib/gyp/common.py
12 pylib/gyp/input.py
13 pylib/gyp/xcodeproj_file.py
14 pylib/gyp/generator/__init__.py
15 pylib/gyp/generator/gypd.py
16 pylib/gyp/generator/gypsh.py
17 pylib/gyp/generator/make.py
18 pylib/gyp/generator/msvs.py
19 pylib/gyp/generator/scons.py
20 pylib/gyp/generator/xcode.py
+0 -53 mozc_build_tools/gyp/PRESUBMIT.py
0 # Copyright 2010, Google Inc.
1 # All rights reserved.
2 #
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions are
5 # met:
6 #
7 # * Redistributions of source code must retain the above copyright
8 # notice, this list of conditions and the following disclaimer.
9 # * Redistributions in binary form must reproduce the above
10 # copyright notice, this list of conditions and the following disclaimer
11 # in the documentation and/or other materials provided with the
12 # distribution.
13 # * Neither the name of Google Inc. nor the names of its
14 # contributors may be used to endorse or promote products derived from
15 # this software without specific prior written permission.
16 #
17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
29
30 EXCLUDED_PATHS = ()
31
32
33 def CheckChangeOnUpload(input_api, output_api):
34 report = []
35 black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS
36 sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
37 report.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
38 input_api, output_api, sources))
39 return report
40
41
42 def CheckChangeOnCommit(input_api, output_api):
43 report = []
44 black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS
45 sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
46 report.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
47 input_api, output_api, sources))
48 report.extend(input_api.canned_checks.CheckTreeIsOpen(
49 input_api, output_api,
50 'http://gyp-status.appspot.com/status',
51 'http://gyp-status.appspot.com/current'))
52 return report
+0 -10 mozc_build_tools/gyp/codereview.settings
0 # This file is used by gcl to get repository specific information.
1 CODE_REVIEW_SERVER: codereview.chromium.org
2 CC_LIST: gyp-developer@googlegroups.com
3 VIEW_VC: http://code.google.com/p/gyp/source/detail?r=
4 TRY_ON_UPLOAD: True
5 TRYSERVER_PROJECT: gyp
6 TRYSERVER_PATCHLEVEL: 0
7 TRYSERVER_ROOT: trunk
8 TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl
9
+0 -18 mozc_build_tools/gyp/gyp
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import sys
7
8 # TODO(mark): sys.path manipulation is some temporary testing stuff.
9 try:
10 import gyp
11 except ImportError, e:
12 import os.path
13 sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
14 import gyp
15
16 if __name__ == '__main__':
17 sys.exit(gyp.main(sys.argv[1:]))
+0 -5 mozc_build_tools/gyp/gyp.bat
0 @rem Copyright (c) 2009 Google Inc. All rights reserved.
1 @rem Use of this source code is governed by a BSD-style license that can be
2 @rem found in the LICENSE file.
3
4 @python "%~dp0/gyp" %*
+0 -7 mozc_build_tools/gyp/gyp_dummy.c
0 /* Copyright (c) 2009 Google Inc. All rights reserved.
1 * Use of this source code is governed by a BSD-style license that can be
2 * found in the LICENSE file. */
3
4 int main() {
5 return 0;
6 }
+0 -255 mozc_build_tools/gyp/gyptest.py
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 __doc__ = """
7 gyptest.py -- test runner for GYP tests.
8 """
9
10 import os
11 import optparse
12 import subprocess
13 import sys
14
15 class CommandRunner:
16 """
17 Executor class for commands, including "commands" implemented by
18 Python functions.
19 """
20 verbose = True
21 active = True
22
23 def __init__(self, dictionary={}):
24 self.subst_dictionary(dictionary)
25
26 def subst_dictionary(self, dictionary):
27 self._subst_dictionary = dictionary
28
29 def subst(self, string, dictionary=None):
30 """
31 Substitutes (via the format operator) the values in the specified
32 dictionary into the specified command.
33
34 The command can be an (action, string) tuple. In all cases, we
35 perform substitution on strings and don't worry if something isn't
36 a string. (It's probably a Python function to be executed.)
37 """
38 if dictionary is None:
39 dictionary = self._subst_dictionary
40 if dictionary:
41 try:
42 string = string % dictionary
43 except TypeError:
44 pass
45 return string
46
47 def display(self, command, stdout=None, stderr=None):
48 if not self.verbose:
49 return
50 if type(command) == type(()):
51 func = command[0]
52 args = command[1:]
53 s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args)))
54 if type(command) == type([]):
55 # TODO: quote arguments containing spaces
56 # TODO: handle meta characters?
57 s = ' '.join(command)
58 else:
59 s = self.subst(command)
60 if not s.endswith('\n'):
61 s += '\n'
62 sys.stdout.write(s)
63 sys.stdout.flush()
64
65 def execute(self, command, stdout=None, stderr=None):
66 """
67 Executes a single command.
68 """
69 if not self.active:
70 return 0
71 if type(command) == type(''):
72 command = self.subst(command)
73 cmdargs = shlex.split(command)
74 if cmdargs[0] == 'cd':
75 command = (os.chdir,) + tuple(cmdargs[1:])
76 if type(command) == type(()):
77 func = command[0]
78 args = command[1:]
79 return func(*args)
80 else:
81 if stdout is sys.stdout:
82 # Same as passing sys.stdout, except python2.4 doesn't fail on it.
83 subout = None
84 else:
85 # Open pipe for anything else so Popen works on python2.4.
86 subout = subprocess.PIPE
87 if stderr is sys.stderr:
88 # Same as passing sys.stderr, except python2.4 doesn't fail on it.
89 suberr = None
90 elif stderr is None:
91 # Merge with stdout if stderr isn't specified.
92 suberr = subprocess.STDOUT
93 else:
94 # Open pipe for anything else so Popen works on python2.4.
95 suberr = subprocess.PIPE
96 p = subprocess.Popen(command,
97 shell=(sys.platform == 'win32'),
98 stdout=subout,
99 stderr=suberr)
100 p.wait()
101 if stdout is None:
102 self.stdout = p.stdout.read()
103 elif stdout is not sys.stdout:
104 stdout.write(p.stdout.read())
105 if stderr not in (None, sys.stderr):
106 stderr.write(p.stderr.read())
107 return p.returncode
108
109 def run(self, command, display=None, stdout=None, stderr=None):
110 """
111 Runs a single command, displaying it first.
112 """
113 if display is None:
114 display = command
115 self.display(display)
116 return self.execute(command, stdout, stderr)
117
118
119 class Unbuffered:
120 def __init__(self, fp):
121 self.fp = fp
122 def write(self, arg):
123 self.fp.write(arg)
124 self.fp.flush()
125 def __getattr__(self, attr):
126 return getattr(self.fp, attr)
127
128 sys.stdout = Unbuffered(sys.stdout)
129 sys.stderr = Unbuffered(sys.stderr)
130
131
132 def find_all_gyptest_files(directory):
133 result = []
134 for root, dirs, files in os.walk(directory):
135 if '.svn' in dirs:
136 dirs.remove('.svn')
137 result.extend([ os.path.join(root, f) for f in files
138 if f.startswith('gyptest') and f.endswith('.py') ])
139 result.sort()
140 return result
141
142
143 def main(argv=None):
144 if argv is None:
145 argv = sys.argv
146
147 usage = "gyptest.py [-ahlnq] [-f formats] [test ...]"
148 parser = optparse.OptionParser(usage=usage)
149 parser.add_option("-a", "--all", action="store_true",
150 help="run all tests")
151 parser.add_option("-C", "--chdir", action="store", default=None,
152 help="chdir to the specified directory")
153 parser.add_option("-f", "--format", action="store", default='',
154 help="run tests with the specified formats")
155 parser.add_option("-l", "--list", action="store_true",
156 help="list available tests and exit")
157 parser.add_option("-n", "--no-exec", action="store_true",
158 help="no execute, just print the command line")
159 parser.add_option("--passed", action="store_true",
160 help="report passed tests")
161 parser.add_option("--path", action="append", default=[],
162 help="additional $PATH directory")
163 parser.add_option("-q", "--quiet", action="store_true",
164 help="quiet, don't print test command lines")
165 opts, args = parser.parse_args(argv[1:])
166
167 if opts.chdir:
168 os.chdir(opts.chdir)
169
170 if opts.path:
171 os.environ['PATH'] += ':' + ':'.join(opts.path)
172
173 if not args:
174 if not opts.all:
175 sys.stderr.write('Specify -a to get all tests.\n')
176 return 1
177 args = ['test']
178
179 tests = []
180 for arg in args:
181 if os.path.isdir(arg):
182 tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
183 else:
184 tests.append(arg)
185
186 if opts.list:
187 for test in tests:
188 print test
189 sys.exit(0)
190
191 CommandRunner.verbose = not opts.quiet
192 CommandRunner.active = not opts.no_exec
193 cr = CommandRunner()
194
195 os.environ['PYTHONPATH'] = os.path.abspath('test/lib')
196 if not opts.quiet:
197 sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH'])
198
199 passed = []
200 failed = []
201 no_result = []
202
203 if opts.format:
204 format_list = opts.format.split(',')
205 else:
206 # TODO: not duplicate this mapping from pylib/gyp/__init__.py
207 format_list = [ {
208 'freebsd7': 'make',
209 'freebsd8': 'make',
210 'cygwin': 'msvs',
211 'win32': 'msvs',
212 'linux2': 'make',
213 'darwin': 'xcode',
214 }[sys.platform] ]
215
216 for format in format_list:
217 os.environ['TESTGYP_FORMAT'] = format
218 if not opts.quiet:
219 sys.stdout.write('TESTGYP_FORMAT=%s\n' % format)
220
221 for test in tests:
222 status = cr.run([sys.executable, test],
223 stdout=sys.stdout,
224 stderr=sys.stderr)
225 if status == 2:
226 no_result.append(test)
227 elif status:
228 failed.append(test)
229 else:
230 passed.append(test)
231
232 if not opts.quiet:
233 def report(description, tests):
234 if tests:
235 if len(tests) == 1:
236 sys.stdout.write("\n%s the following test:\n" % description)
237 else:
238 fmt = "\n%s the following %d tests:\n"
239 sys.stdout.write(fmt % (description, len(tests)))
240 sys.stdout.write("\t" + "\n\t".join(tests) + "\n")
241
242 if opts.passed:
243 report("Passed", passed)
244 report("Failed", failed)
245 report("No result from", no_result)
246
247 if failed:
248 return 1
249 else:
250 return 0
251
252
253 if __name__ == "__main__":
254 sys.exit(main())
+0 -331 mozc_build_tools/gyp/pylib/gyp/MSVSNew.py
0 #!/usr/bin/python2.4
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """New implementation of Visual Studio project generation for SCons."""
7
8 import common
9 import os
10 import random
11
12 # hashlib is supplied as of Python 2.5 as the replacement interface for md5
13 # and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
14 # available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
15 # preserving 2.4 compatibility.
16 try:
17 import hashlib
18 _new_md5 = hashlib.md5
19 except ImportError:
20 import md5
21 _new_md5 = md5.new
22
23
24 # Initialize random number generator
25 random.seed()
26
27 # GUIDs for project types
28 ENTRY_TYPE_GUIDS = {
29 'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
30 'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
31 }
32
33 #------------------------------------------------------------------------------
34 # Helper functions
35
36
37 def MakeGuid(name, seed='msvs_new'):
38 """Returns a GUID for the specified target name.
39
40 Args:
41 name: Target name.
42 seed: Seed for MD5 hash.
43 Returns:
44 A GUID-like string calculated from the name and seed.
45
46 This generates something which looks like a GUID, but depends only on the
47 name and seed. This means the same name/seed will always generate the same
48 GUID, so that projects and solutions which refer to each other can explicitly
49 determine the GUID to refer to explicitly. It also means that the GUID will
50 not change when the project for a target is rebuilt.
51 """
52 # Calculate a MD5 signature for the seed and name.
53 d = _new_md5(str(seed) + str(name)).hexdigest().upper()
54 # Convert most of the signature to GUID form (discard the rest)
55 guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
56 + '-' + d[20:32] + '}')
57 return guid
58
59 #------------------------------------------------------------------------------
60
61
62 class MSVSFolder:
63 """Folder in a Visual Studio project or solution."""
64
65 def __init__(self, path, name = None, entries = None,
66 guid = None, items = None):
67 """Initializes the folder.
68
69 Args:
70 path: Full path to the folder.
71 name: Name of the folder.
72 entries: List of folder entries to nest inside this folder. May contain
73 Folder or Project objects. May be None, if the folder is empty.
74 guid: GUID to use for folder, if not None.
75 items: List of solution items to include in the folder project. May be
76 None, if the folder does not directly contain items.
77 """
78 if name:
79 self.name = name
80 else:
81 # Use last layer.
82 self.name = os.path.basename(path)
83
84 self.path = path
85 self.guid = guid
86
87 # Copy passed lists (or set to empty lists)
88 self.entries = list(entries or [])
89 self.items = list(items or [])
90
91 self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']
92
93 def get_guid(self):
94 if self.guid is None:
95 # Use consistent guids for folders (so things don't regenerate).
96 self.guid = MakeGuid(self.path, seed='msvs_folder')
97 return self.guid
98
99
100 #------------------------------------------------------------------------------
101
102
103 class MSVSProject:
104 """Visual Studio project."""
105
106 def __init__(self, path, name = None, dependencies = None, guid = None,
107 config_platform_overrides = None):
108 """Initializes the project.
109
110 Args:
111 path: Relative path to project file.
112 name: Name of project. If None, the name will be the same as the base
113 name of the project file.
114 dependencies: List of other Project objects this project is dependent
115 upon, if not None.
116 guid: GUID to use for project, if not None.
117 config_platform_overrides: optional dict of configuration platforms to
118 used in place of the default for this target.
119 """
120 self.path = path
121 self.guid = guid
122
123 if name:
124 self.name = name
125 else:
126 # Use project filename
127 self.name = os.path.splitext(os.path.basename(path))[0]
128
129 # Copy passed lists (or set to empty lists)
130 self.dependencies = list(dependencies or [])
131
132 self.entry_type_guid = ENTRY_TYPE_GUIDS['project']
133
134 if config_platform_overrides:
135 self.config_platform_overrides = config_platform_overrides
136 else:
137 self.config_platform_overrides = {}
138
139 def get_guid(self):
140 if self.guid is None:
141 # Set GUID from path
142 # TODO(rspangler): This is fragile.
143 # 1. We can't just use the project filename sans path, since there could
144 # be multiple projects with the same base name (for example,
145 # foo/unittest.vcproj and bar/unittest.vcproj).
146 # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
147 # GUID is the same whether it's included from base/base.sln or
148 # foo/bar/baz/baz.sln.
149 # 3. The GUID needs to be the same each time this builder is invoked, so
150 # that we don't need to rebuild the solution when the project changes.
151 # 4. We should be able to handle pre-built project files by reading the
152 # GUID from the files.
153 self.guid = MakeGuid(self.name)
154 return self.guid
155
156 #------------------------------------------------------------------------------
157
158
159 class MSVSSolution:
160 """Visual Studio solution."""
161
162 def __init__(self, path, version, entries=None, variants=None,
163 websiteProperties=True):
164 """Initializes the solution.
165
166 Args:
167 path: Path to solution file.
168 version: Format version to emit.
169 entries: List of entries in solution. May contain Folder or Project
170 objects. May be None, if the folder is empty.
171 variants: List of build variant strings. If none, a default list will
172 be used.
173 websiteProperties: Flag to decide if the website properties section
174 is generated.
175 """
176 self.path = path
177 self.websiteProperties = websiteProperties
178 self.version = version
179
180 # Copy passed lists (or set to empty lists)
181 self.entries = list(entries or [])
182
183 if variants:
184 # Copy passed list
185 self.variants = variants[:]
186 else:
187 # Use default
188 self.variants = ['Debug|Win32', 'Release|Win32']
189 # TODO(rspangler): Need to be able to handle a mapping of solution config
190 # to project config. Should we be able to handle variants being a dict,
191 # or add a separate variant_map variable? If it's a dict, we can't
192 # guarantee the order of variants since dict keys aren't ordered.
193
194
195 # TODO(rspangler): Automatically write to disk for now; should delay until
196 # node-evaluation time.
197 self.Write()
198
199
200 def Write(self, writer=common.WriteOnDiff):
201 """Writes the solution file to disk.
202
203 Raises:
204 IndexError: An entry appears multiple times.
205 """
206 # Walk the entry tree and collect all the folders and projects.
207 all_entries = []
208 entries_to_check = self.entries[:]
209 while entries_to_check:
210 # Pop from the beginning of the list to preserve the user's order.
211 e = entries_to_check.pop(0)
212
213 # A project or folder can only appear once in the solution's folder tree.
214 # This also protects from cycles.
215 if e in all_entries:
216 #raise IndexError('Entry "%s" appears more than once in solution' %
217 # e.name)
218 continue
219
220 all_entries.append(e)
221
222 # If this is a folder, check its entries too.
223 if isinstance(e, MSVSFolder):
224 entries_to_check += e.entries
225
226 # Sort by name then guid (so things are in order on vs2008).
227 def NameThenGuid(a, b):
228 if a.name < b.name: return -1
229 if a.name > b.name: return 1
230 if a.get_guid() < b.get_guid(): return -1
231 if a.get_guid() > b.get_guid(): return 1
232 return 0
233
234 all_entries = sorted(all_entries, NameThenGuid)
235
236 # Open file and print header
237 f = writer(self.path)
238 f.write('Microsoft Visual Studio Solution File, '
239 'Format Version %s\r\n' % self.version.SolutionVersion())
240 f.write('# %s\r\n' % self.version.Description())
241
242 # Project entries
243 for e in all_entries:
244 f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
245 e.entry_type_guid, # Entry type GUID
246 e.name, # Folder name
247 e.path.replace('/', '\\'), # Folder name (again)
248 e.get_guid(), # Entry GUID
249 ))
250
251 # TODO(rspangler): Need a way to configure this stuff
252 if self.websiteProperties:
253 f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
254 '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
255 '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
256 '\tEndProjectSection\r\n')
257
258 if isinstance(e, MSVSFolder):
259 if e.items:
260 f.write('\tProjectSection(SolutionItems) = preProject\r\n')
261 for i in e.items:
262 f.write('\t\t%s = %s\r\n' % (i, i))
263 f.write('\tEndProjectSection\r\n')
264
265 if isinstance(e, MSVSProject):
266 if e.dependencies:
267 f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
268 for d in e.dependencies:
269 f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
270 f.write('\tEndProjectSection\r\n')
271
272 f.write('EndProject\r\n')
273
274 # Global section
275 f.write('Global\r\n')
276
277 # Configurations (variants)
278 f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
279 for v in self.variants:
280 f.write('\t\t%s = %s\r\n' % (v, v))
281 f.write('\tEndGlobalSection\r\n')
282
283 # Sort config guids for easier diffing of solution changes.
284 config_guids = []
285 config_guids_overrides = {}
286 for e in all_entries:
287 if isinstance(e, MSVSProject):
288 config_guids.append(e.get_guid())
289 config_guids_overrides[e.get_guid()] = e.config_platform_overrides
290 config_guids.sort()
291
292 f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
293 for g in config_guids:
294 for v in self.variants:
295 nv = config_guids_overrides[g].get(v, v)
296 # Pick which project configuration to build for this solution
297 # configuration.
298 f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
299 g, # Project GUID
300 v, # Solution build configuration
301 nv, # Project build config for that solution config
302 ))
303
304 # Enable project in this solution configuration.
305 f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
306 g, # Project GUID
307 v, # Solution build configuration
308 nv, # Project build config for that solution config
309 ))
310 f.write('\tEndGlobalSection\r\n')
311
312 # TODO(rspangler): Should be able to configure this stuff too (though I've
313 # never seen this be any different)
314 f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
315 f.write('\t\tHideSolutionNode = FALSE\r\n')
316 f.write('\tEndGlobalSection\r\n')
317
318 # Folder mappings
319 # TODO(rspangler): Should omit this section if there are no folders
320 f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
321 for e in all_entries:
322 if not isinstance(e, MSVSFolder):
323 continue # Does not apply to projects, only folders
324 for subentry in e.entries:
325 f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
326 f.write('\tEndGlobalSection\r\n')
327
328 f.write('EndGlobal\r\n')
329
330 f.close()
+0 -244 mozc_build_tools/gyp/pylib/gyp/MSVSProject.py
0 #!/usr/bin/python2.4
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Visual Studio project reader/writer."""
7
8 import common
9 import xml.dom
10 import xml.dom.minidom
11 import MSVSNew
12
13 #------------------------------------------------------------------------------
14
15
16 class Tool(object):
17 """Visual Studio tool."""
18
19 def __init__(self, name, attrs=None):
20 """Initializes the tool.
21
22 Args:
23 name: Tool name.
24 attrs: Dict of tool attributes; may be None.
25 """
26 self.name = name
27 self.attrs = attrs or {}
28
29 def CreateElement(self, doc):
30 """Creates an element for the tool.
31
32 Args:
33 doc: xml.dom.Document object to use for node creation.
34
35 Returns:
36 A new xml.dom.Element for the tool.
37 """
38 node = doc.createElement('Tool')
39 node.setAttribute('Name', self.name)
40 for k, v in self.attrs.items():
41 node.setAttribute(k, v)
42 return node
43
44
45 class Filter(object):
46 """Visual Studio filter - that is, a virtual folder."""
47
48 def __init__(self, name, contents=None):
49 """Initializes the folder.
50
51 Args:
52 name: Filter (folder) name.
53 contents: List of filenames and/or Filter objects contained.
54 """
55 self.name = name
56 self.contents = list(contents or [])
57
58
59 #------------------------------------------------------------------------------
60
61
62 class Writer(object):
63 """Visual Studio XML project writer."""
64
65 def __init__(self, project_path, version):
66 """Initializes the project.
67
68 Args:
69 project_path: Path to the project file.
70 version: Format version to emit.
71 """
72 self.project_path = project_path
73 self.doc = None
74 self.version = version
75
76 def Create(self, name, guid=None, platforms=None):
77 """Creates the project document.
78
79 Args:
80 name: Name of the project.
81 guid: GUID to use for project, if not None.
82 """
83 self.name = name
84 self.guid = guid or MSVSNew.MakeGuid(self.project_path)
85
86 # Default to Win32 for platforms.
87 if not platforms:
88 platforms = ['Win32']
89
90 # Create XML doc
91 xml_impl = xml.dom.getDOMImplementation()
92 self.doc = xml_impl.createDocument(None, 'VisualStudioProject', None)
93
94 # Add attributes to root element
95 self.n_root = self.doc.documentElement
96 self.n_root.setAttribute('ProjectType', 'Visual C++')
97 self.n_root.setAttribute('Version', self.version.ProjectVersion())
98 self.n_root.setAttribute('Name', self.name)
99 self.n_root.setAttribute('ProjectGUID', self.guid)
100 self.n_root.setAttribute('RootNamespace', self.name)
101 self.n_root.setAttribute('Keyword', 'Win32Proj')
102
103 # Add platform list
104 n_platform = self.doc.createElement('Platforms')
105 self.n_root.appendChild(n_platform)
106 for platform in platforms:
107 n = self.doc.createElement('Platform')
108 n.setAttribute('Name', platform)
109 n_platform.appendChild(n)
110
111 # Add tool files section
112 self.n_tool_files = self.doc.createElement('ToolFiles')
113 self.n_root.appendChild(self.n_tool_files)
114
115 # Add configurations section
116 self.n_configs = self.doc.createElement('Configurations')
117 self.n_root.appendChild(self.n_configs)
118
119 # Add empty References section
120 self.n_root.appendChild(self.doc.createElement('References'))
121
122 # Add files section
123 self.n_files = self.doc.createElement('Files')
124 self.n_root.appendChild(self.n_files)
125 # Keep a dict keyed on filename to speed up access.
126 self.n_files_dict = dict()
127
128 # Add empty Globals section
129 self.n_root.appendChild(self.doc.createElement('Globals'))
130
131 def AddToolFile(self, path):
132 """Adds a tool file to the project.
133
134 Args:
135 path: Relative path from project to tool file.
136 """
137 n_tool = self.doc.createElement('ToolFile')
138 n_tool.setAttribute('RelativePath', path)
139 self.n_tool_files.appendChild(n_tool)
140
141 def _AddConfigToNode(self, parent, config_type, config_name, attrs=None,
142 tools=None):
143 """Adds a configuration to the parent node.
144
145 Args:
146 parent: Destination node.
147 config_type: Type of configuration node.
148 config_name: Configuration name.
149 attrs: Dict of configuration attributes; may be None.
150 tools: List of tools (strings or Tool objects); may be None.
151 """
152 # Handle defaults
153 if not attrs:
154 attrs = {}
155 if not tools:
156 tools = []
157
158 # Add configuration node and its attributes
159 n_config = self.doc.createElement(config_type)
160 n_config.setAttribute('Name', config_name)
161 for k, v in attrs.items():
162 n_config.setAttribute(k, v)
163 parent.appendChild(n_config)
164
165 # Add tool nodes and their attributes
166 if tools:
167 for t in tools:
168 if isinstance(t, Tool):
169 n_config.appendChild(t.CreateElement(self.doc))
170 else:
171 n_config.appendChild(Tool(t).CreateElement(self.doc))
172
173 def AddConfig(self, name, attrs=None, tools=None):
174 """Adds a configuration to the project.
175
176 Args:
177 name: Configuration name.
178 attrs: Dict of configuration attributes; may be None.
179 tools: List of tools (strings or Tool objects); may be None.
180 """
181 self._AddConfigToNode(self.n_configs, 'Configuration', name, attrs, tools)
182
183 def _AddFilesToNode(self, parent, files):
184 """Adds files and/or filters to the parent node.
185
186 Args:
187 parent: Destination node
188 files: A list of Filter objects and/or relative paths to files.
189
190 Will call itself recursively, if the files list contains Filter objects.
191 """
192 for f in files:
193 if isinstance(f, Filter):
194 node = self.doc.createElement('Filter')
195 node.setAttribute('Name', f.name)
196 self._AddFilesToNode(node, f.contents)
197 else:
198 node = self.doc.createElement('File')
199 node.setAttribute('RelativePath', f)
200 self.n_files_dict[f] = node
201 parent.appendChild(node)
202
203 def AddFiles(self, files):
204 """Adds files to the project.
205
206 Args:
207 files: A list of Filter objects and/or relative paths to files.
208
209 This makes a copy of the file/filter tree at the time of this call. If you
210 later add files to a Filter object which was passed into a previous call
211 to AddFiles(), it will not be reflected in this project.
212 """
213 self._AddFilesToNode(self.n_files, files)
214 # TODO(rspangler) This also doesn't handle adding files to an existing
215 # filter. That is, it doesn't merge the trees.
216
217 def AddFileConfig(self, path, config, attrs=None, tools=None):
218 """Adds a configuration to a file.
219
220 Args:
221 path: Relative path to the file.
222 config: Name of configuration to add.
223 attrs: Dict of configuration attributes; may be None.
224 tools: List of tools (strings or Tool objects); may be None.
225
226 Raises:
227 ValueError: Relative path does not match any file added via AddFiles().
228 """
229 # Find the file node with the right relative path
230 parent = self.n_files_dict.get(path)
231 if not parent:
232 raise ValueError('AddFileConfig: file "%s" not in project.' % path)
233
234 # Add the config to the file node
235 self._AddConfigToNode(parent, 'FileConfiguration', config, attrs, tools)
236
237 def Write(self, writer=common.WriteOnDiff):
238 """Writes the project file."""
239 f = writer(self.project_path)
240 self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
241 f.close()
242
243 #------------------------------------------------------------------------------
+0 -79 mozc_build_tools/gyp/pylib/gyp/MSVSToolFile.py
0 #!/usr/bin/python2.4
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Visual Studio project reader/writer."""
7
8 import common
9 import xml.dom
10 import xml.dom.minidom
11
12
13 #------------------------------------------------------------------------------
14
15
16 class Writer(object):
17 """Visual Studio XML tool file writer."""
18
19 def __init__(self, tool_file_path):
20 """Initializes the tool file.
21
22 Args:
23 tool_file_path: Path to the tool file.
24 """
25 self.tool_file_path = tool_file_path
26 self.doc = None
27
28 def Create(self, name):
29 """Creates the tool file document.
30
31 Args:
32 name: Name of the tool file.
33 """
34 self.name = name
35
36 # Create XML doc
37 xml_impl = xml.dom.getDOMImplementation()
38 self.doc = xml_impl.createDocument(None, 'VisualStudioToolFile', None)
39
40 # Add attributes to root element
41 self.n_root = self.doc.documentElement
42 self.n_root.setAttribute('Version', '8.00')
43 self.n_root.setAttribute('Name', self.name)
44
45 # Add rules section
46 self.n_rules = self.doc.createElement('Rules')
47 self.n_root.appendChild(self.n_rules)
48
49 def AddCustomBuildRule(self, name, cmd, description,
50 additional_dependencies,
51 outputs, extensions):
52 """Adds a rule to the tool file.
53
54 Args:
55 name: Name of the rule.
56 description: Description of the rule.
57 cmd: Command line of the rule.
58 additional_dependencies: other files which may trigger the rule.
59 outputs: outputs of the rule.
60 extensions: extensions handled by the rule.
61 """
62 n_rule = self.doc.createElement('CustomBuildRule')
63 n_rule.setAttribute('Name', name)
64 n_rule.setAttribute('ExecutionDescription', description)
65 n_rule.setAttribute('CommandLine', cmd)
66 n_rule.setAttribute('Outputs', ';'.join(outputs))
67 n_rule.setAttribute('FileExtensions', ';'.join(extensions))
68 n_rule.setAttribute('AdditionalDependencies',
69 ';'.join(additional_dependencies))
70 self.n_rules.appendChild(n_rule)
71
72 def Write(self, writer=common.WriteOnDiff):
73 """Writes the tool file."""
74 f = writer(self.tool_file_path)
75 self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
76 f.close()
77
78 #------------------------------------------------------------------------------
+0 -182 mozc_build_tools/gyp/pylib/gyp/MSVSUserFile.py
0 #!/usr/bin/python2.4
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Visual Studio user preferences file writer."""
7
8 import common
9 import os
10 import re
11 import socket # for gethostname
12 import xml.dom
13 import xml.dom.minidom
14
15
16 #------------------------------------------------------------------------------
17
18 def _FindCommandInPath(command):
19 """If there are no slashes in the command given, this function
20 searches the PATH env to find the given command, and converts it
21 to an absolute path. We have to do this because MSVS is looking
22 for an actual file to launch a debugger on, not just a command
23 line. Note that this happens at GYP time, so anything needing to
24 be built needs to have a full path."""
25 if '/' in command or '\\' in command:
26 # If the command already has path elements (either relative or
27 # absolute), then assume it is constructed properly.
28 return command
29 else:
30 # Search through the path list and find an existing file that
31 # we can access.
32 paths = os.environ.get('PATH','').split(os.pathsep)
33 for path in paths:
34 item = os.path.join(path, command)
35 if os.path.isfile(item) and os.access(item, os.X_OK):
36 return item
37 return command
38
39 def _QuoteWin32CommandLineArgs(args):
40 new_args = []
41 for arg in args:
42 # Replace all double-quotes with double-double-quotes to escape
43 # them for cmd shell, and then quote the whole thing if there
44 # are any.
45 if arg.find('"') != -1:
46 arg = '""'.join(arg.split('"'))
47 arg = '"%s"' % arg
48
49 # Otherwise, if there are any spaces, quote the whole arg.
50 elif re.search(r'[ \t\n]', arg):
51 arg = '"%s"' % arg
52 new_args.append(arg)
53 return new_args
54
55 class Writer(object):
56 """Visual Studio XML user user file writer."""
57
58 def __init__(self, user_file_path, version):
59 """Initializes the user file.
60
61 Args:
62 user_file_path: Path to the user file.
63 """
64 self.user_file_path = user_file_path
65 self.version = version
66 self.doc = None
67
68 def Create(self, name):
69 """Creates the user file document.
70
71 Args:
72 name: Name of the user file.
73 """
74 self.name = name
75
76 # Create XML doc
77 xml_impl = xml.dom.getDOMImplementation()
78 self.doc = xml_impl.createDocument(None, 'VisualStudioUserFile', None)
79
80 # Add attributes to root element
81 self.n_root = self.doc.documentElement
82 self.n_root.setAttribute('Version', self.version.ProjectVersion())
83 self.n_root.setAttribute('Name', self.name)
84
85 # Add configurations section
86 self.n_configs = self.doc.createElement('Configurations')
87 self.n_root.appendChild(self.n_configs)
88
89 def _AddConfigToNode(self, parent, config_type, config_name):
90 """Adds a configuration to the parent node.
91
92 Args:
93 parent: Destination node.
94 config_type: Type of configuration node.
95 config_name: Configuration name.
96 """
97 # Add configuration node and its attributes
98 n_config = self.doc.createElement(config_type)
99 n_config.setAttribute('Name', config_name)
100 parent.appendChild(n_config)
101
102 def AddConfig(self, name):
103 """Adds a configuration to the project.
104
105 Args:
106 name: Configuration name.
107 """
108 self._AddConfigToNode(self.n_configs, 'Configuration', name)
109
110
111 def AddDebugSettings(self, config_name, command, environment = {},
112 working_directory=""):
113 """Adds a DebugSettings node to the user file for a particular config.
114
115 Args:
116 command: command line to run. First element in the list is the
117 executable. All elements of the command will be quoted if
118 necessary.
119 working_directory: working directory for the debugged command. (optional)
120 """
121 command = _QuoteWin32CommandLineArgs(command)
122
123 n_cmd = self.doc.createElement('DebugSettings')
124 abs_command = _FindCommandInPath(command[0])
125 n_cmd.setAttribute('Command', abs_command)
126 n_cmd.setAttribute('WorkingDirectory', working_directory)
127 n_cmd.setAttribute('CommandArguments', " ".join(command[1:]))
128 n_cmd.setAttribute('RemoteMachine', socket.gethostname())
129
130 if environment and isinstance(environment, dict):
131 n_cmd.setAttribute('Environment',
132 " ".join(['%s="%s"' % (key, val)
133 for (key,val) in environment.iteritems()]))
134 else:
135 n_cmd.setAttribute('Environment', '')
136
137 n_cmd.setAttribute('EnvironmentMerge', 'true')
138
139 # Currently these are all "dummy" values that we're just setting
140 # in the default manner that MSVS does it. We could use some of
141 # these to add additional capabilities, I suppose, but they might
142 # not have parity with other platforms then.
143 n_cmd.setAttribute('Attach', 'false')
144 n_cmd.setAttribute('DebuggerType', '3') # 'auto' debugger
145 n_cmd.setAttribute('Remote', '1')
146 n_cmd.setAttribute('RemoteCommand', '')
147 n_cmd.setAttribute('HttpUrl', '')
148 n_cmd.setAttribute('PDBPath', '')
149 n_cmd.setAttribute('SQLDebugging', '')
150 n_cmd.setAttribute('DebuggerFlavor', '0')
151 n_cmd.setAttribute('MPIRunCommand', '')
152 n_cmd.setAttribute('MPIRunArguments', '')
153 n_cmd.setAttribute('MPIRunWorkingDirectory', '')
154 n_cmd.setAttribute('ApplicationCommand', '')
155 n_cmd.setAttribute('ApplicationArguments', '')
156 n_cmd.setAttribute('ShimCommand', '')
157 n_cmd.setAttribute('MPIAcceptMode', '')
158 n_cmd.setAttribute('MPIAcceptFilter', '')
159
160 # Find the config, and add it if it doesn't exist.
161 found = False
162 for config in self.n_configs.childNodes:
163 if config.getAttribute("Name") == config_name:
164 found = True
165
166 if not found:
167 self.AddConfig(config_name)
168
169 # Add the DebugSettings onto the appropriate config.
170 for config in self.n_configs.childNodes:
171 if config.getAttribute("Name") == config_name:
172 config.appendChild(n_cmd)
173 break
174
175 def Write(self, writer=common.WriteOnDiff):
176 """Writes the user file."""
177 f = writer(self.user_file_path)
178 self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
179 f.close()
180
181 #------------------------------------------------------------------------------
+0 -151 mozc_build_tools/gyp/pylib/gyp/MSVSVersion.py
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Handle version information related to Visual Stuio."""
7
8 import os
9 import re
10 import subprocess
11 import sys
12
13
14 class VisualStudioVersion:
15 """Information regarding a version of Visual Studio."""
16
17 def __init__(self, short_name, description,
18 solution_version, project_version, flat_sln):
19 self.short_name = short_name
20 self.description = description
21 self.solution_version = solution_version
22 self.project_version = project_version
23 self.flat_sln = flat_sln
24
25 def ShortName(self):
26 return self.short_name
27
28 def Description(self):
29 """Get the full description of the version."""
30 return self.description
31
32 def SolutionVersion(self):
33 """Get the version number of the sln files."""
34 return self.solution_version
35
36 def ProjectVersion(self):
37 """Get the version number of the vcproj files."""
38 return self.project_version
39
40 def FlatSolution(self):
41 return self.flat_sln
42
43
44 def _RegistryGetValue(key, value):
45 """Use reg.exe to read a paricular key.
46
47 While ideally we might use the win32 module, we would like gyp to be
48 python neutral, so for instance cygwin python lacks this module.
49
50 Arguments:
51 key: The registry key to read from.
52 value: The particular value to read.
53 Return:
54 The contents there, or None for failure.
55 """
56 # Skip if not on Windows.
57 if sys.platform not in ('win32', 'cygwin'):
58 return None
59 # Run reg.exe.
60 cmd = [os.path.join(os.environ.get('WINDIR', ''), 'System32', 'reg.exe'),
61 'query', key, '/v', value]
62 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
63 text = p.communicate()[0]
64 # Require a successful return value.
65 if p.returncode:
66 return None
67 # Extract value.
68 match = re.search(r'REG_\w+[ ]+([^\r]+)\r\n', text)
69 if not match:
70 return None
71 return match.group(1)
72
73
74 def _CreateVersion(name):
75 versions = {
76 '2008': VisualStudioVersion('2008',
77 'Visual Studio 2008',
78 solution_version='10.00',
79 project_version='9.00',
80 flat_sln=False),
81 '2008e': VisualStudioVersion('2008e',
82 'Visual Studio 2008',
83 solution_version='10.00',
84 project_version='9.00',
85 flat_sln=True),
86 '2005': VisualStudioVersion('2005',
87 'Visual Studio 2005',
88 solution_version='9.00',
89 project_version='8.00',
90 flat_sln=False),
91 '2005e': VisualStudioVersion('2005e',
92 'Visual Studio 2005',
93 solution_version='9.00',
94 project_version='8.00',
95 flat_sln=True),
96 }
97 return versions[str(name)]
98
99
100 def _DetectVisualStudioVersions():
101 """Collect the list of installed visual studio versions.
102
103 Returns:
104 A list of visual studio versions installed in descending order of
105 usage preference.
106 Base this on the registry and a quick check if devenv.exe exists.
107 Only versions 8-9 are considered.
108 Possibilities are:
109 2005 - Visual Studio 2005 (8)
110 2008 - Visual Studio 2008 (9)
111 """
112 version_to_year = {'8.0': '2005', '9.0': '2008'}
113 versions = []
114 for version in ('9.0', '8.0'):
115 # Get the install dir for this version.
116 key = r'HKLM\Software\Microsoft\VisualStudio\%s' % version
117 path = _RegistryGetValue(key, 'InstallDir')
118 if not path:
119 continue
120 # Check for full.
121 if os.path.exists(os.path.join(path, 'devenv.exe')):
122 # Add this one.
123 versions.append(_CreateVersion(version_to_year[version]))
124 # Check for express.
125 elif os.path.exists(os.path.join(path, 'vcexpress.exe')):
126 # Add this one.
127 versions.append(_CreateVersion(version_to_year[version] + 'e'))
128 return versions
129
130
131 def SelectVisualStudioVersion(version='auto'):
132 """Select which version of Visual Studio projects to generate.
133
134 Arguments:
135 version: Hook to allow caller to force a particular version (vs auto).
136 Returns:
137 An object representing a visual studio project format version.
138 """
139 # In auto mode, check environment variable for override.
140 if version == 'auto':
141 version = os.environ.get('GYP_MSVS_VERSION', 'auto')
142 # In auto mode, pick the most preferred version present.
143 if version == 'auto':
144 versions = _DetectVisualStudioVersions()
145 if not versions:
146 # Default to 2005.
147 return _CreateVersion('2005')
148 return versions[0]
149 # Convert version string into a version object.
150 return _CreateVersion(version)
+0 -200 mozc_build_tools/gyp/pylib/gyp/SCons.py
0 #!/usr/bin/env python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """
7 SCons generator.
8
9 This contains class definitions and supporting functions for generating
10 pieces of SCons files for the different types of GYP targets.
11 """
12
13 import os
14
15
16 def WriteList(fp, list, prefix='',
17 separator=',\n ',
18 preamble=None,
19 postamble=None):
20 fp.write(preamble or '')
21 fp.write((separator or ' ').join([prefix + l for l in list]))
22 fp.write(postamble or '')
23
24
25 class TargetBase(object):
26 """
27 Base class for a SCons representation of a GYP target.
28 """
29 is_ignored = False
30 target_prefix = ''
31 target_suffix = ''
32 def __init__(self, spec):
33 self.spec = spec
34 def full_product_name(self):
35 """
36 Returns the full name of the product being built:
37
38 * Uses 'product_name' if it's set, else prefix + 'target_name'.
39 * Prepends 'product_dir' if set.
40 * Appends SCons suffix variables for the target type (or
41 product_extension).
42 """
43 suffix = self.target_suffix
44 product_extension = self.spec.get('product_extension')
45 if product_extension:
46 suffix = '.' + product_extension
47 prefix = self.spec.get('product_prefix', self.target_prefix)
48 name = self.spec['target_name']
49 name = prefix + self.spec.get('product_name', name) + suffix
50 product_dir = self.spec.get('product_dir')
51 if product_dir:
52 name = os.path.join(product_dir, name)
53 else:
54 name = os.path.join(self.out_dir, name)
55 return name
56
57 def write_input_files(self, fp):
58 """
59 Writes the definition of the input files (sources).
60 """
61 sources = self.spec.get('sources')
62 if not sources:
63 fp.write('\ninput_files = []\n')
64 return
65 preamble = '\ninput_files = [\n '
66 postamble = ',\n]\n'
67 WriteList(fp, map(repr, sources), preamble=preamble, postamble=postamble)
68
69 def builder_call(self):
70 """
71 Returns the actual SCons builder call to build this target.
72 """
73 name = self.full_product_name()
74 return 'env.%s(env.File(%r), input_files)' % (self.builder_name, name)
75 def write_target(self, fp, src_dir='', pre=''):
76 """
77 Writes the lines necessary to build this target.
78 """
79 fp.write('\n' + pre)
80 fp.write('_outputs = %s\n' % self.builder_call())
81 fp.write('target_files.extend(_outputs)\n')
82
83
84 class NoneTarget(TargetBase):
85 """
86 A GYP target type of 'none', implicitly or explicitly.
87 """
88 def write_target(self, fp, pre=''):
89 fp.write('\ntarget_files.extend(input_files)\n')
90
91
92 class SettingsTarget(TargetBase):
93 """
94 A GYP target type of 'settings'.
95 """
96 is_ignored = True
97
98
99 compilable_sources_template = """
100 _result = []
101 for infile in input_files:
102 if env.compilable(infile):
103 if (type(infile) == type('')
104 and (infile.startswith(%(src_dir)r)
105 or not os.path.isabs(env.subst(infile)))):
106 # Force files below the build directory by replacing all '..'
107 # elements in the path with '__':
108 base, ext = os.path.splitext(os.path.normpath(infile))
109 base = [d == '..' and '__' or d for d in base.split('/')]
110 base = os.path.join(*base)
111 object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base
112 if not infile.startswith(%(src_dir)r):
113 infile = %(src_dir)r + infile
114 infile = env.%(name)s(object, infile)[0]
115 else:
116 infile = env.%(name)s(infile)[0]
117 _result.append(infile)
118 input_files = _result
119 """
120
121 class CompilableSourcesTargetBase(TargetBase):
122 """
123 An abstract base class for targets that compile their source files.
124
125 We explicitly transform compilable files into object files,
126 even though SCons could infer that for us, because we want
127 to control where the object file ends up. (The implicit rules
128 in SCons always put the object file next to the source file.)
129 """
130 intermediate_builder_name = None
131 def write_target(self, fp, src_dir='', pre=''):
132 if self.intermediate_builder_name is None:
133 raise NotImplementedError
134 if src_dir and not src_dir.endswith('/'):
135 src_dir += '/'
136 variables = {
137 'src_dir': src_dir,
138 'name': self.intermediate_builder_name,
139 }
140 fp.write(compilable_sources_template % variables)
141 super(CompilableSourcesTargetBase, self).write_target(fp)
142
143
144 class ProgramTarget(CompilableSourcesTargetBase):
145 """
146 A GYP target type of 'executable'.
147 """
148 builder_name = 'GypProgram'
149 intermediate_builder_name = 'StaticObject'
150 target_prefix = '${PROGPREFIX}'
151 target_suffix = '${PROGSUFFIX}'
152 out_dir = '${TOP_BUILDDIR}'
153
154
155 class StaticLibraryTarget(CompilableSourcesTargetBase):
156 """
157 A GYP target type of 'static_library'.
158 """
159 builder_name = 'GypStaticLibrary'
160 intermediate_builder_name = 'StaticObject'
161 target_prefix = '${LIBPREFIX}'
162 target_suffix = '${LIBSUFFIX}'
163 out_dir = '${LIB_DIR}'
164
165
166 class SharedLibraryTarget(CompilableSourcesTargetBase):
167 """
168 A GYP target type of 'shared_library'.
169 """
170 builder_name = 'GypSharedLibrary'
171 intermediate_builder_name = 'SharedObject'
172 target_prefix = '${SHLIBPREFIX}'
173 target_suffix = '${SHLIBSUFFIX}'
174 out_dir = '${LIB_DIR}'
175
176
177 class LoadableModuleTarget(CompilableSourcesTargetBase):
178 """
179 A GYP target type of 'loadable_module'.
180 """
181 builder_name = 'GypLoadableModule'
182 intermediate_builder_name = 'SharedObject'
183 target_prefix = '${SHLIBPREFIX}'
184 target_suffix = '${SHLIBSUFFIX}'
185 out_dir = '${TOP_BUILDDIR}'
186
187
188 TargetMap = {
189 None : NoneTarget,
190 'none' : NoneTarget,
191 'settings' : SettingsTarget,
192 'executable' : ProgramTarget,
193 'static_library' : StaticLibraryTarget,
194 'shared_library' : SharedLibraryTarget,
195 'loadable_module' : LoadableModuleTarget,
196 }
197
198 def Target(spec):
199 return TargetMap[spec.get('type')](spec)
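
An illustrative, self-contained sketch (not part of the upstream file, POSIX separators assumed) of how TargetBase.full_product_name() composes an output path; the defaults below are the ProgramTarget class attributes, and the function name and target name are hypothetical.

import os

def full_product_name_sketch(spec, prefix='${PROGPREFIX}',
                             suffix='${PROGSUFFIX}', out_dir='${TOP_BUILDDIR}'):
  # product_extension overrides the SCons suffix variable.
  if spec.get('product_extension'):
    suffix = '.' + spec['product_extension']
  # product_prefix/product_name override the defaults, and product_dir
  # overrides the class-level output directory.
  prefix = spec.get('product_prefix', prefix)
  name = prefix + spec.get('product_name', spec['target_name']) + suffix
  return os.path.join(spec.get('product_dir', out_dir), name)

assert (full_product_name_sketch({'target_name': 'demo_app'}) ==
        '${TOP_BUILDDIR}/${PROGPREFIX}demo_app${PROGSUFFIX}')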
+0
-453
mozc_build_tools/gyp/pylib/gyp/__init__.py
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import copy
7 import gyp.input
8 import optparse
9 import os.path
10 import re
11 import shlex
12 import sys
13
14 # Default debug modes for GYP
15 debug = {}
16
17 # List of "official" debug modes, but you can use anything you like.
18 DEBUG_GENERAL = 'general'
19 DEBUG_VARIABLES = 'variables'
20 DEBUG_INCLUDES = 'includes'
21
22 def DebugOutput(mode, message):
23 if mode in gyp.debug.keys():
24 print "%s: %s" % (mode.upper(), message)
25
26 def FindBuildFiles():
27 extension = '.gyp'
28 files = os.listdir(os.getcwd())
29 build_files = []
30 for file in files:
31 if file[-len(extension):] == extension:
32 build_files.append(file)
33 return build_files
34
35
36 def Load(build_files, format, default_variables={},
37 includes=[], depth='.', params={}, check=False, circular_check=True):
38 """
39 Loads one or more specified build files.
40 default_variables and includes will be copied before use.
41 Returns the generator for the specified format and the
42 data returned by loading the specified build files.
43 """
44 default_variables = copy.copy(default_variables)
45
46 # Default variables provided by this program and its modules should be
47 # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
48 # avoiding collisions with user and automatic variables.
49 default_variables['GENERATOR'] = format
50
51 generator_name = 'gyp.generator.' + format
52 # These parameters are passed in order (as opposed to by key)
53 # because ActivePython cannot handle key parameters to __import__.
54 generator = __import__(generator_name, globals(), locals(), generator_name)
55 for (key, val) in generator.generator_default_variables.items():
56 default_variables.setdefault(key, val)
57
58 # Give the generator the opportunity to set additional variables based on
59 # the params it will receive in the output phase.
60 if getattr(generator, 'CalculateVariables', None):
61 generator.CalculateVariables(default_variables, params)
62
63 # Fetch the generator-specific info that gets fed to input; we use getattr
64 # so we can default things and the generators only have to provide what
65 # they need.
66 generator_input_info = {
67 'generator_wants_absolute_build_file_paths':
68 getattr(generator, 'generator_wants_absolute_build_file_paths', False),
69 'generator_handles_variants':
70 getattr(generator, 'generator_handles_variants', False),
71 'non_configuration_keys':
72 getattr(generator, 'generator_additional_non_configuration_keys', []),
73 'path_sections':
74 getattr(generator, 'generator_additional_path_sections', []),
75 'extra_sources_for_rules':
76 getattr(generator, 'generator_extra_sources_for_rules', []),
77 'generator_supports_multiple_toolsets':
78 getattr(generator, 'generator_supports_multiple_toolsets', False),
79 }
80
81 # Process the input specific to this generator.
82 result = gyp.input.Load(build_files, default_variables, includes[:],
83 depth, generator_input_info, check, circular_check)
84 return [generator] + result
85
86 def NameValueListToDict(name_value_list):
87 """
88 Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
89 of the pairs. If a string is simply NAME, then the value in the dictionary
90 is set to True. If VALUE can be converted to an integer, it is.
91 """
92 result = { }
93 for item in name_value_list:
94 tokens = item.split('=', 1)
95 if len(tokens) == 2:
96 # If we can make it an int, use that, otherwise, use the string.
97 try:
98 token_value = int(tokens[1])
99 except ValueError:
100 token_value = tokens[1]
101 # Set the variable to the supplied value.
102 result[tokens[0]] = token_value
103 else:
104 # No value supplied, treat it as a boolean and set it.
105 result[tokens[0]] = True
106 return result
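
# Illustrative sketch (not part of the upstream file); if appended to this
# module it shows the parsing used for -D/-G values and GYP_DEFINES: ints
# where possible, True for bare names, strings otherwise.
assert NameValueListToDict(['OS=linux', 'chromeos=1', 'use_cups']) == \
    {'OS': 'linux', 'chromeos': 1, 'use_cups': True}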
107
108 def ShlexEnv(env_name):
109 flags = os.environ.get(env_name, [])
110 if flags:
111 flags = shlex.split(flags)
112 return flags
113
114 def FormatOpt(opt, value):
115 if opt.startswith('--'):
116 return '%s=%s' % (opt, value)
117 return opt + value
118
119 def RegenerateAppendFlag(flag, values, predicate, env_name, options):
120 """Regenerate a list of command line flags, for an option of action='append'.
121
122 The |env_name|, if given, is checked in the environment and used to generate
123 an initial list of options, then the options that were specified on the
124 command line (given in |values|) are appended. This matches the handling of
125 environment variables and command line flags where command line flags override
126 the environment, while not requiring the environment to be set when the flags
127 are used again.
128 """
129 flags = []
130 if options.use_environment and env_name:
131 for flag_value in ShlexEnv(env_name):
132 flags.append(FormatOpt(flag, predicate(flag_value)))
133 if values:
134 for flag_value in values:
135 flags.append(FormatOpt(flag, predicate(flag_value)))
136 return flags
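
# Illustrative sketch (not part of the upstream file); if appended to this
# module it shows that environment-derived values come first and command-line
# values follow, each formatted with the flag.  The environment variable name
# and the minimal options stand-in are hypothetical.
class _OptionsSketch(object):
  use_environment = True

os.environ['GYP_DEFINES_SKETCH'] = 'OS=linux'
assert RegenerateAppendFlag('-D', ['chromeos=1'], lambda value: value,
                            'GYP_DEFINES_SKETCH', _OptionsSketch()) == \
    ['-DOS=linux', '-Dchromeos=1']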
137
138 def RegenerateFlags(options):
139 """Given a parsed options object, and taking the environment variables into
140 account, returns a list of flags that should regenerate an equivalent options
141 object (even in the absence of the environment variables.)
142
143 Any path options will be normalized relative to depth.
144
145 The format flag is not included, as it is assumed the calling generator will
146 set that as appropriate.
147 """
148 def FixPath(path):
149 path = gyp.common.FixIfRelativePath(path, options.depth)
150 if not path:
151 return os.path.curdir
152 return path
153
154 def Noop(value):
155 return value
156
157 # We always want to ignore the environment when regenerating, to avoid
158 # duplicate or changed flags in the environment at the time of regeneration.
159 flags = ['--ignore-environment']
160 for name, metadata in options._regeneration_metadata.iteritems():
161 opt = metadata['opt']
162 value = getattr(options, name)
163 value_predicate = metadata['type'] == 'path' and FixPath or Noop
164 action = metadata['action']
165 env_name = metadata['env_name']
166 if action == 'append':
167 flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
168 env_name, options))
169 elif action in ('store', None): # None is a synonym for 'store'.
170 if value:
171 flags.append(FormatOpt(opt, value_predicate(value)))
172 elif options.use_environment and env_name and os.environ.get(env_name):
173 flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
174 elif action in ('store_true', 'store_false'):
175 if ((action == 'store_true' and value) or
176 (action == 'store_false' and not value)):
177 flags.append(opt)
178 elif options.use_environment and env_name:
179 print >>sys.stderr, ('Warning: environment regeneration unimplemented '
180 'for %s flag %r env_name %r' % (action, opt,
181 env_name))
182 else:
183 print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
184 'flag %r' % (action, opt))
185
186 return flags
187
188 class RegeneratableOptionParser(optparse.OptionParser):
189 def __init__(self):
190 self.__regeneratable_options = {}
191 optparse.OptionParser.__init__(self)
192
193 def add_option(self, *args, **kw):
194 """Add an option to the parser.
195
196 This accepts the same arguments as OptionParser.add_option, plus the
197 following:
198 regenerate: can be set to False to prevent this option from being included
199 in regeneration.
200 env_name: name of environment variable that additional values for this
201 option come from.
202 type: adds type='path', to tell the regenerator that the values of
203 this option need to be made relative to options.depth
204 """
205 env_name = kw.pop('env_name', None)
206 if 'dest' in kw and kw.pop('regenerate', True):
207 dest = kw['dest']
208
209 # The path type is needed for regenerating; for optparse we can just treat
210 # it as a string.
211 type = kw.get('type')
212 if type == 'path':
213 kw['type'] = 'string'
214
215 self.__regeneratable_options[dest] = {
216 'action': kw.get('action'),
217 'type': type,
218 'env_name': env_name,
219 'opt': args[0],
220 }
221
222 optparse.OptionParser.add_option(self, *args, **kw)
223
224 def parse_args(self, *args):
225 values, args = optparse.OptionParser.parse_args(self, *args)
226 values._regeneration_metadata = self.__regeneratable_options
227 return values, args
228
229 def main(args):
230 my_name = os.path.basename(sys.argv[0])
231
232 parser = RegeneratableOptionParser()
233 usage = 'usage: %s [options ...] [build_file ...]'
234 parser.set_usage(usage.replace('%s', '%prog'))
235 parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
236 env_name='GYP_DEFINES',
237 help='sets variable VAR to value VAL')
238 parser.add_option('-f', '--format', dest='formats', action='append',
239 env_name='GYP_GENERATORS', regenerate=False,
240 help='output formats to generate')
241 parser.add_option('--msvs-version', dest='msvs_version',
242 regenerate=False,
243 help='Deprecated; use -G msvs_version=MSVS_VERSION instead')
244 parser.add_option('-I', '--include', dest='includes', action='append',
245 metavar='INCLUDE', type='path',
246 help='files to include in all loaded .gyp files')
247 parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
248 help='set DEPTH gyp variable to a relative path to PATH')
249 parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
250 action='append', default=[], help='turn on a debugging '
251 'mode for debugging GYP. Supported modes are "variables" '
252 'and "general"')
253 parser.add_option('-S', '--suffix', dest='suffix', default='',
254 help='suffix to add to generated files')
255 parser.add_option('-G', dest='generator_flags', action='append', default=[],
256 metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
257 help='sets generator flag FLAG to VAL')
258 parser.add_option('--generator-output', dest='generator_output',
259 action='store', default=None, metavar='DIR', type='path',
260 env_name='GYP_GENERATOR_OUTPUT',
261 help='puts generated build files under DIR')
262 parser.add_option('--ignore-environment', dest='use_environment',
263 action='store_false', default=True, regenerate=False,
264 help='do not read options from environment variables')
265 parser.add_option('--check', dest='check', action='store_true',
266 help='check format of gyp files')
267 # --no-circular-check disables the check for circular relationships between
268 # .gyp files. These relationships should not exist, but they've only been
269 # observed to be harmful with the Xcode generator. Chromium's .gyp files
270 # currently have some circular relationships on non-Mac platforms, so this
271 # option allows the strict behavior to be used on Macs and the lenient
272 # behavior to be used elsewhere.
273 # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
274 parser.add_option('--no-circular-check', dest='circular_check',
275 action='store_false', default=True, regenerate=False,
276 help="don't check for circular relationships between files")
277
278 # We read a few things from ~/.gyp, so set up a var for that.
279 home_vars = ['HOME']
280 if sys.platform in ('cygwin', 'win32'):
281 home_vars.append('USERPROFILE')
282 home = None
283 for home_var in home_vars:
284 home = os.getenv(home_var)
285 if home != None:
286 break
287 home_dot_gyp = None
288 if home != None:
289 home_dot_gyp = os.path.join(home, '.gyp')
290 if not os.path.exists(home_dot_gyp):
291 home_dot_gyp = None
292
293 # TODO(thomasvl): add support for ~/.gyp/defaults
294
295 (options, build_files_arg) = parser.parse_args(args)
296 build_files = build_files_arg
297
298 if not options.formats:
299 # If no format was given on the command line, then check the env variable.
300 generate_formats = []
301 if options.use_environment:
302 generate_formats = os.environ.get('GYP_GENERATORS', [])
303 if generate_formats:
304 generate_formats = re.split('[\s,]', generate_formats)
305 if generate_formats:
306 options.formats = generate_formats
307 else:
308 # Nothing in the variable, default based on platform.
309 options.formats = [ {'darwin': 'xcode',
310 'win32': 'msvs',
311 'cygwin': 'msvs',
312 'freebsd7': 'make',
313 'freebsd8': 'make',
314 'linux2': 'make',
315 'openbsd4': 'make',
316 'sunos5': 'make',}[sys.platform] ]
317
318 if not options.generator_output and options.use_environment:
319 g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
320 if g_o:
321 options.generator_output = g_o
322
323 for mode in options.debug:
324 gyp.debug[mode] = 1
325
326 # Do an extra check to avoid work when we're not debugging.
327 if DEBUG_GENERAL in gyp.debug.keys():
328 DebugOutput(DEBUG_GENERAL, 'running with these options:')
329 for (option, value) in options.__dict__.items():
330 if option[0] == '_':
331 continue
332 if isinstance(value, basestring):
333 DebugOutput(DEBUG_GENERAL, " %s: '%s'" % (option, value))
334 else:
335 DebugOutput(DEBUG_GENERAL, " %s: %s" % (option, str(value)))
336
337 if not build_files:
338 build_files = FindBuildFiles()
339 if not build_files:
340 print >>sys.stderr, (usage + '\n\n%s: error: no build_file') % \
341 (my_name, my_name)
342 return 1
343
344 # TODO(mark): Chromium-specific hack!
345 # For Chromium, the gyp "depth" variable should always be a relative path
346 # to Chromium's top-level "src" directory. If no depth variable was set
347 # on the command line, try to find a "src" directory by looking at the
348 # absolute path to each build file's directory. The first "src" component
349 # found will be treated as though it were the path used for --depth.
350 if not options.depth:
351 for build_file in build_files:
352 build_file_dir = os.path.abspath(os.path.dirname(build_file))
353 build_file_dir_components = build_file_dir.split(os.path.sep)
354 components_len = len(build_file_dir_components)
355 for index in xrange(components_len - 1, -1, -1):
356 if build_file_dir_components[index] == 'src':
357 options.depth = os.path.sep.join(build_file_dir_components)
358 break
359 del build_file_dir_components[index]
360
361 # If the inner loop found something, break without advancing to another
362 # build file.
363 if options.depth:
364 break
365
366 if not options.depth:
367 raise Exception, \
368 'Could not automatically locate src directory. This is a ' + \
369 'temporary Chromium feature that will be removed. Use ' + \
370 '--depth as a workaround.'
371
372 # -D on the command line sets variable defaults - D isn't just for define,
373 # it's for default. Perhaps there should be a way to force (-F?) a
374 # variable's value so that it can't be overridden by anything else.
375 cmdline_default_variables = {}
376 defines = []
377 if options.use_environment:
378 defines += ShlexEnv('GYP_DEFINES')
379 if options.defines:
380 defines += options.defines
381 cmdline_default_variables = NameValueListToDict(defines)
382 if DEBUG_GENERAL in gyp.debug.keys():
383 DebugOutput(DEBUG_GENERAL,
384 "cmdline_default_variables: %s" % cmdline_default_variables)
385
386 # Set up includes.
387 includes = []
388
389 # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
390 # .gyp file that's loaded, before anything else is included.
391 if home_dot_gyp != None:
392 default_include = os.path.join(home_dot_gyp, 'include.gypi')
393 if os.path.exists(default_include):
394 includes.append(default_include)
395
396 # Command-line --include files come after the default include.
397 if options.includes:
398 includes.extend(options.includes)
399
400 # Generator flags should be prefixed with the target generator since they
401 # are global across all generator runs.
402 gen_flags = []
403 if options.use_environment:
404 gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
405 if options.generator_flags:
406 gen_flags += options.generator_flags
407 generator_flags = NameValueListToDict(gen_flags)
408 if DEBUG_GENERAL in gyp.debug.keys():
409 DebugOutput(DEBUG_GENERAL, "generator_flags: %s" % generator_flags)
410
411 # TODO: Remove this and the option after we've gotten folks to move to the
412 # generator flag.
413 if options.msvs_version:
414 print >>sys.stderr, \
415 'DEPRECATED: Use generator flag (-G msvs_version=' + \
416 options.msvs_version + ') instead of --msvs-version=' + \
417 options.msvs_version
418 generator_flags['msvs_version'] = options.msvs_version
419
420 # Generate all requested formats (use a set in case we got one format request
421 # twice)
422 for format in set(options.formats):
423 params = {'options': options,
424 'build_files': build_files,
425 'generator_flags': generator_flags,
426 'cwd': os.getcwd(),
427 'build_files_arg': build_files_arg,
428 'gyp_binary': sys.argv[0],
429 'home_dot_gyp': home_dot_gyp}
430
431 # Start with the default variables from the command line.
432 [generator, flat_list, targets, data] = Load(build_files, format,
433 cmdline_default_variables,
434 includes, options.depth,
435 params, options.check,
436 options.circular_check)
437
438 # TODO(mark): Pass |data| for now because the generator needs a list of
439 # build files that came in. In the future, maybe it should just accept
440 # a list, and not the whole data dict.
441 # NOTE: flat_list is the flattened dependency graph specifying the order
442 # that targets may be built. Build systems that operate serially or that
443 # need to have dependencies defined before dependents reference them should
444 # generate targets in the order specified in flat_list.
445 generator.GenerateOutput(flat_list, targets, data, params)
446
447 # Done
448 return 0
449
450
451 if __name__ == '__main__':
452 sys.exit(main(sys.argv[1:]))
+0
-343
mozc_build_tools/gyp/pylib/gyp/common.py
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 import errno
7 import filecmp
8 import os.path
9 import re
10 import tempfile
11 import sys
12
13 def ExceptionAppend(e, msg):
14 """Append a message to the given exception's message."""
15 if not e.args:
16 e.args = (msg,)
17 elif len(e.args) == 1:
18 e.args = (str(e.args[0]) + ' ' + msg,)
19 else:
20 e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
21
22
23 def ParseQualifiedTarget(target):
24 # Splits a qualified target into a build file, target name and toolset.
25
26 # NOTE: rsplit is used to disambiguate the Windows drive letter separator.
27 target_split = target.rsplit(':', 1)
28 if len(target_split) == 2:
29 [build_file, target] = target_split
30 else:
31 build_file = None
32
33 target_split = target.rsplit('#', 1)
34 if len(target_split) == 2:
35 [target, toolset] = target_split
36 else:
37 toolset = None
38
39 return [build_file, target, toolset]
40
41
42 def ResolveTarget(build_file, target, toolset):
43 # This function resolves a target into a canonical form:
44 # - a fully defined build file, either absolute or relative to the current
45 # directory
46 # - a target name
47 # - a toolset
48 #
49 # build_file is the file relative to which 'target' is defined.
50 # target is the qualified target.
51 # toolset is the default toolset for that target.
52 [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
53
54 if parsed_build_file:
55 if build_file:
56 # If a relative path, parsed_build_file is relative to the directory
57 # containing build_file. If build_file is not in the current directory,
58 # parsed_build_file is not a usable path as-is. Resolve it by
59 # interpreting it as relative to build_file. If parsed_build_file is
60 # absolute, it is usable as a path regardless of the current directory,
61 # and os.path.join will return it as-is.
62 build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
63 parsed_build_file))
64 else:
65 build_file = parsed_build_file
66
67 if parsed_toolset:
68 toolset = parsed_toolset
69
70 return [build_file, target, toolset]
71
72
73 def BuildFile(fully_qualified_target):
74 # Extracts the build file from the fully qualified target.
75 return ParseQualifiedTarget(fully_qualified_target)[0]
76
77
78 def QualifiedTarget(build_file, target, toolset):
79 # "Qualified" means the file that a target was defined in and the target
80 # name, separated by a colon, suffixed by a # and the toolset name:
81 # /path/to/file.gyp:target_name#toolset
82 fully_qualified = build_file + ':' + target
83 if toolset:
84 fully_qualified = fully_qualified + '#' + toolset
85 return fully_qualified
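
# Illustrative sketch (not part of the upstream file); if appended to this
# module it shows the build_file:target#toolset form round-tripping through
# QualifiedTarget and ParseQualifiedTarget.  The paths are hypothetical.
assert QualifiedTarget('base/base.gyp', 'base', 'host') == 'base/base.gyp:base#host'
assert ParseQualifiedTarget('base/base.gyp:base#host') == \
    ['base/base.gyp', 'base', 'host']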
86
87
88 def RelativePath(path, relative_to):
89 # Assuming both |path| and |relative_to| are relative to the current
90 # directory, returns a relative path that identifies path relative to
91 # relative_to.
92
93 # Convert to absolute (and therefore normalized paths).
94 path = os.path.abspath(path)
95 relative_to = os.path.abspath(relative_to)
96
97 # Split the paths into components.
98 path_split = path.split(os.path.sep)
99 relative_to_split = relative_to.split(os.path.sep)
100
101 # Determine how much of the prefix the two paths share.
102 prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
103
104 # Put enough ".." components to back up out of relative_to to the common
105 # prefix, and then append the part of path_split after the common prefix.
106 relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
107 path_split[prefix_len:]
108
109 if len(relative_split) == 0:
110 # The paths were the same.
111 return ''
112
113 # Turn it back into a string and we're done.
114 return os.path.join(*relative_split)
115
116
117 def FixIfRelativePath(path, relative_to):
118 # Like RelativePath but returns |path| unchanged if it is absolute.
119 if os.path.isabs(path):
120 return path
121 return RelativePath(path, relative_to)
122
123
124 def UnrelativePath(path, relative_to):
125 # Assuming that |relative_to| is relative to the current directory, and |path|
126 # is a path relative to the dirname of |relative_to|, returns a path that
127 # identifies |path| relative to the current directory.
128 rel_dir = os.path.dirname(relative_to)
129 return os.path.normpath(os.path.join(rel_dir, path))
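
# Illustrative sketch (not part of the upstream file); if appended to this
# module (POSIX separators assumed): RelativePath answers "how do I get from
# relative_to to path", and UnrelativePath re-roots a path that was relative
# to a build file onto the current directory.  The paths are hypothetical.
assert RelativePath('out/Debug/gen', 'out/Release') == '../Debug/gen'
assert UnrelativePath('../base/base.gyp', 'gui/gui.gyp') == 'base/base.gyp'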
130
131
132 # re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
133 # http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
134 # and the documentation for various shells.
135
136 # _quote is a pattern that should match any argument that needs to be quoted
137 # with double-quotes by EncodePOSIXShellArgument. It matches the following
138 # characters appearing anywhere in an argument:
139 # \t, \n, space parameter separators
140 # # comments
141 # $ expansions (quoted to always expand within one argument)
142 # % called out by IEEE 1003.1 XCU.2.2
143 # & job control
144 # ' quoting
145 # (, ) subshell execution
146 # *, ?, [ pathname expansion
147 # ; command delimiter
148 # <, >, | redirection
149 # = assignment
150 # {, } brace expansion (bash)
151 # ~ tilde expansion
152 # It also matches the empty string, because "" (or '') is the only way to
153 # represent an empty string literal argument to a POSIX shell.
154 #
155 # This does not match the characters in _escape, because those need to be
156 # backslash-escaped regardless of whether they appear in a double-quoted
157 # string.
158 _quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
159
160 # _escape is a pattern that should match any character that needs to be
161 # escaped with a backslash, whether or not the argument matched the _quote
162 # pattern. _escape is used with re.sub to backslash anything in _escape's
163 # first match group, hence the (parentheses) in the regular expression.
164 #
165 # _escape matches the following characters appearing anywhere in an argument:
166 # " to prevent POSIX shells from interpreting this character for quoting
167 # \ to prevent POSIX shells from interpreting this character for escaping
168 # ` to prevent POSIX shells from interpreting this character for command
169 # substitution
170 # Missing from this list is $, because the desired behavior of
171 # EncodePOSIXShellArgument is to permit parameter (variable) expansion.
172 #
173 # Also missing from this list is !, which bash will interpret as the history
174 # expansion character when history is enabled. bash does not enable history
175 # by default in non-interactive shells, so this is not thought to be a problem.
176 # ! was omitted from this list because bash interprets "\!" as a literal string
177 # including the backslash character (avoiding history expansion but retaining
178 # the backslash), which would not be correct for argument encoding. Handling
179 # this case properly would also be problematic because bash allows the history
180 # character to be changed with the histchars shell variable. Fortunately,
181 # as history is not enabled in non-interactive shells and
182 # EncodePOSIXShellArgument is only expected to encode for non-interactive
183 # shells, there is no room for error here by ignoring !.
184 _escape = re.compile(r'(["\\`])')
185
186 def EncodePOSIXShellArgument(argument):
187 """Encodes |argument| suitably for consumption by POSIX shells.
188
189 argument may be quoted and escaped as necessary to ensure that POSIX shells
190 treat the returned value as a literal representing the argument passed to
191 this function. Parameter (variable) expansions beginning with $ are allowed
192 to remain intact without escaping the $, to allow the argument to contain
193 references to variables to be expanded by the shell.
194 """
195
196 if not isinstance(argument, str):
197 argument = str(argument)
198
199 if _quote.search(argument):
200 quote = '"'
201 else:
202 quote = ''
203
204 encoded = quote + re.sub(_escape, r'\\\1', argument) + quote
205
206 return encoded
207
208
209 def EncodePOSIXShellList(list):
210 """Encodes |list| suitably for consumption by POSIX shells.
211
212 Returns EncodePOSIXShellArgument for each item in list, and joins them
213 together using the space character as an argument separator.
214 """
215
216 encoded_arguments = []
217 for argument in list:
218 encoded_arguments.append(EncodePOSIXShellArgument(argument))
219 return ' '.join(encoded_arguments)
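
# Illustrative sketch (not part of the upstream file); if appended to this
# module: arguments containing the metacharacters listed above are
# double-quoted, the empty string becomes "", and $ is left alone so the
# shell can still expand variables.
assert EncodePOSIXShellArgument('plain-arg') == 'plain-arg'
assert EncodePOSIXShellArgument('two words') == '"two words"'
assert EncodePOSIXShellArgument('$PRODUCT_DIR/out dir') == '"$PRODUCT_DIR/out dir"'
assert EncodePOSIXShellList(['echo', 'a b', '']) == 'echo "a b" ""'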
220
221
222 def DeepDependencyTargets(target_dicts, roots):
223 """Returns the recursive list of target dependencies.
224 """
225 dependencies = set()
226 for r in roots:
227 spec = target_dicts[r]
228 r_deps = list(set((spec.get('dependencies', []) +
229 spec.get('dependencies_original', []))))
230 for d in r_deps:
231 if d not in roots:
232 dependencies.add(d)
233 for d in DeepDependencyTargets(target_dicts, r_deps):
234 if d not in roots:
235 dependencies.add(d)
236 return list(dependencies)
237
238
239 def BuildFileTargets(target_list, build_file):
240 """From a target_list, returns the subset from the specified build_file.
241 """
242 return [p for p in target_list if BuildFile(p) == build_file]
243
244
245 def AllTargets(target_list, target_dicts, build_file):
246 """Returns all targets (direct and dependencies) for the specified build_file.
247 """
248 bftargets = BuildFileTargets(target_list, build_file)
249 deptargets = DeepDependencyTargets(target_dicts, bftargets)
250 return bftargets + deptargets
251
252
253 def WriteOnDiff(filename):
254 """Write to a file only if the new contents differ.
255
256 Arguments:
257 filename: name of the file to potentially write to.
258 Returns:
259 A file-like object which will write to a temporary file and only overwrite
260 the target if it differs (on close).
261 """
262
263 class Writer:
264 """Wrapper around file which only covers the target if it differs."""
265 def __init__(self):
266 # Pick temporary file.
267 tmp_fd, self.tmp_path = tempfile.mkstemp(
268 suffix='.tmp',
269 prefix=os.path.split(filename)[1] + '.gyp.',
270 dir=os.path.split(filename)[0])
271 try:
272 self.tmp_file = os.fdopen(tmp_fd, 'wb')
273 except Exception:
274 # Don't leave turds behind.
275 os.unlink(self.tmp_path)
276 raise
277
278 def __getattr__(self, attrname):
279 # Delegate everything else to self.tmp_file
280 return getattr(self.tmp_file, attrname)
281
282 def close(self):
283 try:
284 # Close tmp file.
285 self.tmp_file.close()
286 # Determine if different.
287 same = False
288 try:
289 same = filecmp.cmp(self.tmp_path, filename, False)
290 except OSError, e:
291 if e.errno != errno.ENOENT:
292 raise
293
294 if same:
295 # The new file is identical to the old one, just get rid of the new
296 # one.
297 os.unlink(self.tmp_path)
298 else:
299 # The new file is different from the old one, or there is no old one.
300 # Rename the new file to the permanent name.
301 #
302 # tempfile.mkstemp uses an overly restrictive mode, resulting in a
303 # file that can only be read by the owner, regardless of the umask.
304 # There's no reason to not respect the umask here, which means that
305 # an extra hoop is required to fetch it and reset the new file's mode.
306 #
307 # No way to get the umask without setting a new one? Set a safe one
308 # and then set it back to the old value.
309 umask = os.umask(077)
310 os.umask(umask)
311 os.chmod(self.tmp_path, 0666 & ~umask)
312 if sys.platform == 'win32' and os.path.exists(filename):
313 # NOTE: on windows (but not cygwin) rename will not replace an
314 # existing file, so it must be preceded with a remove. Sadly there
315 # is no way to make the switch atomic.
316 os.remove(filename)
317 os.rename(self.tmp_path, filename)
318 except Exception:
319 # Don't leave turds behind.
320 os.unlink(self.tmp_path)
321 raise
322
323 return Writer()
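
# Illustrative usage sketch (not part of the upstream file); if appended to
# this module (os and tempfile are imported above): the second close() sees
# identical contents and leaves the existing file untouched, so tools keyed
# on modification times do not rebuild.  The file name is hypothetical.
_sketch_path = os.path.join(tempfile.mkdtemp(), 'example.mk')
for _ in range(2):
  _sketch_writer = WriteOnDiff(_sketch_path)
  _sketch_writer.write('all:\n')
  _sketch_writer.close()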
324
325
326 # From Alex Martelli,
327 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
328 # ASPN: Python Cookbook: Remove duplicates from a sequence
329 # First comment, dated 2001/10/13.
330 # (Also in the printed Python Cookbook.)
331
332 def uniquer(seq, idfun=None):
333 if idfun is None:
334 def idfun(x): return x
335 seen = {}
336 result = []
337 for item in seq:
338 marker = idfun(item)
339 if marker in seen: continue
340 seen[marker] = 1
341 result.append(item)
342 return result
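
# Illustrative sketch (not part of the upstream file); if appended to this
# module: uniquer() drops later duplicates while preserving order, and idfun
# controls what counts as a duplicate.
assert uniquer([3, 1, 3, 2, 1]) == [3, 1, 2]
assert uniquer(['Foo.CC', 'foo.cc', 'bar.cc'], idfun=str.lower) == ['Foo.CC', 'bar.cc']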
+0
-0
mozc_build_tools/gyp/pylib/gyp/generator/__init__.py
(Empty file)
+0
-88
mozc_build_tools/gyp/pylib/gyp/generator/gypd.py
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """gypd output module
7
8 This module produces gyp input as its output. Output files are given the
9 .gypd extension to avoid overwriting the .gyp files that they are generated
10 from. Internal references to .gyp files (such as those found in
11 "dependencies" sections) are not adjusted to point to .gypd files instead;
12 unlike other paths, which are relative to the .gyp or .gypd file, such paths
13 are relative to the directory from which gyp was run to create the .gypd file.
14
15 This generator module is intended to be a sample and a debugging aid, hence
16 the "d" for "debug" in .gypd. It is useful to inspect the results of the
17 various merges, expansions, and conditional evaluations performed by gyp
18 and to see a representation of what would be fed to a generator module.
19
20 It's not advisable to rename .gypd files produced by this module to .gyp,
21 because they will have all merges, expansions, and evaluations already
22 performed and the relevant constructs not present in the output; paths to
23 dependencies may be wrong; and various sections that do not belong in .gyp
24 files such as "included_files" and "*_excluded" will be present.
25 Output will also be stripped of comments. This is not intended to be a
26 general-purpose gyp pretty-printer; for that, you probably just want to
27 run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
28 comments but won't do all of the other things done to this module's output.
29
30 The specific formatting of the output generated by this module is subject
31 to change.
32 """
33
34
35 import gyp.common
36 import errno
37 import os
38 import pprint
39
40
41 # These variables should just be spit back out as variable references.
42 _generator_identity_variables = [
43 'EXECUTABLE_PREFIX',
44 'EXECUTABLE_SUFFIX',
45 'INTERMEDIATE_DIR',
46 'PRODUCT_DIR',
47 'RULE_INPUT_ROOT',
48 'RULE_INPUT_EXT',
49 'RULE_INPUT_NAME',
50 'RULE_INPUT_PATH',
51 'SHARED_INTERMEDIATE_DIR',
52 ]
53
54 # gypd doesn't define a default value for OS like many other generator
55 # modules. Specify "-D OS=whatever" on the command line to provide a value.
56 generator_default_variables = {
57 }
58
59 # gypd supports multiple toolsets
60 generator_supports_multiple_toolsets = True
61
62 # TODO(mark): This always uses <, which isn't right. The input module should
63 # notify the generator to tell it which phase it is operating in, and this
64 # module should use < for the early phase and then switch to > for the late
65 # phase. Bonus points for carrying @ back into the output too.
66 for v in _generator_identity_variables:
67 generator_default_variables[v] = '<(%s)' % v
68
69
70 def GenerateOutput(target_list, target_dicts, data, params):
71 output_files = {}
72 for qualified_target in target_list:
73 [input_file, target] = \
74 gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
75
76 if input_file[-4:] != '.gyp':
77 continue
78 input_file_stem = input_file[:-4]
79 output_file = input_file_stem + params['options'].suffix + '.gypd'
80
81 if not output_file in output_files:
82 output_files[output_file] = input_file
83
84 for output_file, input_file in output_files.iteritems():
85 output = open(output_file, 'w')
86 pprint.pprint(data[input_file], output)
87 output.close()
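
An illustrative, self-contained sketch (not part of the upstream file) of the output-name rule GenerateOutput() applies above: each foo.gyp that contributed targets is dumped to foo<suffix>.gypd next to it. The helper name and paths are hypothetical.

def gypd_output_name_sketch(input_file, suffix=''):
  # Mirrors GenerateOutput(): only .gyp inputs are considered, and the
  # extension is replaced by <suffix> + '.gypd'.
  if input_file[-4:] != '.gyp':
    return None
  return input_file[:-4] + suffix + '.gypd'

assert gypd_output_name_sketch('gui/gui.gyp') == 'gui/gui.gypd'
assert gypd_output_name_sketch('gui/gui.gyp', '.debug') == 'gui/gui.debug.gypd'
assert gypd_output_name_sketch('README') is None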
+0
-57
mozc_build_tools/gyp/pylib/gyp/generator/gypsh.py
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """gypsh output module
7
8 gypsh is a GYP shell. It's not really a generator per se. All it does is
9 fire up an interactive Python session with a few local variables set to the
10 variables passed to the generator. Like gypd, it's intended as a debugging
11 aid, to facilitate the exploration of .gyp structures after being processed
12 by the input module.
13
14 The expected usage is "gyp -f gypsh -D OS=desired_os".
15 """
16
17
18 import code
19 import sys
20
21
22 # All of this stuff about generator variables was lovingly ripped from gypd.py.
23 # That module has a much better description of what's going on and why.
24 _generator_identity_variables = [
25 'EXECUTABLE_PREFIX',
26 'EXECUTABLE_SUFFIX',
27 'INTERMEDIATE_DIR',
28 'PRODUCT_DIR',
29 'RULE_INPUT_ROOT',
30 'RULE_INPUT_EXT',
31 'RULE_INPUT_NAME',
32 'RULE_INPUT_PATH',
33 'SHARED_INTERMEDIATE_DIR',
34 ]
35
36 generator_default_variables = {
37 }
38
39 for v in _generator_identity_variables:
40 generator_default_variables[v] = '<(%s)' % v
41
42
43 def GenerateOutput(target_list, target_dicts, data, params):
44 locals = {
45 'target_list': target_list,
46 'target_dicts': target_dicts,
47 'data': data,
48 }
49
50 # Use a banner that looks like the stock Python one and like what
51 # code.interact uses by default, but tack on something to indicate what
52 # locals are available, and identify gypsh.
53 banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
54 (sys.version, sys.platform, repr(sorted(locals.keys())))
55
56 code.interact(banner, local=locals)
+0
-1283
mozc_build_tools/gyp/pylib/gyp/generator/make.py
0 #!/usr/bin/python
1
2 # Copyright (c) 2009 Google Inc. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 # Notes:
7 #
8 # This is all roughly based on the Makefile system used by the Linux
9 # kernel, but is a non-recursive make -- we put the entire dependency
10 # graph in front of make and let it figure it out.
11 #
12 # The code below generates a separate .mk file for each target, but
13 # all are sourced by the top-level Makefile. This means that all
14 # variables in .mk-files clobber one another. Be careful to use :=
15 # where appropriate for immediate evaluation, and similarly to watch
16 # that you're not relying on a variable value to last between different
17 # .mk files.
18 #
19 # TODOs:
20 #
21 # Global settings and utility functions are currently stuffed in the
22 # toplevel Makefile. It may make sense to generate some .mk files on
23 # the side to keep the files readable.
24
25 import gyp
26 import gyp.common
27 import os.path
28
29 # Debugging-related imports -- remove me once we're solid.
30 import code
31 import pprint
32
33 generator_default_variables = {
34 'EXECUTABLE_PREFIX': '',
35 'EXECUTABLE_SUFFIX': '',
36 'OS': 'linux',
37 'STATIC_LIB_PREFIX': 'lib',
38 'SHARED_LIB_PREFIX': 'lib',
39 'STATIC_LIB_SUFFIX': '.a',
40 'SHARED_LIB_SUFFIX': '.so',
41 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/geni',
42 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
43 'PRODUCT_DIR': '$(builddir)',
44 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
45 'LIB_DIR': '$(obj).$(TOOLSET)',
46 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
47 'RULE_INPUT_PATH': '$(abspath $<)',
48 'RULE_INPUT_EXT': '$(suffix $<)',
49 'RULE_INPUT_NAME': '$(notdir $<)',
50
51 # This appears unused --- ?
52 'CONFIGURATION_NAME': '$(BUILDTYPE)',
53 }
54
55 # Make supports multiple toolsets
56 generator_supports_multiple_toolsets = True
57
58 def ensure_directory_exists(path):
59 dir = os.path.dirname(path)
60 if dir and not os.path.exists(dir):
61 os.makedirs(dir)
62
63 # Header of toplevel Makefile.
64 # This should go into the build tree, but it's easier to keep it here for now.
65 SHARED_HEADER = ("""\
66 # We borrow heavily from the kernel build setup, though we are simpler since
67 # we don't have Kconfig tweaking settings on us.
68
69 # The implicit make rules have it looking for RCS files, among other things.
70 # We instead explicitly write all the rules we care about.
71 # It's even quicker (saves ~200ms) to pass -r on the command line.
72 MAKEFLAGS=-r
73
74 # The V=1 flag on command line makes us verbosely print command lines.
75 ifdef V
76 quiet=
77 else
78 quiet=quiet_
79 endif
80
81 # Specify BUILDTYPE=Release on the command line for a release build.
82 BUILDTYPE ?= __default_configuration__
83
84 # Directory all our build output goes into.
85 # Note that this must be two directories beneath src/ for unit tests to pass,
86 # as they reach into the src/ directory for data with relative paths.
87 builddir ?= $(builddir_name)/$(BUILDTYPE)
88 abs_builddir := $(abspath $(builddir))
89 depsdir := $(builddir)/.deps
90
91 # Object output directory.
92 obj := $(builddir)/obj
93 abs_obj := $(abspath $(obj))
94
95 # We build up a list of every single one of the targets so we can slurp in the
96 # generated dependency rule Makefiles in one pass.
97 all_deps :=
98
99 # C++ apps need to be linked with g++. Not sure what's appropriate.
100 LINK ?= $(CXX)
101
102 CC.target ?= $(CC)
103 CFLAGS.target ?= $(CFLAGS)
104 CXX.target ?= $(CXX)
105 CXXFLAGS.target ?= $(CXXFLAGS)
106 LINK.target ?= $(LINK)
107 LDFLAGS.target ?= $(LDFLAGS)
108 AR.target ?= $(AR)
109 RANLIB.target ?= ranlib
110
111 CC.host ?= gcc
112 CFLAGS.host ?=
113 CXX.host ?= g++
114 CXXFLAGS.host ?=
115 LINK.host ?= g++
116 LDFLAGS.host ?=
117 AR.host ?= ar
118 RANLIB.host ?= ranlib
119
120 # Flags to make gcc output dependency info. Note that you need to be
121 # careful here to use the flags that ccache and distcc can understand.
122 # We write to a dep file on the side first and then rename at the end
123 # so we can't end up with a broken dep file.
124 depfile = $(depsdir)/$@.d
125 DEPFLAGS = -MMD -MF $(depfile).raw
126
127 # We have to fixup the deps output in a few ways.
128 # (1) the file output should mention the proper .o file.
129 # ccache or distcc lose the path to the target, so we convert a rule of
130 # the form:
131 # foobar.o: DEP1 DEP2
132 # into
133 # path/to/foobar.o: DEP1 DEP2
134 # (2) we want missing files not to cause us to fail to build.
135 # We want to rewrite
136 # foobar.o: DEP1 DEP2 \\
137 # DEP3
138 # to
139 # DEP1:
140 # DEP2:
141 # DEP3:
142 # so if the files are missing, they're just considered phony rules.
143 # We have to do some pretty insane escaping to get those backslashes
144 # and dollar signs past make, the shell, and sed at the same time."""
145 r"""
146 define fixup_dep
147 # Fixup path as in (1).
148 sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
149 # Add extra rules as in (2).
150 # We remove slashes and replace spaces with new lines;
151 # remove blank lines;
152 # delete the first line and append a colon to the remaining lines.
153 sed -e 's|\\||' -e 's| |\n|g' $(depfile).raw |\
154 grep -v '^$$' |\
155 sed -e 1d -e 's|$$|:|' \
156 >> $(depfile)
157 rm $(depfile).raw
158 endef
159 """
160 """
161 # Command definitions:
162 # - cmd_foo is the actual command to run;
163 # - quiet_cmd_foo is the brief-output summary of the command.
164
165 quiet_cmd_cc = CC($(TOOLSET)) $@
166 cmd_cc = $(CC.$(TOOLSET)) $(CFLAGS.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) -c -o $@ $<
167
168 quiet_cmd_cxx = CXX($(TOOLSET)) $@
169 cmd_cxx = $(CXX.$(TOOLSET)) $(CXXFLAGS.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) -c -o $@ $<
170
171 quiet_cmd_alink = AR+RANLIB($(TOOLSET)) $@
172 cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) rc $@ $(filter %.o,$^) && $(RANLIB.$(TOOLSET)) $@
173
174 quiet_cmd_touch = TOUCH $@
175 cmd_touch = touch $@
176
177 quiet_cmd_copy = COPY $@
178 # send stderr to /dev/null to ignore messages when linking directories.
179 cmd_copy = ln -f $< $@ 2>/dev/null || cp -af $< $@
180
181 # Due to circular dependencies between libraries :(, we wrap the
182 # special "figure out circular dependencies" flags around the entire
183 # input list during linking.
184 quiet_cmd_link = LINK($(TOOLSET)) $@
185 cmd_link = $(LINK.$(TOOLSET)) $(LDFLAGS.$(TOOLSET)) $(GYP_LDFLAGS) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
186
187 # Shared-object link (for generating .so).
188 # Set SONAME to the library filename so our binaries don't reference the local,
189 # absolute paths used on the link command-line.
190 # TODO: perhaps this can share with the LINK command above?
191 quiet_cmd_solink = SOLINK($(TOOLSET)) $@
192 cmd_solink = $(LINK.$(TOOLSET)) -shared $(LDFLAGS.$(TOOLSET)) $(GYP_LDFLAGS) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
193 """
194 r"""
195 # Define an escape_quotes function to escape single quotes.
196 # This allows us to handle quotes properly as long as we always use
197 # use single quotes and escape_quotes.
198 escape_quotes = $(subst ','\'',$(1))
199 # This comment is here just to include a ' to unconfuse syntax highlighting.
200 # Define an escape_vars function to escape '$' variable syntax.
201 # This allows us to read/write command lines with shell variables (e.g.
202 # $LD_LIBRARY_PATH), without triggering make substitution.
203 escape_vars = $(subst $$,$$$$,$(1))
204 # Helper that expands to a shell command to echo a string exactly as it is in
205 # make. This uses printf instead of echo because printf's behaviour with respect
206 # to escape sequences is more portable than echo's across different shells
207 # (e.g., dash, bash).
208 exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
209 """
210 """
211 # Helper to compare the command we're about to run against the command
212 # we logged the last time we ran the command. Produces an empty
213 # string (false) when the commands match.
214 # Tricky point: Make has no string-equality test function.
215 # The kernel uses the following, but it seems like it would have false
216 # positives, where one string reordered its arguments.
217 # arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
218 # $(filter-out $(cmd_$@), $(cmd_$(1))))
219 # We instead substitute each for the empty string into the other, and
220 # say they're equal if both substitutions produce the empty string.
221 command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$@)),\\
222 $(subst $(cmd_$@),,$(cmd_$(1))))
223
224 # Helper that is non-empty when a prerequisite changes.
225 # Normally make does this implicitly, but we force rules to always run
226 # so we can check their command lines.
227 # $? -- new prerequisites
228 # $| -- order-only dependencies
229 prereq_changed = $(filter-out $|,$?)
230
231 # do_cmd: run a command via the above cmd_foo names, if necessary.
232 # Should always run for a given target to handle command-line changes.
233 # Second argument, if non-zero, makes it do C/C++ dependency munging.
234 define do_cmd
235 $(if $(or $(command_changed),$(prereq_changed)),
236 @$(call exact_echo, $($(quiet)cmd_$(1)))
237 @mkdir -p $(dir $@) $(dir $(depfile))
238 @$(cmd_$(1))
239 @$(call exact_echo,$(call escape_vars,cmd_$@ := $(cmd_$(1)))) > $(depfile)
240 @$(if $(2),$(fixup_dep))
241 )
242 endef
243
244 # Declare "all" target first so it is the default, even though we don't have the
245 # deps yet.
246 .PHONY: all
247 all:
248
249 # make looks for ways to re-generate included makefiles, but in our case, we
250 # don't have a direct way. Explicitly telling make that it has nothing to do
251 # for them makes it go faster.
252 %.d: ;
253
254 # Use FORCE_DO_CMD to force a target to run. Should be coupled with
255 # do_cmd.
256 .PHONY: FORCE_DO_CMD
257 FORCE_DO_CMD:
258
259 """)
260
261 ROOT_HEADER_SUFFIX_RULES = ("""\
262 # Suffix rules, putting all outputs into $(obj).
263 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
264 @$(call do_cmd,cc,1)
265 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
266 @$(call do_cmd,cc)
267 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
268 @$(call do_cmd,cc)
269 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
270 @$(call do_cmd,cxx,1)
271 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
272 @$(call do_cmd,cxx,1)
273 $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
274 @$(call do_cmd,cxx,1)
275
276 # Try building from generated source, too.
277 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
278 @$(call do_cmd,cc,1)
279 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
280 @$(call do_cmd,cc)
281 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
282 @$(call do_cmd,cc)
283 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
284 @$(call do_cmd,cxx,1)
285 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
286 @$(call do_cmd,cxx,1)
287 $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
288 @$(call do_cmd,cxx,1)
289
290 $(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
291 @$(call do_cmd,cc,1)
292 $(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
293 @$(call do_cmd,cc)
294 $(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
295 @$(call do_cmd,cc)
296 $(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
297 @$(call do_cmd,cxx,1)
298 $(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
299 @$(call do_cmd,cxx,1)
300 $(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
301 @$(call do_cmd,cxx,1)
302 """)
303
304 SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
305 # Suffix rules, putting all outputs into $(obj).
306 """)
307
308 SHARED_HEADER_SUFFIX_RULES_SRCDIR = {
309 '.c': ("""\
310 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
311 @$(call do_cmd,cc,1)
312 """),
313 '.s': ("""\
314 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
315 @$(call do_cmd,cc)
316 """),
317 '.S': ("""\
318 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
319 @$(call do_cmd,cc)
320 """),
321 '.cpp': ("""\
322 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
323 @$(call do_cmd,cxx,1)
324 """),
325 '.cc': ("""\
326 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
327 @$(call do_cmd,cxx,1)
328 """),
329 '.cxx': ("""\
330 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
331 @$(call do_cmd,cxx,1)
332 """),
333 }
334
335 SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
336 # Try building from generated source, too.
337 """)
338
339 SHARED_HEADER_SUFFIX_RULES_OBJDIR1 = {
340 '.c': ("""\
341 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
342 @$(call do_cmd,cc,1)
343 """),
344 '.cc': ("""\
345 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
346 @$(call do_cmd,cxx,1)
347 """),
348 '.cpp': ("""\
349 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
350 @$(call do_cmd,cxx,1)
351 """),
352 }
353
354 SHARED_HEADER_SUFFIX_RULES_OBJDIR2 = {
355 '.c': ("""\
356 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
357 @$(call do_cmd,cc,1)
358 """),
359 '.cc': ("""\
360 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
361 @$(call do_cmd,cxx,1)
362 """),
363 '.cpp': ("""\
364 $(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
365 @$(call do_cmd,cxx,1)
366 """),
367 }
368
369 SHARED_HEADER_SUFFIX_RULES = (
370 SHARED_HEADER_SUFFIX_RULES_COMMENT1 +
371 ''.join(SHARED_HEADER_SUFFIX_RULES_SRCDIR.values()) +
372 SHARED_HEADER_SUFFIX_RULES_COMMENT2 +
373 ''.join(SHARED_HEADER_SUFFIX_RULES_OBJDIR1.values()) +
374 ''.join(SHARED_HEADER_SUFFIX_RULES_OBJDIR2.values())
375 )
376
377 # This gets added to the very beginning of the Makefile.
378 SHARED_HEADER_SRCDIR = ("""\
379 # The source directory tree.
380 srcdir := %s
381
382 """)
383
384 SHARED_HEADER_BUILDDIR_NAME = ("""\
385 # The name of the builddir.
386 builddir_name ?= %s
387
388 """)
389
390 SHARED_FOOTER = """\
391 # "all" is a concatenation of the "all" targets from all the included
392 # sub-makefiles. This is just here to clarify.
393 all:
394
395 # Add in dependency-tracking rules. $(all_deps) is the list of every single
396 # target in our tree. First, only consider targets that already have been
397 # built, as unbuilt targets will be built regardless of dependency info:
398 all_deps := $(wildcard $(sort $(all_deps)))
399 # Of those, only consider the ones with .d (dependency) info:
400 d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
401 ifneq ($(d_files),)
402 include $(d_files)
403 endif
404 """
405
406 header = """\
407 # This file is generated by gyp; do not edit.
408
409 """
410
411
412 def Compilable(filename):
413 """Return true if the file is compilable (should be in OBJS)."""
414 for res in (filename.endswith(e) for e
415 in ['.c', '.cc', '.cpp', '.cxx', '.s', '.S']):
416 if res:
417 return True
418 return False
419
420
421 def Target(filename):
422 """Translate a compilable filename to its .o target."""
423 return os.path.splitext(filename)[0] + '.o'
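
# Illustrative sketch (not part of the upstream file); if appended to this
# module: Compilable() keys off the file extension and Target() maps a source
# path to its object file.  The paths are hypothetical.
assert Compilable('base/util.cc') and Compilable('asm/stub.S')
assert not Compilable('base/util.h')
assert Target('base/util.cc') == 'base/util.o'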
424
425
426 def EscapeShellArgument(s):
427 """Quotes an argument so that it will be interpreted literally by a POSIX
428 shell. Taken from
429 http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
430 """
431 return "'" + s.replace("'", "'\\''") + "'"
432
433
434 def EscapeMakeVariableExpansion(s):
435 """Make has its own variable expansion syntax using $. We must escape it for
436 the string to be interpreted literally."""
437 return s.replace('$', '$$')
438
439
440 def EscapeCppDefine(s):
441 """Escapes a CPP define so that it will reach the compiler unaltered."""
442 s = EscapeShellArgument(s)
443 s = EscapeMakeVariableExpansion(s)
444 return s
445
446
447 def QuoteIfNecessary(string):
448 """TODO: Should this ideally be replaced with one or more of the above
449 functions?"""
450 if '"' in string:
451 string = '"' + string.replace('"', '\\"') + '"'
452 return string
453
454
455 srcdir_prefix = ''
456 def Sourceify(path):
457 """Convert a path to its source directory form."""
458 if '$(' in path:
459 return path
460 if os.path.isabs(path):
461 return path
462 return srcdir_prefix + path
463
464
465 # Map from qualified target to path to output.
466 target_outputs = {}
467 # Map from qualified target to a list of all linker dependencies,
468 # transitively expanded.
469 # Used in building shared-library-based executables.
470 target_link_deps = {}
471
472
473 class MakefileWriter:
474 """MakefileWriter packages up the writing of one target-specific foobar.mk.
475
476 Its only real entry point is Write(); the class is mostly used for namespacing.
477 """
478
479 def Write(self, qualified_target, base_path, output_filename, spec, configs,
480 part_of_all):
481 """The main entry point: writes a .mk file for a single target.
482
483 Arguments:
484 qualified_target: target we're generating
485 base_path: path relative to source root we're building in, used to resolve
486 target-relative paths
487 output_filename: output .mk file name to write
488 spec, configs: gyp info
489 part_of_all: flag indicating this target is part of 'all'
490 """
491 print 'Generating %s' % output_filename
492
493 ensure_directory_exists(output_filename)
494
495 self.fp = open(output_filename, 'w')
496
497 self.fp.write(header)
498
499 self.path = base_path
500 self.target = spec['target_name']
501 self.type = spec['type']
502 self.toolset = spec['toolset']
503
504 deps, link_deps = self.ComputeDeps(spec)
505
506 # Some of the generation below can add extra output, sources, or
507 # link dependencies. All of the out params of the functions that
508 # follow use names like extra_foo.
509 extra_outputs = []
510 extra_sources = []
511 extra_link_deps = []
512
513 self.output = self.ComputeOutput(spec)
514 self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
515 'shared_library')
516 if self.type in self._INSTALLABLE_TARGETS:
517 self.alias = os.path.basename(self.output)
518 else:
519 self.alias = self.output
520
521 self.WriteLn("TOOLSET := " + self.toolset)
522 self.WriteLn("TARGET := " + self.target)
523
524 # Actions must come first, since they can generate more OBJs for use below.
525 if 'actions' in spec:
526 self.WriteActions(spec['actions'], extra_sources, extra_outputs,
527 part_of_all)
528
529 # Rules must be early like actions.
530 if 'rules' in spec:
531 self.WriteRules(spec['rules'], extra_sources, extra_outputs, part_of_all)
532
533 if 'copies' in spec:
534 self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
535
536 all_sources = spec.get('sources', []) + extra_sources
537 if all_sources:
538 self.WriteSources(configs, deps, all_sources,
539 extra_outputs, extra_link_deps, part_of_all)
540 sources = filter(Compilable, all_sources)
541 if sources:
542 self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
543 extensions = set([os.path.splitext(s)[1] for s in sources])
544 for ext in extensions:
545 if ext in SHARED_HEADER_SUFFIX_RULES_SRCDIR:
546 self.WriteLn(SHARED_HEADER_SUFFIX_RULES_SRCDIR[ext])
547 self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
548 for ext in extensions:
549 if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR1:
550 self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR1[ext])
551 for ext in extensions:
552 if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR2:
553 self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR2[ext])
554 self.WriteLn('# End of this set of suffix rules')
555
556
557 self.WriteTarget(spec, configs, deps,
558 extra_link_deps + link_deps, extra_outputs, part_of_all)
559
560 # Update the global map of target outputs, used in dependency tracking.
561 target_outputs[qualified_target] = self.alias
562
563 # Update the global map of link dependencies.
564 if self.type == 'static_library':
565 target_link_deps[qualified_target] = [self.output]
566 elif self.type == 'shared_library':
567 # Anyone that uses us transitively depends on all of our link
568 # dependencies.
569 target_link_deps[qualified_target] = [self.output] + link_deps
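    # Illustrative example (editorial comment, names hypothetical): if libfoo
    # is a shared library whose link_deps are [libbar.a, libbaz.a], then
    #   target_link_deps[qualified_target] == [libfoo.so, libbar.a, libbaz.a]
    # so anything linking libfoo transitively picks up bar and baz, whereas a
    # static library contributes only its own archive.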
570
571 self.fp.close()
572
573
574 def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
575 """Write a "sub-project" Makefile.
576
577 This is a small wrapper Makefile that calls the top-level Makefile to build
578 the targets from a single gyp file (i.e. a sub-project).
579
580 Arguments:
581 output_filename: sub-project Makefile name to write
582 makefile_path: path to the top-level Makefile
583 targets: list of "all" targets for this sub-project
584 build_dir: build output directory, relative to the sub-project
585 """
586 print 'Generating %s' % output_filename
587
588 ensure_directory_exists(output_filename)
589 self.fp = open(output_filename, 'w')
590 self.fp.write(header)
591 # For consistency with other builders, put sub-project build output in the
592 # sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
593 self.WriteLn('export builddir_name ?= %s' %
594 os.path.join(os.path.dirname(output_filename), build_dir))
595 self.WriteLn('.PHONY: all')
596 self.WriteLn('all:')
597 if makefile_path:
598 makefile_path = ' -C ' + makefile_path
599 self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
600 self.fp.close()
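  # Illustrative sketch (editorial comment, not part of the generator): for a
  # hypothetical sub-project Makefile written to chrome/chrome.Makefile with
  # build_dir 'out', the top-level Makefile at '../..' and targets ['app',
  # 'tests'], the method above produces roughly:
  #
  #   # This file is generated by gyp; do not edit.
  #
  #   export builddir_name ?= chrome/out
  #   .PHONY: all
  #   all:
  #   <TAB>$(MAKE) -C ../.. app tests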
601
602
603 def WriteActions(self, actions, extra_sources, extra_outputs, part_of_all):
604 """Write Makefile code for any 'actions' from the gyp input.
605
606 extra_sources: a list that will be filled in with newly generated source
607 files, if any
608 extra_outputs: a list that will be filled in with any outputs of these
609 actions (used to make other pieces dependent on these
610 actions)
611 part_of_all: flag indicating this target is part of 'all'
612 """
613 for action in actions:
614 name = self.target + '_' + action['action_name']
615 self.WriteLn('### Rules for action "%s":' % action['action_name'])
616 inputs = action['inputs']
617 outputs = action['outputs']
618
619 # Build up a list of outputs.
620 # Collect the output dirs we'll need.
621 dirs = set()
622 for out in outputs:
623 dir = os.path.split(out)[0]
624 if dir:
625 dirs.add(dir)
626 if int(action.get('process_outputs_as_sources', False)):
627 extra_sources += outputs
628
629 # Write the actual command.
630 command = gyp.common.EncodePOSIXShellList(action['action'])
631 if 'message' in action:
632 self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
633 else:
634 self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name))
635 if len(dirs) > 0:
636 command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
637 # Set LD_LIBRARY_PATH in case the action runs an executable from this
638 # build which links to shared libs from this build.
639 if self.path:
640 cd_action = 'cd %s; ' % Sourceify(self.path)
641 else:
642 cd_action = ''
643 # Actions run on the host, so they should in theory only use host
644 # libraries, but until everything is made cross-compile safe, also use
645 # target libraries.
646 # TODO(piman): when everything is cross-compile safe, remove lib.target
647 self.WriteLn('cmd_%s = export LD_LIBRARY_PATH=$(builddir)/lib.host:'
648 '$(builddir)/lib.target:$$LD_LIBRARY_PATH; %s%s'
649 % (name, cd_action, command))
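      # Illustrative sketch (editorial comment, names hypothetical): for an
      # action 'bar' on target 'foo' with message "Generating stuff", running
      # "python do.py" from directory src/foo, the two WriteLn calls above
      # emit (as single lines; wrapped here for readability):
      #
      #   quiet_cmd_foo_bar = ACTION Generating stuff $@
      #   cmd_foo_bar = export LD_LIBRARY_PATH=$(builddir)/lib.host:
      #       $(builddir)/lib.target:$$LD_LIBRARY_PATH; cd src/foo; python do.py
      #
      # Any mkdir -p prefix added above and Sourceify()'s srcdir prefix are
      # omitted from this sketch.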
650 self.WriteLn()
651 outputs = map(self.Absolutify, outputs)
652 # The makefile rules are all relative to the top dir, but the gyp actions
653 # are defined relative to their containing dir. This replaces the obj
654 # variable for the action rule with an absolute version so that the output
655 # goes in the right place.
656 # Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
657 # it's superfluous for the "extra outputs", and this avoids accidentally
658 # writing duplicate dummy rules for those outputs.
659 self.WriteMakeRule(outputs[:1], ['obj := $(abs_obj)'])
660 self.WriteMakeRule(outputs[:1], ['builddir := $(abs_builddir)'])