diff --git a/base/const.h b/base/const.h index 74172c6..0b80432 100755 --- a/base/const.h +++ b/base/const.h @@ -49,8 +49,10 @@ = "\\\\.\\mailslot\\googlejapaneseinput.character_pad."; #ifdef GOOGLE_JAPANESE_INPUT_BUILD const char kEventPathPrefix[] = "Global\\GoogleJapaneseInput.event."; +const char kMutexPathPrefix[] = "Global\\GoogleJapaneseInput.mutex."; #else const char kEventPathPrefix[] = "Global\\Mozc.event."; +const char kMutexPathPrefix[] = "Global\\Mozc.mutex."; #endif // GOOGLE_JAPANESE_INPUT_BUILD const char kMozcServerName[] = "GoogleIMEJaConverter.exe"; const char kIMEFile[] = "GoogleIMEJa.ime"; diff --git a/data/images/unix/ime_product_icon_opensource-32.png b/data/images/unix/ime_product_icon_opensource-32.png new file mode 100755 index 0000000..357c92d Binary files /dev/null and b/data/images/unix/ime_product_icon_opensource-32.png differ diff --git a/data/images/unix/ui-alpha_full.png b/data/images/unix/ui-alpha_full.png new file mode 100755 index 0000000..48d32b4 Binary files /dev/null and b/data/images/unix/ui-alpha_full.png differ diff --git a/data/images/unix/ui-alpha_half.png b/data/images/unix/ui-alpha_half.png new file mode 100755 index 0000000..bcddb7d Binary files /dev/null and b/data/images/unix/ui-alpha_half.png differ diff --git a/data/images/unix/ui-dictionary.png b/data/images/unix/ui-dictionary.png new file mode 100755 index 0000000..66dc79b Binary files /dev/null and b/data/images/unix/ui-dictionary.png differ diff --git a/data/images/unix/ui-direct.png b/data/images/unix/ui-direct.png new file mode 100755 index 0000000..55f229a Binary files /dev/null and b/data/images/unix/ui-direct.png differ diff --git a/data/images/unix/ui-hiragana.png b/data/images/unix/ui-hiragana.png new file mode 100755 index 0000000..05e4cf6 Binary files /dev/null and b/data/images/unix/ui-hiragana.png differ diff --git a/data/images/unix/ui-katakana_full.png b/data/images/unix/ui-katakana_full.png new file mode 100755 index 0000000..6fad70d Binary files /dev/null and b/data/images/unix/ui-katakana_full.png differ diff --git a/data/images/unix/ui-katakana_half.png b/data/images/unix/ui-katakana_half.png new file mode 100755 index 0000000..64a5c40 Binary files /dev/null and b/data/images/unix/ui-katakana_half.png differ diff --git a/data/images/unix/ui-properties.png b/data/images/unix/ui-properties.png new file mode 100755 index 0000000..b80b706 Binary files /dev/null and b/data/images/unix/ui-properties.png differ diff --git a/data/images/unix/ui-tool.png b/data/images/unix/ui-tool.png new file mode 100755 index 0000000..156b16d Binary files /dev/null and b/data/images/unix/ui-tool.png differ diff --git a/gui/about_dialog/about_dialog_en.qm b/gui/about_dialog/about_dialog_en.qm index c4732aa..bd6b2a9 100755 Binary files a/gui/about_dialog/about_dialog_en.qm and b/gui/about_dialog/about_dialog_en.qm differ diff --git a/gui/about_dialog/about_dialog_ja.qm b/gui/about_dialog/about_dialog_ja.qm index 8e458d6..f0650ca 100755 Binary files a/gui/about_dialog/about_dialog_ja.qm and b/gui/about_dialog/about_dialog_ja.qm differ diff --git a/gui/administration_dialog/administration_dialog_en.qm b/gui/administration_dialog/administration_dialog_en.qm index ea65a76..f830e6f 100755 Binary files a/gui/administration_dialog/administration_dialog_en.qm and b/gui/administration_dialog/administration_dialog_en.qm differ diff --git a/gui/administration_dialog/administration_dialog_ja.qm b/gui/administration_dialog/administration_dialog_ja.qm index aeea14c..24d9b3a 100755 Binary files 
a/gui/administration_dialog/administration_dialog_ja.qm and b/gui/administration_dialog/administration_dialog_ja.qm differ diff --git a/gui/config_dialog/config_dialog_en.qm b/gui/config_dialog/config_dialog_en.qm index 30550ae..1252553 100755 Binary files a/gui/config_dialog/config_dialog_en.qm and b/gui/config_dialog/config_dialog_en.qm differ diff --git a/gui/config_dialog/config_dialog_ja.qm b/gui/config_dialog/config_dialog_ja.qm index 3880e9e..e0783da 100755 Binary files a/gui/config_dialog/config_dialog_ja.qm and b/gui/config_dialog/config_dialog_ja.qm differ diff --git a/gui/config_dialog/keymap_en.qm b/gui/config_dialog/keymap_en.qm index 34de2b7..84797a7 100755 Binary files a/gui/config_dialog/keymap_en.qm and b/gui/config_dialog/keymap_en.qm differ diff --git a/gui/config_dialog/keymap_ja.qm b/gui/config_dialog/keymap_ja.qm index b7b8031..56a02c6 100755 Binary files a/gui/config_dialog/keymap_ja.qm and b/gui/config_dialog/keymap_ja.qm differ diff --git a/gui/confirmation_dialog/confirmation_dialog_en.qm b/gui/confirmation_dialog/confirmation_dialog_en.qm index 27a625b..fc0951a 100755 Binary files a/gui/confirmation_dialog/confirmation_dialog_en.qm and b/gui/confirmation_dialog/confirmation_dialog_en.qm differ diff --git a/gui/confirmation_dialog/confirmation_dialog_ja.qm b/gui/confirmation_dialog/confirmation_dialog_ja.qm index 88cc35a..cbe295b 100755 Binary files a/gui/confirmation_dialog/confirmation_dialog_ja.qm and b/gui/confirmation_dialog/confirmation_dialog_ja.qm differ diff --git a/gui/dictionary_tool/dictionary_tool_en.qm b/gui/dictionary_tool/dictionary_tool_en.qm index cc5070a..78c97ef 100755 Binary files a/gui/dictionary_tool/dictionary_tool_en.qm and b/gui/dictionary_tool/dictionary_tool_en.qm differ diff --git a/gui/dictionary_tool/dictionary_tool_ja.qm b/gui/dictionary_tool/dictionary_tool_ja.qm index 323780c..7e5a171 100755 Binary files a/gui/dictionary_tool/dictionary_tool_ja.qm and b/gui/dictionary_tool/dictionary_tool_ja.qm differ diff --git a/gui/error_message_dialog/error_message_dialog_en.qm b/gui/error_message_dialog/error_message_dialog_en.qm index 977b1c1..4da917f 100755 Binary files a/gui/error_message_dialog/error_message_dialog_en.qm and b/gui/error_message_dialog/error_message_dialog_en.qm differ diff --git a/gui/error_message_dialog/error_message_dialog_ja.qm b/gui/error_message_dialog/error_message_dialog_ja.qm index 70ec830..ba2212f 100755 Binary files a/gui/error_message_dialog/error_message_dialog_ja.qm and b/gui/error_message_dialog/error_message_dialog_ja.qm differ diff --git a/gui/post_install_dialog/post_install_dialog_en.qm b/gui/post_install_dialog/post_install_dialog_en.qm index febe589..a6d7684 100755 Binary files a/gui/post_install_dialog/post_install_dialog_en.qm and b/gui/post_install_dialog/post_install_dialog_en.qm differ diff --git a/gui/post_install_dialog/post_install_dialog_ja.qm b/gui/post_install_dialog/post_install_dialog_ja.qm index 1190a6d..5e2eace 100755 Binary files a/gui/post_install_dialog/post_install_dialog_ja.qm and b/gui/post_install_dialog/post_install_dialog_ja.qm differ diff --git a/gui/set_default_dialog/set_default_dialog_en.qm b/gui/set_default_dialog/set_default_dialog_en.qm index 0412e32..b877e6d 100755 Binary files a/gui/set_default_dialog/set_default_dialog_en.qm and b/gui/set_default_dialog/set_default_dialog_en.qm differ diff --git a/gui/set_default_dialog/set_default_dialog_ja.qm b/gui/set_default_dialog/set_default_dialog_ja.qm index b955dd6..e25786f 100755 Binary files 
a/gui/set_default_dialog/set_default_dialog_ja.qm and b/gui/set_default_dialog/set_default_dialog_ja.qm differ
diff --git a/ipc/ipc_path_manager.cc b/ipc/ipc_path_manager.cc
index 0ed568f..0e44a16 100755
--- a/ipc/ipc_path_manager.cc
+++ b/ipc/ipc_path_manager.cc
@@ -37,7 +37,12 @@
 #ifdef OS_WINDOWS
 #include <windows.h>
 #include <psapi.h>  // GetModuleFileNameExW
-#endif
+#else
+// For stat system call
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#endif  // OS_WINDOWS

 #ifdef OS_MACOSX
 #include
@@ -176,7 +181,8 @@
     : mutex_(new Mutex),
       ipc_path_info_(new ipc::IPCPathInfo),
       name_(name),
-      server_pid_(0) {}
+      server_pid_(0),
+      last_modified_(-1) {}

 IPCPathManager::~IPCPathManager() {}
@@ -234,6 +240,7 @@
   VLOG(1) << "ServerIPCKey: " << ipc_path_info_->key();
+  last_modified_ = GetIPCFileTimeStamp();
   return true;
 }
@@ -243,7 +250,7 @@
     return false;
   }

-  if (ipc_path_info_->key().empty() && !LoadPathName()) {
+  if ((ShouldReload() || ipc_path_info_->key().empty()) && !LoadPathName()) {
     LOG(ERROR) << "GetPathName failed";
     return false;
   }
@@ -380,6 +387,39 @@
   return false;
 }

+bool IPCPathManager::ShouldReload() const {
+#ifdef OS_WINDOWS
+  // In Windows, no reloading mechanism is necessary because IPC files
+  // are automatically removed.
+  return false;
+#else
+  scoped_lock l(mutex_.get());
+
+  time_t last_modified = GetIPCFileTimeStamp();
+  if (last_modified == last_modified_) {
+    return false;
+  }
+
+  return true;
+#endif  // OS_WINDOWS
+}
+
+time_t IPCPathManager::GetIPCFileTimeStamp() const {
+#ifdef OS_WINDOWS
+  // In Windows, we don't need to get the exact file timestamp, so
+  // we just return -1 at this time.
+  return static_cast<time_t>(-1);
+#else
+  const string filename = GetIPCKeyFileName(name_);
+  struct stat filestat;
+  if (::stat(filename.c_str(), &filestat) == -1) {
+    VLOG(2) << "stat(2) failed. Skipping reload";
+    return static_cast<time_t>(-1);
+  }
+  return filestat.st_mtime;
+#endif  // OS_WINDOWS
+}
+
 bool IPCPathManager::LoadPathName() {
   scoped_lock l(mutex_.get());
@@ -460,6 +500,7 @@
   VLOG(1) << "ClientIPCKey: " << ipc_path_info_->key();
   VLOG(1) << "ProtocolVersion: " << ipc_path_info_->protocol_version();
+  last_modified_ = GetIPCFileTimeStamp();
   return true;
 }
 }  // namespace mozc
diff --git a/ipc/ipc_path_manager.h b/ipc/ipc_path_manager.h
index 44896b2..75f1343 100755
--- a/ipc/ipc_path_manager.h
+++ b/ipc/ipc_path_manager.h
@@ -30,9 +30,16 @@
 #ifndef MOZC_IPC_IPC_PATH_MANAGER_H_
 #define MOZC_IPC_IPC_PATH_MANAGER_H_

+#ifdef OS_WINDOWS
+#include // for time_t
+#else
+#include // for time_t
+#endif  // OS_WINDOWS
 #include
 #include "base/base.h"
 #include "base/mutex.h"
+// For FRIEND_TEST
+#include "testing/base/public/gunit_prod.h"

 namespace mozc {
@@ -93,10 +100,17 @@
   virtual ~IPCPathManager();

  private:
+  FRIEND_TEST(IPCPathManagerTest, ReloadTest);
   // Load ipc name from ~/.mozc/.ipc
   // Note that this method overwrites the ipc_key_
   bool LoadPathName();
+
+  // Returns true if the ipc file has been updated since it was loaded.
+  bool ShouldReload() const;
+
+  // Returns the last modified timestamp of the IPC file.
+  time_t GetIPCFileTimeStamp() const;

   scoped_ptr<ProcessMutex> path_mutex_;  // lock ipc path file
   scoped_ptr<Mutex> mutex_;  // mutex for methods
@@ -104,6 +118,7 @@
   string name_;
   string server_path_;  // cache for server_path
   uint32 server_pid_;   // cache for pid of server_path
+  time_t last_modified_;
 };
 }  // mozc
diff --git a/ipc/ipc_path_manager_test.cc b/ipc/ipc_path_manager_test.cc
index 5089b06..fbfcdc4 100755
--- a/ipc/ipc_path_manager_test.cc
+++ b/ipc/ipc_path_manager_test.cc
@@ -28,6 +28,8 @@
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

 #include "base/base.h"
+#include "base/file_stream.h"
+#include "base/process_mutex.h"
 #include "base/util.h"
 #include "base/thread.h"
 #include "ipc/ipc_path_manager.h"
@@ -72,6 +74,7 @@
       }
     }
   }
 };
+}  // anonymous namespace

 TEST(IPCPathManagerTest, IPCPathManagerTest) {
   mozc::Util::SetUserProfileDirectory(FLAGS_test_tmpdir);
@@ -102,5 +105,29 @@
     threads[i].Join();
   }
 }
-}  // namespace
+
+TEST(IPCPathManagerTest, ReloadTest) {
+  // We have only mock implementations for Windows, so no test should be run.
+#ifndef OS_WINDOWS
+  mozc::IPCPathManager *manager =
+      mozc::IPCPathManager::GetIPCPathManager("reload_test");
+
+  EXPECT_TRUE(manager->CreateNewPathName());
+  EXPECT_TRUE(manager->SavePathName());
+
+  // Just after the save, there is no need to reload.
+  EXPECT_FALSE(manager->ShouldReload());
+
+  // Modify the saved file explicitly.
+  EXPECT_TRUE(manager->path_mutex_->UnLock());
+  Util::Sleep(1000 /* msec */);
+  string filename = Util::JoinPath(
+      Util::GetUserProfileDirectory(), ".reload_test.ipc");
+  OutputFileStream outf(filename.c_str());
+  outf << "foobar";
+  outf.close();
+
+  EXPECT_TRUE(manager->ShouldReload());
+#endif  // OS_WINDOWS
+}
 }  // mozc
diff --git a/ipc/win32_ipc.cc b/ipc/win32_ipc.cc
index 1c0b7c1..ada0408 100755
--- a/ipc/win32_ipc.cc
+++ b/ipc/win32_ipc.cc
@@ -35,7 +35,9 @@
 #include
 #include
 #include "base/base.h"
+#include "base/const.h"
 #include "base/mutex.h"
+#include "base/singleton.h"
 #include "base/util.h"
 #include "ipc/ipc_path_manager.h"
 #include "third_party/mozc/sandbox/security_attributes.h"
@@ -67,6 +69,84 @@
       reinterpret_cast
       (::GetProcAddress(lib, "GetNamedPipeServerProcessId"));
 }
+
+class IPCClientMutex {
+ public:
+  IPCClientMutex() {
+    // Make a kernel mutex object so that multiple ipc connections are
+    // serialized here. In Windows, there is no useful way to serialize
+    // the multiple connections to the single-thread named pipe server.
+    // WaitNamedPipe doesn't work for this purpose as it just lets
+    // clients know that the connection becomes "available" right now.
+    // It doesn't mean that the connection is available for the current
+    // thread. The "available" notification is sent to all waiting ipc
+    // clients at the same time and only one client gets the connection.
+    // This causes redundant and wasteful CreateFile calls.
+    string mutex_name = kMutexPathPrefix;
+    mutex_name += Util::GetUserSidAsString();
+    mutex_name += ".ipc";
+    wstring wmutex_name;
+    Util::UTF8ToWide(mutex_name.c_str(), &wmutex_name);
+
+    LPSECURITY_ATTRIBUTES security_attributes_ptr = NULL;
+    SECURITY_ATTRIBUTES security_attributes;
+    if (!sandbox::MakeSecurityAttributes(&security_attributes)) {
+      LOG(ERROR) << "Cannot make SecurityAttributes";
+    } else {
+      security_attributes_ptr = &security_attributes;
+    }
+
+    // http://msdn.microsoft.com/en-us/library/ms682411(VS.85).aspx:
+    // Two or more processes can call CreateMutex to create the same named
+    // mutex. The first process actually creates the mutex, and subsequent
+    // processes with sufficient access rights simply open a handle to
+    // the existing mutex. This enables multiple processes to get handles
+    // of the same mutex, while relieving the user of the responsibility
+    // of ensuring that the creating process is started first.
+    // When using this technique, you should set the
+    // bInitialOwner flag to FALSE; otherwise, it can be difficult to be
+    // certain which process has initial ownership.
+    ipc_mutex_.reset(::CreateMutex(security_attributes_ptr,
+                                   FALSE, wmutex_name.c_str()));
+
+    if (ipc_mutex_.get() == NULL) {
+      LOG(ERROR) << "CreateMutex failed: " << ::GetLastError();
+      return;
+    }
+
+    // permit access from a process running with low integrity level
+    if (Util::IsVistaOrLater()) {
+      sandbox::SetMandatoryLabelW(ipc_mutex_.get(),
+                                  SE_KERNEL_OBJECT, L"NX", L"LW");
+    }
+  }
+
+  virtual ~IPCClientMutex() {}
+
+  HANDLE get() const {
+    return ipc_mutex_.get();
+  }
+
+ private:
+  ScopedHandle ipc_mutex_;
+};
+
+// RAII class for calling ReleaseMutex in the destructor.
+class ScopedReleaseMutex {
+ public:
+  ScopedReleaseMutex(HANDLE handle)
+      : handle_(handle) {}
+
+  virtual ~ScopedReleaseMutex() {
+    if (NULL != handle_) {
+      ::ReleaseMutex(handle_);
+    }
+  }
+
+  HANDLE get() const { return handle_; }
+ private:
+  HANDLE handle_;
+};

 uint32 GetServerProcessId(HANDLE handle) {
   CallOnce(&g_once, &InitFPGetNamedPipeServerProcessId);
@@ -386,6 +466,33 @@
 void IPCClient::Init(const string &name, const string &server_path) {
   last_ipc_error_ = IPC_NO_CONNECTION;

+  // TODO(taku): IPCClientMutex doesn't take IPC path name into consideration.
+  // Currently, it is not a critical problem, as we only have a single
+  // channel (session).
+  ScopedReleaseMutex ipc_mutex(Singleton<IPCClientMutex>::get()->get());
+
+  if (ipc_mutex.get() == NULL) {
+    LOG(ERROR) << "IPC mutex is not available";
+  } else {
+    const int kMutexTimeout = 10 * 1000;  // wait at most 10sec.
+    switch (::WaitForSingleObject(ipc_mutex.get(), kMutexTimeout)) {
+      case WAIT_TIMEOUT:
+        // TODO(taku): with suspend/resume, WaitForSingleObject may
+        // return WAIT_TIMEOUT. We have to consider the case
+        // in the future.
+        LOG(ERROR) << "IPC client was not available even after "
+                   << kMutexTimeout << " msec.";
+        break;
+      case WAIT_ABANDONED:
+        DLOG(INFO) << "mutex object was removed";
+        break;
+      case WAIT_OBJECT_0:
+        break;
+      default:
+        break;
+    }
+  }
+
   IPCPathManager *manager = IPCPathManager::GetIPCPathManager(name);
   if (manager == NULL) {
     LOG(ERROR) << "IPCPathManager::GetIPCPathManager failed";
@@ -441,7 +548,7 @@
   }

   // wait for 10 second until server is ready
-  // TODO(taku): control the timout via flag.
+  // TODO(taku): control the timeout via flag.
 #ifdef _DEBUG
   const int kNamedPipeTimeout = 100000;  // 100 sec
 #else
diff --git a/mozc_build_tools/gyp/AUTHORS b/mozc_build_tools/gyp/AUTHORS
deleted file mode 100644
index f0b6752..0000000
--- a/mozc_build_tools/gyp/AUTHORS
+++ /dev/null
@@ -1,5 +0,0 @@
-# Names should be added to this file like so:
-# Name or Organization
-
-Google Inc.
-Steven Knight
diff --git a/mozc_build_tools/gyp/DEPS b/mozc_build_tools/gyp/DEPS
deleted file mode 100644
index 4a46eba..0000000
--- a/mozc_build_tools/gyp/DEPS
+++ /dev/null
@@ -1,8 +0,0 @@
-# DEPS file for gclient use in buildbot execution of gyp tests.
-#
-# (You don't need to use gclient for normal GYP development work.)
- -deps = { - "scons": - "svn://chrome-svn.corp.google.com/chrome/trunk/src/third_party/scons", -} diff --git a/mozc_build_tools/gyp/LICENSE b/mozc_build_tools/gyp/LICENSE deleted file mode 100644 index ab6b011..0000000 --- a/mozc_build_tools/gyp/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 Google Inc. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/mozc_build_tools/gyp/MANIFEST b/mozc_build_tools/gyp/MANIFEST deleted file mode 100644 index 925ecc1..0000000 --- a/mozc_build_tools/gyp/MANIFEST +++ /dev/null @@ -1,21 +0,0 @@ -setup.py -gyp -LICENSE -AUTHORS -pylib/gyp/MSVSNew.py -pylib/gyp/MSVSProject.py -pylib/gyp/MSVSToolFile.py -pylib/gyp/MSVSUserFile.py -pylib/gyp/MSVSVersion.py -pylib/gyp/SCons.py -pylib/gyp/__init__.py -pylib/gyp/common.py -pylib/gyp/input.py -pylib/gyp/xcodeproj_file.py -pylib/gyp/generator/__init__.py -pylib/gyp/generator/gypd.py -pylib/gyp/generator/gypsh.py -pylib/gyp/generator/make.py -pylib/gyp/generator/msvs.py -pylib/gyp/generator/scons.py -pylib/gyp/generator/xcode.py diff --git a/mozc_build_tools/gyp/PRESUBMIT.py b/mozc_build_tools/gyp/PRESUBMIT.py deleted file mode 100755 index 4c99288..0000000 --- a/mozc_build_tools/gyp/PRESUBMIT.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2010, Google Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -EXCLUDED_PATHS = () - - -def CheckChangeOnUpload(input_api, output_api): - report = [] - black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS - sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list) - report.extend(input_api.canned_checks.CheckChangeSvnEolStyle( - input_api, output_api, sources)) - return report - - -def CheckChangeOnCommit(input_api, output_api): - report = [] - black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS - sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list) - report.extend(input_api.canned_checks.CheckChangeSvnEolStyle( - input_api, output_api, sources)) - report.extend(input_api.canned_checks.CheckTreeIsOpen( - input_api, output_api, - 'http://gyp-status.appspot.com/status', - 'http://gyp-status.appspot.com/current')) - return report diff --git a/mozc_build_tools/gyp/codereview.settings b/mozc_build_tools/gyp/codereview.settings deleted file mode 100644 index a04a244..0000000 --- a/mozc_build_tools/gyp/codereview.settings +++ /dev/null @@ -1,10 +0,0 @@ -# This file is used by gcl to get repository specific information. -CODE_REVIEW_SERVER: codereview.chromium.org -CC_LIST: gyp-developer@googlegroups.com -VIEW_VC: http://code.google.com/p/gyp/source/detail?r= -TRY_ON_UPLOAD: True -TRYSERVER_PROJECT: gyp -TRYSERVER_PATCHLEVEL: 0 -TRYSERVER_ROOT: trunk -TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl - diff --git a/mozc_build_tools/gyp/gyp b/mozc_build_tools/gyp/gyp deleted file mode 100755 index d52e711..0000000 --- a/mozc_build_tools/gyp/gyp +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -# TODO(mark): sys.path manipulation is some temporary testing stuff. -try: - import gyp -except ImportError, e: - import os.path - sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib')) - import gyp - -if __name__ == '__main__': - sys.exit(gyp.main(sys.argv[1:])) diff --git a/mozc_build_tools/gyp/gyp.bat b/mozc_build_tools/gyp/gyp.bat deleted file mode 100755 index 90fbc6d..0000000 --- a/mozc_build_tools/gyp/gyp.bat +++ /dev/null @@ -1,5 +0,0 @@ -@rem Copyright (c) 2009 Google Inc. All rights reserved. -@rem Use of this source code is governed by a BSD-style license that can be -@rem found in the LICENSE file. - -@python "%~dp0/gyp" %* diff --git a/mozc_build_tools/gyp/gyp_dummy.c b/mozc_build_tools/gyp/gyp_dummy.c deleted file mode 100644 index fb55bbc..0000000 --- a/mozc_build_tools/gyp/gyp_dummy.c +++ /dev/null @@ -1,7 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. 
- * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -int main() { - return 0; -} diff --git a/mozc_build_tools/gyp/gyptest.py b/mozc_build_tools/gyp/gyptest.py deleted file mode 100755 index d9677db..0000000 --- a/mozc_build_tools/gyp/gyptest.py +++ /dev/null @@ -1,255 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -__doc__ = """ -gyptest.py -- test runner for GYP tests. -""" - -import os -import optparse -import subprocess -import sys - -class CommandRunner: - """ - Executor class for commands, including "commands" implemented by - Python functions. - """ - verbose = True - active = True - - def __init__(self, dictionary={}): - self.subst_dictionary(dictionary) - - def subst_dictionary(self, dictionary): - self._subst_dictionary = dictionary - - def subst(self, string, dictionary=None): - """ - Substitutes (via the format operator) the values in the specified - dictionary into the specified command. - - The command can be an (action, string) tuple. In all cases, we - perform substitution on strings and don't worry if something isn't - a string. (It's probably a Python function to be executed.) - """ - if dictionary is None: - dictionary = self._subst_dictionary - if dictionary: - try: - string = string % dictionary - except TypeError: - pass - return string - - def display(self, command, stdout=None, stderr=None): - if not self.verbose: - return - if type(command) == type(()): - func = command[0] - args = command[1:] - s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args))) - if type(command) == type([]): - # TODO: quote arguments containing spaces - # TODO: handle meta characters? - s = ' '.join(command) - else: - s = self.subst(command) - if not s.endswith('\n'): - s += '\n' - sys.stdout.write(s) - sys.stdout.flush() - - def execute(self, command, stdout=None, stderr=None): - """ - Executes a single command. - """ - if not self.active: - return 0 - if type(command) == type(''): - command = self.subst(command) - cmdargs = shlex.split(command) - if cmdargs[0] == 'cd': - command = (os.chdir,) + tuple(cmdargs[1:]) - if type(command) == type(()): - func = command[0] - args = command[1:] - return func(*args) - else: - if stdout is sys.stdout: - # Same as passing sys.stdout, except python2.4 doesn't fail on it. - subout = None - else: - # Open pipe for anything else so Popen works on python2.4. - subout = subprocess.PIPE - if stderr is sys.stderr: - # Same as passing sys.stderr, except python2.4 doesn't fail on it. - suberr = None - elif stderr is None: - # Merge with stdout if stderr isn't specified. - suberr = subprocess.STDOUT - else: - # Open pipe for anything else so Popen works on python2.4. - suberr = subprocess.PIPE - p = subprocess.Popen(command, - shell=(sys.platform == 'win32'), - stdout=subout, - stderr=suberr) - p.wait() - if stdout is None: - self.stdout = p.stdout.read() - elif stdout is not sys.stdout: - stdout.write(p.stdout.read()) - if stderr not in (None, sys.stderr): - stderr.write(p.stderr.read()) - return p.returncode - - def run(self, command, display=None, stdout=None, stderr=None): - """ - Runs a single command, displaying it first. 
- """ - if display is None: - display = command - self.display(display) - return self.execute(command, stdout, stderr) - - -class Unbuffered: - def __init__(self, fp): - self.fp = fp - def write(self, arg): - self.fp.write(arg) - self.fp.flush() - def __getattr__(self, attr): - return getattr(self.fp, attr) - -sys.stdout = Unbuffered(sys.stdout) -sys.stderr = Unbuffered(sys.stderr) - - -def find_all_gyptest_files(directory): - result = [] - for root, dirs, files in os.walk(directory): - if '.svn' in dirs: - dirs.remove('.svn') - result.extend([ os.path.join(root, f) for f in files - if f.startswith('gyptest') and f.endswith('.py') ]) - result.sort() - return result - - -def main(argv=None): - if argv is None: - argv = sys.argv - - usage = "gyptest.py [-ahlnq] [-f formats] [test ...]" - parser = optparse.OptionParser(usage=usage) - parser.add_option("-a", "--all", action="store_true", - help="run all tests") - parser.add_option("-C", "--chdir", action="store", default=None, - help="chdir to the specified directory") - parser.add_option("-f", "--format", action="store", default='', - help="run tests with the specified formats") - parser.add_option("-l", "--list", action="store_true", - help="list available tests and exit") - parser.add_option("-n", "--no-exec", action="store_true", - help="no execute, just print the command line") - parser.add_option("--passed", action="store_true", - help="report passed tests") - parser.add_option("--path", action="append", default=[], - help="additional $PATH directory") - parser.add_option("-q", "--quiet", action="store_true", - help="quiet, don't print test command lines") - opts, args = parser.parse_args(argv[1:]) - - if opts.chdir: - os.chdir(opts.chdir) - - if opts.path: - os.environ['PATH'] += ':' + ':'.join(opts.path) - - if not args: - if not opts.all: - sys.stderr.write('Specify -a to get all tests.\n') - return 1 - args = ['test'] - - tests = [] - for arg in args: - if os.path.isdir(arg): - tests.extend(find_all_gyptest_files(os.path.normpath(arg))) - else: - tests.append(arg) - - if opts.list: - for test in tests: - print test - sys.exit(0) - - CommandRunner.verbose = not opts.quiet - CommandRunner.active = not opts.no_exec - cr = CommandRunner() - - os.environ['PYTHONPATH'] = os.path.abspath('test/lib') - if not opts.quiet: - sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH']) - - passed = [] - failed = [] - no_result = [] - - if opts.format: - format_list = opts.format.split(',') - else: - # TODO: not duplicate this mapping from pylib/gyp/__init__.py - format_list = [ { - 'freebsd7': 'make', - 'freebsd8': 'make', - 'cygwin': 'msvs', - 'win32': 'msvs', - 'linux2': 'make', - 'darwin': 'xcode', - }[sys.platform] ] - - for format in format_list: - os.environ['TESTGYP_FORMAT'] = format - if not opts.quiet: - sys.stdout.write('TESTGYP_FORMAT=%s\n' % format) - - for test in tests: - status = cr.run([sys.executable, test], - stdout=sys.stdout, - stderr=sys.stderr) - if status == 2: - no_result.append(test) - elif status: - failed.append(test) - else: - passed.append(test) - - if not opts.quiet: - def report(description, tests): - if tests: - if len(tests) == 1: - sys.stdout.write("\n%s the following test:\n" % description) - else: - fmt = "\n%s the following %d tests:\n" - sys.stdout.write(fmt % (description, len(tests))) - sys.stdout.write("\t" + "\n\t".join(tests) + "\n") - - if opts.passed: - report("Passed", passed) - report("Failed", failed) - report("No result from", no_result) - - if failed: - return 1 - else: - return 0 - - -if 
__name__ == "__main__": - sys.exit(main()) diff --git a/mozc_build_tools/gyp/pylib/gyp/MSVSNew.py b/mozc_build_tools/gyp/pylib/gyp/MSVSNew.py deleted file mode 100644 index f18e5b6..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/MSVSNew.py +++ /dev/null @@ -1,331 +0,0 @@ -#!/usr/bin/python2.4 - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""New implementation of Visual Studio project generation for SCons.""" - -import common -import os -import random - -# hashlib is supplied as of Python 2.5 as the replacement interface for md5 -# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if -# available, avoiding a deprecation warning under 2.6. Import md5 otherwise, -# preserving 2.4 compatibility. -try: - import hashlib - _new_md5 = hashlib.md5 -except ImportError: - import md5 - _new_md5 = md5.new - - -# Initialize random number generator -random.seed() - -# GUIDs for project types -ENTRY_TYPE_GUIDS = { - 'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}', - 'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}', -} - -#------------------------------------------------------------------------------ -# Helper functions - - -def MakeGuid(name, seed='msvs_new'): - """Returns a GUID for the specified target name. - - Args: - name: Target name. - seed: Seed for MD5 hash. - Returns: - A GUID-line string calculated from the name and seed. - - This generates something which looks like a GUID, but depends only on the - name and seed. This means the same name/seed will always generate the same - GUID, so that projects and solutions which refer to each other can explicitly - determine the GUID to refer to explicitly. It also means that the GUID will - not change when the project for a target is rebuilt. - """ - # Calculate a MD5 signature for the seed and name. - d = _new_md5(str(seed) + str(name)).hexdigest().upper() - # Convert most of the signature to GUID form (discard the rest) - guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20] - + '-' + d[20:32] + '}') - return guid - -#------------------------------------------------------------------------------ - - -class MSVSFolder: - """Folder in a Visual Studio project or solution.""" - - def __init__(self, path, name = None, entries = None, - guid = None, items = None): - """Initializes the folder. - - Args: - path: Full path to the folder. - name: Name of the folder. - entries: List of folder entries to nest inside this folder. May contain - Folder or Project objects. May be None, if the folder is empty. - guid: GUID to use for folder, if not None. - items: List of solution items to include in the folder project. May be - None, if the folder does not directly contain items. - """ - if name: - self.name = name - else: - # Use last layer. - self.name = os.path.basename(path) - - self.path = path - self.guid = guid - - # Copy passed lists (or set to empty lists) - self.entries = list(entries or []) - self.items = list(items or []) - - self.entry_type_guid = ENTRY_TYPE_GUIDS['folder'] - - def get_guid(self): - if self.guid is None: - # Use consistent guids for folders (so things don't regenerate). 
- self.guid = MakeGuid(self.path, seed='msvs_folder') - return self.guid - - -#------------------------------------------------------------------------------ - - -class MSVSProject: - """Visual Studio project.""" - - def __init__(self, path, name = None, dependencies = None, guid = None, - config_platform_overrides = None): - """Initializes the project. - - Args: - path: Relative path to project file. - name: Name of project. If None, the name will be the same as the base - name of the project file. - dependencies: List of other Project objects this project is dependent - upon, if not None. - guid: GUID to use for project, if not None. - config_platform_overrides: optional dict of configuration platforms to - used in place of the default for this target. - """ - self.path = path - self.guid = guid - - if name: - self.name = name - else: - # Use project filename - self.name = os.path.splitext(os.path.basename(path))[0] - - # Copy passed lists (or set to empty lists) - self.dependencies = list(dependencies or []) - - self.entry_type_guid = ENTRY_TYPE_GUIDS['project'] - - if config_platform_overrides: - self.config_platform_overrides = config_platform_overrides - else: - self.config_platform_overrides = {} - - def get_guid(self): - if self.guid is None: - # Set GUID from path - # TODO(rspangler): This is fragile. - # 1. We can't just use the project filename sans path, since there could - # be multiple projects with the same base name (for example, - # foo/unittest.vcproj and bar/unittest.vcproj). - # 2. The path needs to be relative to $SOURCE_ROOT, so that the project - # GUID is the same whether it's included from base/base.sln or - # foo/bar/baz/baz.sln. - # 3. The GUID needs to be the same each time this builder is invoked, so - # that we don't need to rebuild the solution when the project changes. - # 4. We should be able to handle pre-built project files by reading the - # GUID from the files. - self.guid = MakeGuid(self.name) - return self.guid - -#------------------------------------------------------------------------------ - - -class MSVSSolution: - """Visual Studio solution.""" - - def __init__(self, path, version, entries=None, variants=None, - websiteProperties=True): - """Initializes the solution. - - Args: - path: Path to solution file. - version: Format version to emit. - entries: List of entries in solution. May contain Folder or Project - objects. May be None, if the folder is empty. - variants: List of build variant strings. If none, a default list will - be used. - websiteProperties: Flag to decide if the website properties section - is generated. - """ - self.path = path - self.websiteProperties = websiteProperties - self.version = version - - # Copy passed lists (or set to empty lists) - self.entries = list(entries or []) - - if variants: - # Copy passed list - self.variants = variants[:] - else: - # Use default - self.variants = ['Debug|Win32', 'Release|Win32'] - # TODO(rspangler): Need to be able to handle a mapping of solution config - # to project config. Should we be able to handle variants being a dict, - # or add a separate variant_map variable? If it's a dict, we can't - # guarantee the order of variants since dict keys aren't ordered. - - - # TODO(rspangler): Automatically write to disk for now; should delay until - # node-evaluation time. - self.Write() - - - def Write(self, writer=common.WriteOnDiff): - """Writes the solution file to disk. - - Raises: - IndexError: An entry appears multiple times. 
- """ - # Walk the entry tree and collect all the folders and projects. - all_entries = [] - entries_to_check = self.entries[:] - while entries_to_check: - # Pop from the beginning of the list to preserve the user's order. - e = entries_to_check.pop(0) - - # A project or folder can only appear once in the solution's folder tree. - # This also protects from cycles. - if e in all_entries: - #raise IndexError('Entry "%s" appears more than once in solution' % - # e.name) - continue - - all_entries.append(e) - - # If this is a folder, check its entries too. - if isinstance(e, MSVSFolder): - entries_to_check += e.entries - - # Sort by name then guid (so things are in order on vs2008). - def NameThenGuid(a, b): - if a.name < b.name: return -1 - if a.name > b.name: return 1 - if a.get_guid() < b.get_guid(): return -1 - if a.get_guid() > b.get_guid(): return 1 - return 0 - - all_entries = sorted(all_entries, NameThenGuid) - - # Open file and print header - f = writer(self.path) - f.write('Microsoft Visual Studio Solution File, ' - 'Format Version %s\r\n' % self.version.SolutionVersion()) - f.write('# %s\r\n' % self.version.Description()) - - # Project entries - for e in all_entries: - f.write('Project("%s") = "%s", "%s", "%s"\r\n' % ( - e.entry_type_guid, # Entry type GUID - e.name, # Folder name - e.path.replace('/', '\\'), # Folder name (again) - e.get_guid(), # Entry GUID - )) - - # TODO(rspangler): Need a way to configure this stuff - if self.websiteProperties: - f.write('\tProjectSection(WebsiteProperties) = preProject\r\n' - '\t\tDebug.AspNetCompiler.Debug = "True"\r\n' - '\t\tRelease.AspNetCompiler.Debug = "False"\r\n' - '\tEndProjectSection\r\n') - - if isinstance(e, MSVSFolder): - if e.items: - f.write('\tProjectSection(SolutionItems) = preProject\r\n') - for i in e.items: - f.write('\t\t%s = %s\r\n' % (i, i)) - f.write('\tEndProjectSection\r\n') - - if isinstance(e, MSVSProject): - if e.dependencies: - f.write('\tProjectSection(ProjectDependencies) = postProject\r\n') - for d in e.dependencies: - f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid())) - f.write('\tEndProjectSection\r\n') - - f.write('EndProject\r\n') - - # Global section - f.write('Global\r\n') - - # Configurations (variants) - f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n') - for v in self.variants: - f.write('\t\t%s = %s\r\n' % (v, v)) - f.write('\tEndGlobalSection\r\n') - - # Sort config guids for easier diffing of solution changes. - config_guids = [] - config_guids_overrides = {} - for e in all_entries: - if isinstance(e, MSVSProject): - config_guids.append(e.get_guid()) - config_guids_overrides[e.get_guid()] = e.config_platform_overrides - config_guids.sort() - - f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n') - for g in config_guids: - for v in self.variants: - nv = config_guids_overrides[g].get(v, v) - # Pick which project configuration to build for this solution - # configuration. - f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % ( - g, # Project GUID - v, # Solution build configuration - nv, # Project build config for that solution config - )) - - # Enable project in this solution configuration. 
- f.write('\t\t%s.%s.Build.0 = %s\r\n' % ( - g, # Project GUID - v, # Solution build configuration - nv, # Project build config for that solution config - )) - f.write('\tEndGlobalSection\r\n') - - # TODO(rspangler): Should be able to configure this stuff too (though I've - # never seen this be any different) - f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n') - f.write('\t\tHideSolutionNode = FALSE\r\n') - f.write('\tEndGlobalSection\r\n') - - # Folder mappings - # TODO(rspangler): Should omit this section if there are no folders - f.write('\tGlobalSection(NestedProjects) = preSolution\r\n') - for e in all_entries: - if not isinstance(e, MSVSFolder): - continue # Does not apply to projects, only folders - for subentry in e.entries: - f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid())) - f.write('\tEndGlobalSection\r\n') - - f.write('EndGlobal\r\n') - - f.close() diff --git a/mozc_build_tools/gyp/pylib/gyp/MSVSProject.py b/mozc_build_tools/gyp/pylib/gyp/MSVSProject.py deleted file mode 100644 index 18d98e7..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/MSVSProject.py +++ /dev/null @@ -1,244 +0,0 @@ -#!/usr/bin/python2.4 - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio project reader/writer.""" - -import common -import xml.dom -import xml.dom.minidom -import MSVSNew - -#------------------------------------------------------------------------------ - - -class Tool(object): - """Visual Studio tool.""" - - def __init__(self, name, attrs=None): - """Initializes the tool. - - Args: - name: Tool name. - attrs: Dict of tool attributes; may be None. - """ - self.name = name - self.attrs = attrs or {} - - def CreateElement(self, doc): - """Creates an element for the tool. - - Args: - doc: xml.dom.Document object to use for node creation. - - Returns: - A new xml.dom.Element for the tool. - """ - node = doc.createElement('Tool') - node.setAttribute('Name', self.name) - for k, v in self.attrs.items(): - node.setAttribute(k, v) - return node - - -class Filter(object): - """Visual Studio filter - that is, a virtual folder.""" - - def __init__(self, name, contents=None): - """Initializes the folder. - - Args: - name: Filter (folder) name. - contents: List of filenames and/or Filter objects contained. - """ - self.name = name - self.contents = list(contents or []) - - -#------------------------------------------------------------------------------ - - -class Writer(object): - """Visual Studio XML project writer.""" - - def __init__(self, project_path, version): - """Initializes the project. - - Args: - project_path: Path to the project file. - version: Format version to emit. - """ - self.project_path = project_path - self.doc = None - self.version = version - - def Create(self, name, guid=None, platforms=None): - """Creates the project document. - - Args: - name: Name of the project. - guid: GUID to use for project, if not None. - """ - self.name = name - self.guid = guid or MSVSNew.MakeGuid(self.project_path) - - # Default to Win32 for platforms. 
- if not platforms: - platforms = ['Win32'] - - # Create XML doc - xml_impl = xml.dom.getDOMImplementation() - self.doc = xml_impl.createDocument(None, 'VisualStudioProject', None) - - # Add attributes to root element - self.n_root = self.doc.documentElement - self.n_root.setAttribute('ProjectType', 'Visual C++') - self.n_root.setAttribute('Version', self.version.ProjectVersion()) - self.n_root.setAttribute('Name', self.name) - self.n_root.setAttribute('ProjectGUID', self.guid) - self.n_root.setAttribute('RootNamespace', self.name) - self.n_root.setAttribute('Keyword', 'Win32Proj') - - # Add platform list - n_platform = self.doc.createElement('Platforms') - self.n_root.appendChild(n_platform) - for platform in platforms: - n = self.doc.createElement('Platform') - n.setAttribute('Name', platform) - n_platform.appendChild(n) - - # Add tool files section - self.n_tool_files = self.doc.createElement('ToolFiles') - self.n_root.appendChild(self.n_tool_files) - - # Add configurations section - self.n_configs = self.doc.createElement('Configurations') - self.n_root.appendChild(self.n_configs) - - # Add empty References section - self.n_root.appendChild(self.doc.createElement('References')) - - # Add files section - self.n_files = self.doc.createElement('Files') - self.n_root.appendChild(self.n_files) - # Keep a dict keyed on filename to speed up access. - self.n_files_dict = dict() - - # Add empty Globals section - self.n_root.appendChild(self.doc.createElement('Globals')) - - def AddToolFile(self, path): - """Adds a tool file to the project. - - Args: - path: Relative path from project to tool file. - """ - n_tool = self.doc.createElement('ToolFile') - n_tool.setAttribute('RelativePath', path) - self.n_tool_files.appendChild(n_tool) - - def _AddConfigToNode(self, parent, config_type, config_name, attrs=None, - tools=None): - """Adds a configuration to the parent node. - - Args: - parent: Destination node. - config_type: Type of configuration node. - config_name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - """ - # Handle defaults - if not attrs: - attrs = {} - if not tools: - tools = [] - - # Add configuration node and its attributes - n_config = self.doc.createElement(config_type) - n_config.setAttribute('Name', config_name) - for k, v in attrs.items(): - n_config.setAttribute(k, v) - parent.appendChild(n_config) - - # Add tool nodes and their attributes - if tools: - for t in tools: - if isinstance(t, Tool): - n_config.appendChild(t.CreateElement(self.doc)) - else: - n_config.appendChild(Tool(t).CreateElement(self.doc)) - - def AddConfig(self, name, attrs=None, tools=None): - """Adds a configuration to the project. - - Args: - name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - """ - self._AddConfigToNode(self.n_configs, 'Configuration', name, attrs, tools) - - def _AddFilesToNode(self, parent, files): - """Adds files and/or filters to the parent node. - - Args: - parent: Destination node - files: A list of Filter objects and/or relative paths to files. - - Will call itself recursively, if the files list contains Filter objects. 
- """ - for f in files: - if isinstance(f, Filter): - node = self.doc.createElement('Filter') - node.setAttribute('Name', f.name) - self._AddFilesToNode(node, f.contents) - else: - node = self.doc.createElement('File') - node.setAttribute('RelativePath', f) - self.n_files_dict[f] = node - parent.appendChild(node) - - def AddFiles(self, files): - """Adds files to the project. - - Args: - files: A list of Filter objects and/or relative paths to files. - - This makes a copy of the file/filter tree at the time of this call. If you - later add files to a Filter object which was passed into a previous call - to AddFiles(), it will not be reflected in this project. - """ - self._AddFilesToNode(self.n_files, files) - # TODO(rspangler) This also doesn't handle adding files to an existing - # filter. That is, it doesn't merge the trees. - - def AddFileConfig(self, path, config, attrs=None, tools=None): - """Adds a configuration to a file. - - Args: - path: Relative path to the file. - config: Name of configuration to add. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - - Raises: - ValueError: Relative path does not match any file added via AddFiles(). - """ - # Find the file node with the right relative path - parent = self.n_files_dict.get(path) - if not parent: - raise ValueError('AddFileConfig: file "%s" not in project.' % path) - - # Add the config to the file node - self._AddConfigToNode(parent, 'FileConfiguration', config, attrs, tools) - - def Write(self, writer=common.WriteOnDiff): - """Writes the project file.""" - f = writer(self.project_path) - self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n') - f.close() - -#------------------------------------------------------------------------------ diff --git a/mozc_build_tools/gyp/pylib/gyp/MSVSToolFile.py b/mozc_build_tools/gyp/pylib/gyp/MSVSToolFile.py deleted file mode 100644 index bd8fd9e..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/MSVSToolFile.py +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/python2.4 - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio project reader/writer.""" - -import common -import xml.dom -import xml.dom.minidom - - -#------------------------------------------------------------------------------ - - -class Writer(object): - """Visual Studio XML tool file writer.""" - - def __init__(self, tool_file_path): - """Initializes the tool file. - - Args: - tool_file_path: Path to the tool file. - """ - self.tool_file_path = tool_file_path - self.doc = None - - def Create(self, name): - """Creates the tool file document. - - Args: - name: Name of the tool file. - """ - self.name = name - - # Create XML doc - xml_impl = xml.dom.getDOMImplementation() - self.doc = xml_impl.createDocument(None, 'VisualStudioToolFile', None) - - # Add attributes to root element - self.n_root = self.doc.documentElement - self.n_root.setAttribute('Version', '8.00') - self.n_root.setAttribute('Name', self.name) - - # Add rules section - self.n_rules = self.doc.createElement('Rules') - self.n_root.appendChild(self.n_rules) - - def AddCustomBuildRule(self, name, cmd, description, - additional_dependencies, - outputs, extensions): - """Adds a rule to the tool file. - - Args: - name: Name of the rule. - description: Description of the rule. - cmd: Command line of the rule. - additional_dependencies: other files which may trigger the rule. 
- outputs: outputs of the rule. - extensions: extensions handled by the rule. - """ - n_rule = self.doc.createElement('CustomBuildRule') - n_rule.setAttribute('Name', name) - n_rule.setAttribute('ExecutionDescription', description) - n_rule.setAttribute('CommandLine', cmd) - n_rule.setAttribute('Outputs', ';'.join(outputs)) - n_rule.setAttribute('FileExtensions', ';'.join(extensions)) - n_rule.setAttribute('AdditionalDependencies', - ';'.join(additional_dependencies)) - self.n_rules.appendChild(n_rule) - - def Write(self, writer=common.WriteOnDiff): - """Writes the tool file.""" - f = writer(self.tool_file_path) - self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n') - f.close() - -#------------------------------------------------------------------------------ diff --git a/mozc_build_tools/gyp/pylib/gyp/MSVSUserFile.py b/mozc_build_tools/gyp/pylib/gyp/MSVSUserFile.py deleted file mode 100644 index 21098e3..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/MSVSUserFile.py +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/python2.4 - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio user preferences file writer.""" - -import common -import os -import re -import socket # for gethostname -import xml.dom -import xml.dom.minidom - - -#------------------------------------------------------------------------------ - -def _FindCommandInPath(command): - """If there are no slashes in the command given, this function - searches the PATH env to find the given command, and converts it - to an absolute path. We have to do this because MSVS is looking - for an actual file to launch a debugger on, not just a command - line. Note that this happens at GYP time, so anything needing to - be built needs to have a full path.""" - if '/' in command or '\\' in command: - # If the command already has path elements (either relative or - # absolute), then assume it is constructed properly. - return command - else: - # Search through the path list and find an existing file that - # we can access. - paths = os.environ.get('PATH','').split(os.pathsep) - for path in paths: - item = os.path.join(path, command) - if os.path.isfile(item) and os.access(item, os.X_OK): - return item - return command - -def _QuoteWin32CommandLineArgs(args): - new_args = [] - for arg in args: - # Replace all double-quotes with double-double-quotes to escape - # them for cmd shell, and then quote the whole thing if there - # are any. - if arg.find('"') != -1: - arg = '""'.join(arg.split('"')) - arg = '"%s"' % arg - - # Otherwise, if there are any spaces, quote the whole arg. - elif re.search(r'[ \t\n]', arg): - arg = '"%s"' % arg - new_args.append(arg) - return new_args - -class Writer(object): - """Visual Studio XML user user file writer.""" - - def __init__(self, user_file_path, version): - """Initializes the user file. - - Args: - user_file_path: Path to the user file. - """ - self.user_file_path = user_file_path - self.version = version - self.doc = None - - def Create(self, name): - """Creates the user file document. - - Args: - name: Name of the user file. 
- """ - self.name = name - - # Create XML doc - xml_impl = xml.dom.getDOMImplementation() - self.doc = xml_impl.createDocument(None, 'VisualStudioUserFile', None) - - # Add attributes to root element - self.n_root = self.doc.documentElement - self.n_root.setAttribute('Version', self.version.ProjectVersion()) - self.n_root.setAttribute('Name', self.name) - - # Add configurations section - self.n_configs = self.doc.createElement('Configurations') - self.n_root.appendChild(self.n_configs) - - def _AddConfigToNode(self, parent, config_type, config_name): - """Adds a configuration to the parent node. - - Args: - parent: Destination node. - config_type: Type of configuration node. - config_name: Configuration name. - """ - # Add configuration node and its attributes - n_config = self.doc.createElement(config_type) - n_config.setAttribute('Name', config_name) - parent.appendChild(n_config) - - def AddConfig(self, name): - """Adds a configuration to the project. - - Args: - name: Configuration name. - """ - self._AddConfigToNode(self.n_configs, 'Configuration', name) - - - def AddDebugSettings(self, config_name, command, environment = {}, - working_directory=""): - """Adds a DebugSettings node to the user file for a particular config. - - Args: - command: command line to run. First element in the list is the - executable. All elements of the command will be quoted if - necessary. - working_directory: other files which may trigger the rule. (optional) - """ - command = _QuoteWin32CommandLineArgs(command) - - n_cmd = self.doc.createElement('DebugSettings') - abs_command = _FindCommandInPath(command[0]) - n_cmd.setAttribute('Command', abs_command) - n_cmd.setAttribute('WorkingDirectory', working_directory) - n_cmd.setAttribute('CommandArguments', " ".join(command[1:])) - n_cmd.setAttribute('RemoteMachine', socket.gethostname()) - - if environment and isinstance(environment, dict): - n_cmd.setAttribute('Environment', - " ".join(['%s="%s"' % (key, val) - for (key,val) in environment.iteritems()])) - else: - n_cmd.setAttribute('Environment', '') - - n_cmd.setAttribute('EnvironmentMerge', 'true') - - # Currently these are all "dummy" values that we're just setting - # in the default manner that MSVS does it. We could use some of - # these to add additional capabilities, I suppose, but they might - # not have parity with other platforms then. - n_cmd.setAttribute('Attach', 'false') - n_cmd.setAttribute('DebuggerType', '3') # 'auto' debugger - n_cmd.setAttribute('Remote', '1') - n_cmd.setAttribute('RemoteCommand', '') - n_cmd.setAttribute('HttpUrl', '') - n_cmd.setAttribute('PDBPath', '') - n_cmd.setAttribute('SQLDebugging', '') - n_cmd.setAttribute('DebuggerFlavor', '0') - n_cmd.setAttribute('MPIRunCommand', '') - n_cmd.setAttribute('MPIRunArguments', '') - n_cmd.setAttribute('MPIRunWorkingDirectory', '') - n_cmd.setAttribute('ApplicationCommand', '') - n_cmd.setAttribute('ApplicationArguments', '') - n_cmd.setAttribute('ShimCommand', '') - n_cmd.setAttribute('MPIAcceptMode', '') - n_cmd.setAttribute('MPIAcceptFilter', '') - - # Find the config, and add it if it doesn't exist. - found = False - for config in self.n_configs.childNodes: - if config.getAttribute("Name") == config_name: - found = True - - if not found: - self.AddConfig(config_name) - - # Add the DebugSettings onto the appropriate config. 
- for config in self.n_configs.childNodes: - if config.getAttribute("Name") == config_name: - config.appendChild(n_cmd) - break - - def Write(self, writer=common.WriteOnDiff): - """Writes the user file.""" - f = writer(self.user_file_path) - self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n') - f.close() - -#------------------------------------------------------------------------------ diff --git a/mozc_build_tools/gyp/pylib/gyp/MSVSVersion.py b/mozc_build_tools/gyp/pylib/gyp/MSVSVersion.py deleted file mode 100755 index d2c17bb..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/MSVSVersion.py +++ /dev/null @@ -1,151 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Handle version information related to Visual Stuio.""" - -import os -import re -import subprocess -import sys - - -class VisualStudioVersion: - """Information regarding a version of Visual Studio.""" - - def __init__(self, short_name, description, - solution_version, project_version, flat_sln): - self.short_name = short_name - self.description = description - self.solution_version = solution_version - self.project_version = project_version - self.flat_sln = flat_sln - - def ShortName(self): - return self.short_name - - def Description(self): - """Get the full description of the version.""" - return self.description - - def SolutionVersion(self): - """Get the version number of the sln files.""" - return self.solution_version - - def ProjectVersion(self): - """Get the version number of the vcproj files.""" - return self.project_version - - def FlatSolution(self): - return self.flat_sln - - -def _RegistryGetValue(key, value): - """Use reg.exe to read a paricular key. - - While ideally we might use the win32 module, we would like gyp to be - python neutral, so for instance cygwin python lacks this module. - - Arguments: - key: The registry key to read from. - value: The particular value to read. - Return: - The contents there, or None for failure. - """ - # Skip if not on Windows. - if sys.platform not in ('win32', 'cygwin'): - return None - # Run reg.exe. - cmd = [os.path.join(os.environ.get('WINDIR', ''), 'System32', 'reg.exe'), - 'query', key, '/v', value] - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - text = p.communicate()[0] - # Require a successful return value. - if p.returncode: - return None - # Extract value. - match = re.search(r'REG_\w+[ ]+([^\r]+)\r\n', text) - if not match: - return None - return match.group(1) - - -def _CreateVersion(name): - versions = { - '2008': VisualStudioVersion('2008', - 'Visual Studio 2008', - solution_version='10.00', - project_version='9.00', - flat_sln=False), - '2008e': VisualStudioVersion('2008e', - 'Visual Studio 2008', - solution_version='10.00', - project_version='9.00', - flat_sln=True), - '2005': VisualStudioVersion('2005', - 'Visual Studio 2005', - solution_version='9.00', - project_version='8.00', - flat_sln=False), - '2005e': VisualStudioVersion('2005e', - 'Visual Studio 2005', - solution_version='9.00', - project_version='8.00', - flat_sln=True), - } - return versions[str(name)] - - -def _DetectVisualStudioVersions(): - """Collect the list of installed visual studio versions. - - Returns: - A list of visual studio versions installed in descending order of - usage preference. - Base this on the registry and a quick check if devenv.exe exists. - Only versions 8-9 are considered. 
- Possibilities are: - 2005 - Visual Studio 2005 (8) - 2008 - Visual Studio 2008 (9) - """ - version_to_year = {'8.0': '2005', '9.0': '2008'} - versions = [] - for version in ('9.0', '8.0'): - # Get the install dir for this version. - key = r'HKLM\Software\Microsoft\VisualStudio\%s' % version - path = _RegistryGetValue(key, 'InstallDir') - if not path: - continue - # Check for full. - if os.path.exists(os.path.join(path, 'devenv.exe')): - # Add this one. - versions.append(_CreateVersion(version_to_year[version])) - # Check for express. - elif os.path.exists(os.path.join(path, 'vcexpress.exe')): - # Add this one. - versions.append(_CreateVersion(version_to_year[version] + 'e')) - return versions - - -def SelectVisualStudioVersion(version='auto'): - """Select which version of Visual Studio projects to generate. - - Arguments: - version: Hook to allow caller to force a particular version (vs auto). - Returns: - An object representing a visual studio project format version. - """ - # In auto mode, check environment variable for override. - if version == 'auto': - version = os.environ.get('GYP_MSVS_VERSION', 'auto') - # In auto mode, pick the most preferred version present. - if version == 'auto': - versions = _DetectVisualStudioVersions() - if not versions: - # Default to 2005. - return _CreateVersion('2005') - return versions[0] - # Convert version string into a version object. - return _CreateVersion(version) diff --git a/mozc_build_tools/gyp/pylib/gyp/SCons.py b/mozc_build_tools/gyp/pylib/gyp/SCons.py deleted file mode 100644 index 9c57bcb..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/SCons.py +++ /dev/null @@ -1,200 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -SCons generator. - -This contains class definitions and supporting functions for generating -pieces of SCons files for the different types of GYP targets. -""" - -import os - - -def WriteList(fp, list, prefix='', - separator=',\n ', - preamble=None, - postamble=None): - fp.write(preamble or '') - fp.write((separator or ' ').join([prefix + l for l in list])) - fp.write(postamble or '') - - -class TargetBase(object): - """ - Base class for a SCons representation of a GYP target. - """ - is_ignored = False - target_prefix = '' - target_suffix = '' - def __init__(self, spec): - self.spec = spec - def full_product_name(self): - """ - Returns the full name of the product being built: - - * Uses 'product_name' if it's set, else prefix + 'target_name'. - * Prepends 'product_dir' if set. - * Appends SCons suffix variables for the target type (or - product_extension). - """ - suffix = self.target_suffix - product_extension = self.spec.get('product_extension') - if product_extension: - suffix = '.' + product_extension - prefix = self.spec.get('product_prefix', self.target_prefix) - name = self.spec['target_name'] - name = prefix + self.spec.get('product_name', name) + suffix - product_dir = self.spec.get('product_dir') - if product_dir: - name = os.path.join(product_dir, name) - else: - name = os.path.join(self.out_dir, name) - return name - - def write_input_files(self, fp): - """ - Writes the definition of the input files (sources). 
- """ - sources = self.spec.get('sources') - if not sources: - fp.write('\ninput_files = []\n') - return - preamble = '\ninput_files = [\n ' - postamble = ',\n]\n' - WriteList(fp, map(repr, sources), preamble=preamble, postamble=postamble) - - def builder_call(self): - """ - Returns the actual SCons builder call to build this target. - """ - name = self.full_product_name() - return 'env.%s(env.File(%r), input_files)' % (self.builder_name, name) - def write_target(self, fp, src_dir='', pre=''): - """ - Writes the lines necessary to build this target. - """ - fp.write('\n' + pre) - fp.write('_outputs = %s\n' % self.builder_call()) - fp.write('target_files.extend(_outputs)\n') - - -class NoneTarget(TargetBase): - """ - A GYP target type of 'none', implicitly or explicitly. - """ - def write_target(self, fp, pre=''): - fp.write('\ntarget_files.extend(input_files)\n') - - -class SettingsTarget(TargetBase): - """ - A GYP target type of 'settings'. - """ - is_ignored = True - - -compilable_sources_template = """ -_result = [] -for infile in input_files: - if env.compilable(infile): - if (type(infile) == type('') - and (infile.startswith(%(src_dir)r) - or not os.path.isabs(env.subst(infile)))): - # Force files below the build directory by replacing all '..' - # elements in the path with '__': - base, ext = os.path.splitext(os.path.normpath(infile)) - base = [d == '..' and '__' or d for d in base.split('/')] - base = os.path.join(*base) - object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base - if not infile.startswith(%(src_dir)r): - infile = %(src_dir)r + infile - infile = env.%(name)s(object, infile)[0] - else: - infile = env.%(name)s(infile)[0] - _result.append(infile) -input_files = _result -""" - -class CompilableSourcesTargetBase(TargetBase): - """ - An abstract base class for targets that compile their source files. - - We explicitly transform compilable files into object files, - even though SCons could infer that for us, because we want - to control where the object file ends up. (The implicit rules - in SCons always put the object file next to the source file.) - """ - intermediate_builder_name = None - def write_target(self, fp, src_dir='', pre=''): - if self.intermediate_builder_name is None: - raise NotImplementedError - if src_dir and not src_dir.endswith('/'): - src_dir += '/' - variables = { - 'src_dir': src_dir, - 'name': self.intermediate_builder_name, - } - fp.write(compilable_sources_template % variables) - super(CompilableSourcesTargetBase, self).write_target(fp) - - -class ProgramTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'executable'. - """ - builder_name = 'GypProgram' - intermediate_builder_name = 'StaticObject' - target_prefix = '${PROGPREFIX}' - target_suffix = '${PROGSUFFIX}' - out_dir = '${TOP_BUILDDIR}' - - -class StaticLibraryTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'static_library'. - """ - builder_name = 'GypStaticLibrary' - intermediate_builder_name = 'StaticObject' - target_prefix = '${LIBPREFIX}' - target_suffix = '${LIBSUFFIX}' - out_dir = '${LIB_DIR}' - - -class SharedLibraryTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'shared_library'. - """ - builder_name = 'GypSharedLibrary' - intermediate_builder_name = 'SharedObject' - target_prefix = '${SHLIBPREFIX}' - target_suffix = '${SHLIBSUFFIX}' - out_dir = '${LIB_DIR}' - - -class LoadableModuleTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'loadable_module'. 
- """ - builder_name = 'GypLoadableModule' - intermediate_builder_name = 'SharedObject' - target_prefix = '${SHLIBPREFIX}' - target_suffix = '${SHLIBSUFFIX}' - out_dir = '${TOP_BUILDDIR}' - - -TargetMap = { - None : NoneTarget, - 'none' : NoneTarget, - 'settings' : SettingsTarget, - 'executable' : ProgramTarget, - 'static_library' : StaticLibraryTarget, - 'shared_library' : SharedLibraryTarget, - 'loadable_module' : LoadableModuleTarget, -} - -def Target(spec): - return TargetMap[spec.get('type')](spec) diff --git a/mozc_build_tools/gyp/pylib/gyp/__init__.py b/mozc_build_tools/gyp/pylib/gyp/__init__.py deleted file mode 100644 index 21c6a1b..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/__init__.py +++ /dev/null @@ -1,453 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import copy -import gyp.input -import optparse -import os.path -import re -import shlex -import sys - -# Default debug modes for GYP -debug = {} - -# List of "official" debug modes, but you can use anything you like. -DEBUG_GENERAL = 'general' -DEBUG_VARIABLES = 'variables' -DEBUG_INCLUDES = 'includes' - -def DebugOutput(mode, message): - if mode in gyp.debug.keys(): - print "%s: %s" % (mode.upper(), message) - -def FindBuildFiles(): - extension = '.gyp' - files = os.listdir(os.getcwd()) - build_files = [] - for file in files: - if file[-len(extension):] == extension: - build_files.append(file) - return build_files - - -def Load(build_files, format, default_variables={}, - includes=[], depth='.', params={}, check=False, circular_check=True): - """ - Loads one or more specified build files. - default_variables and includes will be copied before use. - Returns the generator for the specified format and the - data returned by loading the specified build files. - """ - default_variables = copy.copy(default_variables) - - # Default variables provided by this program and its modules should be - # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace, - # avoiding collisions with user and automatic variables. - default_variables['GENERATOR'] = format - - generator_name = 'gyp.generator.' + format - # These parameters are passed in order (as opposed to by key) - # because ActivePython cannot handle key parameters to __import__. - generator = __import__(generator_name, globals(), locals(), generator_name) - for (key, val) in generator.generator_default_variables.items(): - default_variables.setdefault(key, val) - - # Give the generator the opportunity to set additional variables based on - # the params it will receive in the output phase. - if getattr(generator, 'CalculateVariables', None): - generator.CalculateVariables(default_variables, params) - - # Fetch the generator specific info that gets fed to input, we use getattr - # so we can default things and the generators only have to provide what - # they need. 
- generator_input_info = { - 'generator_wants_absolute_build_file_paths': - getattr(generator, 'generator_wants_absolute_build_file_paths', False), - 'generator_handles_variants': - getattr(generator, 'generator_handles_variants', False), - 'non_configuration_keys': - getattr(generator, 'generator_additional_non_configuration_keys', []), - 'path_sections': - getattr(generator, 'generator_additional_path_sections', []), - 'extra_sources_for_rules': - getattr(generator, 'generator_extra_sources_for_rules', []), - 'generator_supports_multiple_toolsets': - getattr(generator, 'generator_supports_multiple_toolsets', False), - } - - # Process the input specific to this generator. - result = gyp.input.Load(build_files, default_variables, includes[:], - depth, generator_input_info, check, circular_check) - return [generator] + result - -def NameValueListToDict(name_value_list): - """ - Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary - of the pairs. If a string is simply NAME, then the value in the dictionary - is set to True. If VALUE can be converted to an integer, it is. - """ - result = { } - for item in name_value_list: - tokens = item.split('=', 1) - if len(tokens) == 2: - # If we can make it an int, use that, otherwise, use the string. - try: - token_value = int(tokens[1]) - except ValueError: - token_value = tokens[1] - # Set the variable to the supplied value. - result[tokens[0]] = token_value - else: - # No value supplied, treat it as a boolean and set it. - result[tokens[0]] = True - return result - -def ShlexEnv(env_name): - flags = os.environ.get(env_name, []) - if flags: - flags = shlex.split(flags) - return flags - -def FormatOpt(opt, value): - if opt.startswith('--'): - return '%s=%s' % (opt, value) - return opt + value - -def RegenerateAppendFlag(flag, values, predicate, env_name, options): - """Regenerate a list of command line flags, for an option of action='append'. - - The |env_name|, if given, is checked in the environment and used to generate - an initial list of options, then the options that were specified on the - command line (given in |values|) are appended. This matches the handling of - environment variables and command line flags where command line flags override - the environment, while not requiring the environment to be set when the flags - are used again. - """ - flags = [] - if options.use_environment and env_name: - for flag_value in ShlexEnv(env_name): - flags.append(FormatOpt(flag, predicate(flag_value))) - if values: - for flag_value in values: - flags.append(FormatOpt(flag, predicate(flag_value))) - return flags - -def RegenerateFlags(options): - """Given a parsed options object, and taking the environment variables into - account, returns a list of flags that should regenerate an equivalent options - object (even in the absence of the environment variables.) - - Any path options will be normalized relative to depth. - - The format flag is not included, as it is assumed the calling generator will - set that as appropriate. - """ - def FixPath(path): - path = gyp.common.FixIfRelativePath(path, options.depth) - if not path: - return os.path.curdir - return path - - def Noop(value): - return value - - # We always want to ignore the environment when regenerating, to avoid - # duplicate or changed flags in the environment at the time of regeneration. 
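# An illustrative aside, not from the patch itself: FormatOpt() above joins
# long options with '=' and concatenates short ones, so with hypothetical
# inputs the regenerated flag list built below might look like
#   ['--ignore-environment', '--depth=src', '-DOS=linux', '-Gmsvs_version=2008']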
- flags = ['--ignore-environment'] - for name, metadata in options._regeneration_metadata.iteritems(): - opt = metadata['opt'] - value = getattr(options, name) - value_predicate = metadata['type'] == 'path' and FixPath or Noop - action = metadata['action'] - env_name = metadata['env_name'] - if action == 'append': - flags.extend(RegenerateAppendFlag(opt, value, value_predicate, - env_name, options)) - elif action in ('store', None): # None is a synonym for 'store'. - if value: - flags.append(FormatOpt(opt, value_predicate(value))) - elif options.use_environment and env_name and os.environ.get(env_name): - flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name)))) - elif action in ('store_true', 'store_false'): - if ((action == 'store_true' and value) or - (action == 'store_false' and not value)): - flags.append(opt) - elif options.use_environment and env_name: - print >>sys.stderr, ('Warning: environment regeneration unimplemented ' - 'for %s flag %r env_name %r' % (action, opt, - env_name)) - else: - print >>sys.stderr, ('Warning: regeneration unimplemented for action %r ' - 'flag %r' % (action, opt)) - - return flags - -class RegeneratableOptionParser(optparse.OptionParser): - def __init__(self): - self.__regeneratable_options = {} - optparse.OptionParser.__init__(self) - - def add_option(self, *args, **kw): - """Add an option to the parser. - - This accepts the same arguments as OptionParser.add_option, plus the - following: - regenerate: can be set to False to prevent this option from being included - in regeneration. - env_name: name of environment variable that additional values for this - option come from. - type: adds type='path', to tell the regenerator that the values of - this option need to be made relative to options.depth - """ - env_name = kw.pop('env_name', None) - if 'dest' in kw and kw.pop('regenerate', True): - dest = kw['dest'] - - # The path type is needed for regenerating, for optparse we can just treat - # it as a string. - type = kw.get('type') - if type == 'path': - kw['type'] = 'string' - - self.__regeneratable_options[dest] = { - 'action': kw.get('action'), - 'type': type, - 'env_name': env_name, - 'opt': args[0], - } - - optparse.OptionParser.add_option(self, *args, **kw) - - def parse_args(self, *args): - values, args = optparse.OptionParser.parse_args(self, *args) - values._regeneration_metadata = self.__regeneratable_options - return values, args - -def main(args): - my_name = os.path.basename(sys.argv[0]) - - parser = RegeneratableOptionParser() - usage = 'usage: %s [options ...] [build_file ...]' - parser.set_usage(usage.replace('%s', '%prog')) - parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL', - env_name='GYP_DEFINES', - help='sets variable VAR to value VAL') - parser.add_option('-f', '--format', dest='formats', action='append', - env_name='GYP_GENERATORS', regenerate=False, - help='output formats to generate') - parser.add_option('--msvs-version', dest='msvs_version', - regenerate=False, - help='Deprecated; use -G msvs_version=MSVS_VERSION instead') - parser.add_option('-I', '--include', dest='includes', action='append', - metavar='INCLUDE', type='path', - help='files to include in all loaded .gyp files') - parser.add_option('--depth', dest='depth', metavar='PATH', type='path', - help='set DEPTH gyp variable to a relative path to PATH') - parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE', - action='append', default=[], help='turn on a debugging ' - 'mode for debugging GYP. 
Supported modes are "variables" ' - 'and "general"') - parser.add_option('-S', '--suffix', dest='suffix', default='', - help='suffix to add to generated files') - parser.add_option('-G', dest='generator_flags', action='append', default=[], - metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS', - help='sets generator flag FLAG to VAL') - parser.add_option('--generator-output', dest='generator_output', - action='store', default=None, metavar='DIR', type='path', - env_name='GYP_GENERATOR_OUTPUT', - help='puts generated build files under DIR') - parser.add_option('--ignore-environment', dest='use_environment', - action='store_false', default=True, regenerate=False, - help='do not read options from environment variables') - parser.add_option('--check', dest='check', action='store_true', - help='check format of gyp files') - # --no-circular-check disables the check for circular relationships between - # .gyp files. These relationships should not exist, but they've only been - # observed to be harmful with the Xcode generator. Chromium's .gyp files - # currently have some circular relationships on non-Mac platforms, so this - # option allows the strict behavior to be used on Macs and the lenient - # behavior to be used elsewhere. - # TODO(mark): Remove this option when http://crbug.com/35878 is fixed. - parser.add_option('--no-circular-check', dest='circular_check', - action='store_false', default=True, regenerate=False, - help="don't check for circular relationships between files") - - # We read a few things from ~/.gyp, so set up a var for that. - home_vars = ['HOME'] - if sys.platform in ('cygwin', 'win32'): - home_vars.append('USERPROFILE') - home = None - for home_var in home_vars: - home = os.getenv(home_var) - if home != None: - break - home_dot_gyp = None - if home != None: - home_dot_gyp = os.path.join(home, '.gyp') - if not os.path.exists(home_dot_gyp): - home_dot_gyp = None - - # TODO(thomasvl): add support for ~/.gyp/defaults - - (options, build_files_arg) = parser.parse_args(args) - build_files = build_files_arg - - if not options.formats: - # If no format was given on the command line, then check the env variable. - generate_formats = [] - if options.use_environment: - generate_formats = os.environ.get('GYP_GENERATORS', []) - if generate_formats: - generate_formats = re.split('[\s,]', generate_formats) - if generate_formats: - options.formats = generate_formats - else: - # Nothing in the variable, default based on platform. - options.formats = [ {'darwin': 'xcode', - 'win32': 'msvs', - 'cygwin': 'msvs', - 'freebsd7': 'make', - 'freebsd8': 'make', - 'linux2': 'make', - 'openbsd4': 'make', - 'sunos5': 'make',}[sys.platform] ] - - if not options.generator_output and options.use_environment: - g_o = os.environ.get('GYP_GENERATOR_OUTPUT') - if g_o: - options.generator_output = g_o - - for mode in options.debug: - gyp.debug[mode] = 1 - - # Do an extra check to avoid work when we're not debugging. - if DEBUG_GENERAL in gyp.debug.keys(): - DebugOutput(DEBUG_GENERAL, 'running with these options:') - for (option, value) in options.__dict__.items(): - if option[0] == '_': - continue - if isinstance(value, basestring): - DebugOutput(DEBUG_GENERAL, " %s: '%s'" % (option, value)) - else: - DebugOutput(DEBUG_GENERAL, " %s: %s" % (option, str(value))) - - if not build_files: - build_files = FindBuildFiles() - if not build_files: - print >>sys.stderr, (usage + '\n\n%s: error: no build_file') % \ - (my_name, my_name) - return 1 - - # TODO(mark): Chromium-specific hack! 
- # For Chromium, the gyp "depth" variable should always be a relative path - # to Chromium's top-level "src" directory. If no depth variable was set - # on the command line, try to find a "src" directory by looking at the - # absolute path to each build file's directory. The first "src" component - # found will be treated as though it were the path used for --depth. - if not options.depth: - for build_file in build_files: - build_file_dir = os.path.abspath(os.path.dirname(build_file)) - build_file_dir_components = build_file_dir.split(os.path.sep) - components_len = len(build_file_dir_components) - for index in xrange(components_len - 1, -1, -1): - if build_file_dir_components[index] == 'src': - options.depth = os.path.sep.join(build_file_dir_components) - break - del build_file_dir_components[index] - - # If the inner loop found something, break without advancing to another - # build file. - if options.depth: - break - - if not options.depth: - raise Exception, \ - 'Could not automatically locate src directory. This is a ' + \ - 'temporary Chromium feature that will be removed. Use ' + \ - '--depth as a workaround.' - - # -D on the command line sets variable defaults - D isn't just for define, - # it's for default. Perhaps there should be a way to force (-F?) a - # variable's value so that it can't be overridden by anything else. - cmdline_default_variables = {} - defines = [] - if options.use_environment: - defines += ShlexEnv('GYP_DEFINES') - if options.defines: - defines += options.defines - cmdline_default_variables = NameValueListToDict(defines) - if DEBUG_GENERAL in gyp.debug.keys(): - DebugOutput(DEBUG_GENERAL, - "cmdline_default_variables: %s" % cmdline_default_variables) - - # Set up includes. - includes = [] - - # If ~/.gyp/include.gypi exists, it'll be forcibly included into every - # .gyp file that's loaded, before anything else is included. - if home_dot_gyp != None: - default_include = os.path.join(home_dot_gyp, 'include.gypi') - if os.path.exists(default_include): - includes.append(default_include) - - # Command-line --include files come after the default include. - if options.includes: - includes.extend(options.includes) - - # Generator flags should be prefixed with the target generator since they - # are global across all generator runs. - gen_flags = [] - if options.use_environment: - gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS') - if options.generator_flags: - gen_flags += options.generator_flags - generator_flags = NameValueListToDict(gen_flags) - if DEBUG_GENERAL in gyp.debug.keys(): - DebugOutput(DEBUG_GENERAL, "generator_flags: %s" % generator_flags) - - # TODO: Remove this and the option after we've gotten folks to move to the - # generator flag. - if options.msvs_version: - print >>sys.stderr, \ - 'DEPRECATED: Use generator flag (-G msvs_version=' + \ - options.msvs_version + ') instead of --msvs-version=' + \ - options.msvs_version - generator_flags['msvs_version'] = options.msvs_version - - # Generate all requested formats (use a set in case we got one format request - # twice) - for format in set(options.formats): - params = {'options': options, - 'build_files': build_files, - 'generator_flags': generator_flags, - 'cwd': os.getcwd(), - 'build_files_arg': build_files_arg, - 'gyp_binary': sys.argv[0], - 'home_dot_gyp': home_dot_gyp} - - # Start with the default variables from the command line. 
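# An illustrative aside with hypothetical values, not from the patch itself:
# if GYP_DEFINES='OS=linux werror=0' is set in the environment and
# '-D chromeos' is passed on the command line, the merge above leaves
#   cmdline_default_variables == {'OS': 'linux', 'werror': 0, 'chromeos': True}
# and that dict is the starting variable set handed to Load() below.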
- [generator, flat_list, targets, data] = Load(build_files, format, - cmdline_default_variables, - includes, options.depth, - params, options.check, - options.circular_check) - - # TODO(mark): Pass |data| for now because the generator needs a list of - # build files that came in. In the future, maybe it should just accept - # a list, and not the whole data dict. - # NOTE: flat_list is the flattened dependency graph specifying the order - # that targets may be built. Build systems that operate serially or that - # need to have dependencies defined before dependents reference them should - # generate targets in the order specified in flat_list. - generator.GenerateOutput(flat_list, targets, data, params) - - # Done - return 0 - - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/mozc_build_tools/gyp/pylib/gyp/common.py b/mozc_build_tools/gyp/pylib/gyp/common.py deleted file mode 100644 index f2070bf..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/common.py +++ /dev/null @@ -1,343 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import errno -import filecmp -import os.path -import re -import tempfile -import sys - -def ExceptionAppend(e, msg): - """Append a message to the given exception's message.""" - if not e.args: - e.args = (msg,) - elif len(e.args) == 1: - e.args = (str(e.args[0]) + ' ' + msg,) - else: - e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:] - - -def ParseQualifiedTarget(target): - # Splits a qualified target into a build file, target name and toolset. - - # NOTE: rsplit is used to disambiguate the Windows drive letter separator. - target_split = target.rsplit(':', 1) - if len(target_split) == 2: - [build_file, target] = target_split - else: - build_file = None - - target_split = target.rsplit('#', 1) - if len(target_split) == 2: - [target, toolset] = target_split - else: - toolset = None - - return [build_file, target, toolset] - - -def ResolveTarget(build_file, target, toolset): - # This function resolves a target into a canonical form: - # - a fully defined build file, either absolute or relative to the current - # directory - # - a target name - # - a toolset - # - # build_file is the file relative to which 'target' is defined. - # target is the qualified target. - # toolset is the default toolset for that target. - [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target) - - if parsed_build_file: - if build_file: - # If a relative path, parsed_build_file is relative to the directory - # containing build_file. If build_file is not in the current directory, - # parsed_build_file is not a usable path as-is. Resolve it by - # interpreting it as relative to build_file. If parsed_build_file is - # absolute, it is usable as a path regardless of the current directory, - # and os.path.join will return it as-is. - build_file = os.path.normpath(os.path.join(os.path.dirname(build_file), - parsed_build_file)) - else: - build_file = parsed_build_file - - if parsed_toolset: - toolset = parsed_toolset - - return [build_file, target, toolset] - - -def BuildFile(fully_qualified_target): - # Extracts the build file from the fully qualified target. 
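# An illustrative aside, not from the patch itself: for a fully qualified
# target such as 'base/base.gyp:base#host' (hypothetical), the
# ParseQualifiedTarget() helper above returns ['base/base.gyp', 'base', 'host'],
# so the return statement below yields just the build file, 'base/base.gyp'.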
- return ParseQualifiedTarget(fully_qualified_target)[0] - - -def QualifiedTarget(build_file, target, toolset): - # "Qualified" means the file that a target was defined in and the target - # name, separated by a colon, suffixed by a # and the toolset name: - # /path/to/file.gyp:target_name#toolset - fully_qualified = build_file + ':' + target - if toolset: - fully_qualified = fully_qualified + '#' + toolset - return fully_qualified - - -def RelativePath(path, relative_to): - # Assuming both |path| and |relative_to| are relative to the current - # directory, returns a relative path that identifies path relative to - # relative_to. - - # Convert to absolute (and therefore normalized paths). - path = os.path.abspath(path) - relative_to = os.path.abspath(relative_to) - - # Split the paths into components. - path_split = path.split(os.path.sep) - relative_to_split = relative_to.split(os.path.sep) - - # Determine how much of the prefix the two paths share. - prefix_len = len(os.path.commonprefix([path_split, relative_to_split])) - - # Put enough ".." components to back up out of relative_to to the common - # prefix, and then append the part of path_split after the common prefix. - relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \ - path_split[prefix_len:] - - if len(relative_split) == 0: - # The paths were the same. - return '' - - # Turn it back into a string and we're done. - return os.path.join(*relative_split) - - -def FixIfRelativePath(path, relative_to): - # Like RelativePath but returns |path| unchanged if it is absolute. - if os.path.isabs(path): - return path - return RelativePath(path, relative_to) - - -def UnrelativePath(path, relative_to): - # Assuming that |relative_to| is relative to the current directory, and |path| - # is a path relative to the dirname of |relative_to|, returns a path that - # identifies |path| relative to the current directory. - rel_dir = os.path.dirname(relative_to) - return os.path.normpath(os.path.join(rel_dir, path)) - - -# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at -# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02 -# and the documentation for various shells. - -# _quote is a pattern that should match any argument that needs to be quoted -# with double-quotes by EncodePOSIXShellArgument. It matches the following -# characters appearing anywhere in an argument: -# \t, \n, space parameter separators -# # comments -# $ expansions (quoted to always expand within one argument) -# % called out by IEEE 1003.1 XCU.2.2 -# & job control -# ' quoting -# (, ) subshell execution -# *, ?, [ pathname expansion -# ; command delimiter -# <, >, | redirection -# = assignment -# {, } brace expansion (bash) -# ~ tilde expansion -# It also matches the empty string, because "" (or '') is the only way to -# represent an empty string literal argument to a POSIX shell. -# -# This does not match the characters in _escape, because those need to be -# backslash-escaped regardless of whether they appear in a double-quoted -# string. -_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$') - -# _escape is a pattern that should match any character that needs to be -# escaped with a backslash, whether or not the argument matched the _quote -# pattern. _escape is used with re.sub to backslash anything in _escape's -# first match group, hence the (parentheses) in the regular expression. 
-# -# _escape matches the following characters appearing anywhere in an argument: -# " to prevent POSIX shells from interpreting this character for quoting -# \ to prevent POSIX shells from interpreting this character for escaping -# ` to prevent POSIX shells from interpreting this character for command -# substitution -# Missing from this list is $, because the desired behavior of -# EncodePOSIXShellArgument is to permit parameter (variable) expansion. -# -# Also missing from this list is !, which bash will interpret as the history -# expansion character when history is enabled. bash does not enable history -# by default in non-interactive shells, so this is not thought to be a problem. -# ! was omitted from this list because bash interprets "\!" as a literal string -# including the backslash character (avoiding history expansion but retaining -# the backslash), which would not be correct for argument encoding. Handling -# this case properly would also be problematic because bash allows the history -# character to be changed with the histchars shell variable. Fortunately, -# as history is not enabled in non-interactive shells and -# EncodePOSIXShellArgument is only expected to encode for non-interactive -# shells, there is no room for error here by ignoring !. -_escape = re.compile(r'(["\\`])') - -def EncodePOSIXShellArgument(argument): - """Encodes |argument| suitably for consumption by POSIX shells. - - argument may be quoted and escaped as necessary to ensure that POSIX shells - treat the returned value as a literal representing the argument passed to - this function. Parameter (variable) expansions beginning with $ are allowed - to remain intact without escaping the $, to allow the argument to contain - references to variables to be expanded by the shell. - """ - - if not isinstance(argument, str): - argument = str(argument) - - if _quote.search(argument): - quote = '"' - else: - quote = '' - - encoded = quote + re.sub(_escape, r'\\\1', argument) + quote - - return encoded - - -def EncodePOSIXShellList(list): - """Encodes |list| suitably for consumption by POSIX shells. - - Returns EncodePOSIXShellArgument for each item in list, and joins them - together using the space character as an argument separator. - """ - - encoded_arguments = [] - for argument in list: - encoded_arguments.append(EncodePOSIXShellArgument(argument)) - return ' '.join(encoded_arguments) - - -def DeepDependencyTargets(target_dicts, roots): - """Returns the recursive list of target dependencies. - """ - dependencies = set() - for r in roots: - spec = target_dicts[r] - r_deps = list(set((spec.get('dependencies', []) + - spec.get('dependencies_original', [])))) - for d in r_deps: - if d not in roots: - dependencies.add(d) - for d in DeepDependencyTargets(target_dicts, r_deps): - if d not in roots: - dependencies.add(d) - return list(dependencies) - - -def BuildFileTargets(target_list, build_file): - """From a target_list, returns the subset from the specified build_file. - """ - return [p for p in target_list if BuildFile(p) == build_file] - - -def AllTargets(target_list, target_dicts, build_file): - """Returns all targets (direct and dependencies) for the specified build_file. - """ - bftargets = BuildFileTargets(target_list, build_file) - deptargets = DeepDependencyTargets(target_dicts, bftargets) - return bftargets + deptargets - - -def WriteOnDiff(filename): - """Write to a file only if the new contents differ. - - Arguments: - filename: name of the file to potentially write to. 
- Returns: - A file like object which will write to temporary file and only overwrite - the target if it differs (on close). - """ - - class Writer: - """Wrapper around file which only covers the target if it differs.""" - def __init__(self): - # Pick temporary file. - tmp_fd, self.tmp_path = tempfile.mkstemp( - suffix='.tmp', - prefix=os.path.split(filename)[1] + '.gyp.', - dir=os.path.split(filename)[0]) - try: - self.tmp_file = os.fdopen(tmp_fd, 'wb') - except Exception: - # Don't leave turds behind. - os.unlink(self.tmp_path) - raise - - def __getattr__(self, attrname): - # Delegate everything else to self.tmp_file - return getattr(self.tmp_file, attrname) - - def close(self): - try: - # Close tmp file. - self.tmp_file.close() - # Determine if different. - same = False - try: - same = filecmp.cmp(self.tmp_path, filename, False) - except OSError, e: - if e.errno != errno.ENOENT: - raise - - if same: - # The new file is identical to the old one, just get rid of the new - # one. - os.unlink(self.tmp_path) - else: - # The new file is different from the old one, or there is no old one. - # Rename the new file to the permanent name. - # - # tempfile.mkstemp uses an overly restrictive mode, resulting in a - # file that can only be read by the owner, regardless of the umask. - # There's no reason to not respect the umask here, which means that - # an extra hoop is required to fetch it and reset the new file's mode. - # - # No way to get the umask without setting a new one? Set a safe one - # and then set it back to the old value. - umask = os.umask(077) - os.umask(umask) - os.chmod(self.tmp_path, 0666 & ~umask) - if sys.platform == 'win32' and os.path.exists(filename): - # NOTE: on windows (but not cygwin) rename will not replace an - # existing file, so it must be preceded with a remove. Sadly there - # is no way to make the switch atomic. - os.remove(filename) - os.rename(self.tmp_path, filename) - except Exception: - # Don't leave turds behind. - os.unlink(self.tmp_path) - raise - - return Writer() - - -# From Alex Martelli, -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560 -# ASPN: Python Cookbook: Remove duplicates from a sequence -# First comment, dated 2001/10/13. -# (Also in the printed Python Cookbook.) - -def uniquer(seq, idfun=None): - if idfun is None: - def idfun(x): return x - seen = {} - result = [] - for item in seq: - marker = idfun(item) - if marker in seen: continue - seen[marker] = 1 - result.append(item) - return result diff --git a/mozc_build_tools/gyp/pylib/gyp/generator/__init__.py b/mozc_build_tools/gyp/pylib/gyp/generator/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/mozc_build_tools/gyp/pylib/gyp/generator/gypd.py b/mozc_build_tools/gyp/pylib/gyp/generator/gypd.py deleted file mode 100644 index 948f0b8..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/generator/gypd.py +++ /dev/null @@ -1,88 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""gypd output module - -This module produces gyp input as its output. Output files are given the -.gypd extension to avoid overwriting the .gyp files that they are generated -from. 
Internal references to .gyp files (such as those found in -"dependencies" sections) are not adjusted to point to .gypd files instead; -unlike other paths, which are relative to the .gyp or .gypd file, such paths -are relative to the directory from which gyp was run to create the .gypd file. - -This generator module is intended to be a sample and a debugging aid, hence -the "d" for "debug" in .gypd. It is useful to inspect the results of the -various merges, expansions, and conditional evaluations performed by gyp -and to see a representation of what would be fed to a generator module. - -It's not advisable to rename .gypd files produced by this module to .gyp, -because they will have all merges, expansions, and evaluations already -performed and the relevant constructs not present in the output; paths to -dependencies may be wrong; and various sections that do not belong in .gyp -files such as such as "included_files" and "*_excluded" will be present. -Output will also be stripped of comments. This is not intended to be a -general-purpose gyp pretty-printer; for that, you probably just want to -run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip -comments but won't do all of the other things done to this module's output. - -The specific formatting of the output generated by this module is subject -to change. -""" - - -import gyp.common -import errno -import os -import pprint - - -# These variables should just be spit back out as variable references. -_generator_identity_variables = [ - 'EXECUTABLE_PREFIX', - 'EXECUTABLE_SUFFIX', - 'INTERMEDIATE_DIR', - 'PRODUCT_DIR', - 'RULE_INPUT_ROOT', - 'RULE_INPUT_EXT', - 'RULE_INPUT_NAME', - 'RULE_INPUT_PATH', - 'SHARED_INTERMEDIATE_DIR', -] - -# gypd doesn't define a default value for OS like many other generator -# modules. Specify "-D OS=whatever" on the command line to provide a value. -generator_default_variables = { -} - -# gypd supports multiple toolsets -generator_supports_multiple_toolsets = True - -# TODO(mark): This always uses <, which isn't right. The input module should -# notify the generator to tell it which phase it is operating in, and this -# module should use < for the early phase and then switch to > for the late -# phase. Bonus points for carrying @ back into the output too. -for v in _generator_identity_variables: - generator_default_variables[v] = '<(%s)' % v - - -def GenerateOutput(target_list, target_dicts, data, params): - output_files = {} - for qualified_target in target_list: - [input_file, target] = \ - gyp.common.ParseQualifiedTarget(qualified_target)[0:2] - - if input_file[-4:] != '.gyp': - continue - input_file_stem = input_file[:-4] - output_file = input_file_stem + params['options'].suffix + '.gypd' - - if not output_file in output_files: - output_files[output_file] = input_file - - for output_file, input_file in output_files.iteritems(): - output = open(output_file, 'w') - pprint.pprint(data[input_file], output) - output.close() diff --git a/mozc_build_tools/gyp/pylib/gyp/generator/gypsh.py b/mozc_build_tools/gyp/pylib/gyp/generator/gypsh.py deleted file mode 100644 index f48b03f..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/generator/gypsh.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""gypsh output module - -gypsh is a GYP shell. It's not really a generator per se. 
All it does is -fire up an interactive Python session with a few local variables set to the -variables passed to the generator. Like gypd, it's intended as a debugging -aid, to facilitate the exploration of .gyp structures after being processed -by the input module. - -The expected usage is "gyp -f gypsh -D OS=desired_os". -""" - - -import code -import sys - - -# All of this stuff about generator variables was lovingly ripped from gypd.py. -# That module has a much better description of what's going on and why. -_generator_identity_variables = [ - 'EXECUTABLE_PREFIX', - 'EXECUTABLE_SUFFIX', - 'INTERMEDIATE_DIR', - 'PRODUCT_DIR', - 'RULE_INPUT_ROOT', - 'RULE_INPUT_EXT', - 'RULE_INPUT_NAME', - 'RULE_INPUT_PATH', - 'SHARED_INTERMEDIATE_DIR', -] - -generator_default_variables = { -} - -for v in _generator_identity_variables: - generator_default_variables[v] = '<(%s)' % v - - -def GenerateOutput(target_list, target_dicts, data, params): - locals = { - 'target_list': target_list, - 'target_dicts': target_dicts, - 'data': data, - } - - # Use a banner that looks like the stock Python one and like what - # code.interact uses by default, but tack on something to indicate what - # locals are available, and identify gypsh. - banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \ - (sys.version, sys.platform, repr(sorted(locals.keys()))) - - code.interact(banner, local=locals) diff --git a/mozc_build_tools/gyp/pylib/gyp/generator/make.py b/mozc_build_tools/gyp/pylib/gyp/generator/make.py deleted file mode 100644 index 92f360b..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/generator/make.py +++ /dev/null @@ -1,1283 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Notes: -# -# This is all roughly based on the Makefile system used by the Linux -# kernel, but is a non-recursive make -- we put the entire dependency -# graph in front of make and let it figure it out. -# -# The code below generates a separate .mk file for each target, but -# all are sourced by the top-level Makefile. This means that all -# variables in .mk-files clobber one another. Be careful to use := -# where appropriate for immediate evaluation, and similarly to watch -# that you're not relying on a variable value to last beween different -# .mk files. -# -# TODOs: -# -# Global settings and utility functions are currently stuffed in the -# toplevel Makefile. It may make sense to generate some .mk files on -# the side to keep the the files readable. - -import gyp -import gyp.common -import os.path - -# Debugging-related imports -- remove me once we're solid. -import code -import pprint - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'OS': 'linux', - 'STATIC_LIB_PREFIX': 'lib', - 'SHARED_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'SHARED_LIB_SUFFIX': '.so', - 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/geni', - 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen', - 'PRODUCT_DIR': '$(builddir)', - 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)', - 'LIB_DIR': '$(obj).$(TOOLSET)', - 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python. - 'RULE_INPUT_PATH': '$(abspath $<)', - 'RULE_INPUT_EXT': '$(suffix $<)', - 'RULE_INPUT_NAME': '$(notdir $<)', - - # This appears unused --- ? 
- 'CONFIGURATION_NAME': '$(BUILDTYPE)', -} - -# Make supports multiple toolsets -generator_supports_multiple_toolsets = True - -def ensure_directory_exists(path): - dir = os.path.dirname(path) - if dir and not os.path.exists(dir): - os.makedirs(dir) - -# Header of toplevel Makefile. -# This should go into the build tree, but it's easier to keep it here for now. -SHARED_HEADER = ("""\ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= __default_configuration__ - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - -# C++ apps need to be linked with g++. Not sure what's appropriate. -LINK ?= $(CXX) - -CC.target ?= $(CC) -CFLAGS.target ?= $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) -RANLIB.target ?= ranlib - -CC.host ?= gcc -CFLAGS.host ?= -CXX.host ?= g++ -CXXFLAGS.host ?= -LINK.host ?= g++ -LDFLAGS.host ?= -AR.host ?= ar -RANLIB.host ?= ranlib - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$@.d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \\ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time.""" -r""" -define fixup_dep -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. -sed -e 's|\\||' -e 's| |\n|g' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef -""" -""" -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. 
- -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(CFLAGS.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(CXXFLAGS.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_alink = AR+RANLIB($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) rc $@ $(filter %.o,$^) && $(RANLIB.$(TOOLSET)) $@ - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = ln -f $< $@ 2>/dev/null || cp -af $< $@ - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(LDFLAGS.$(TOOLSET)) $(GYP_LDFLAGS) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) - -# Shared-object link (for generating .so). -# Set SONAME to the library filename so our binaries don't reference the local, -# absolute paths used on the link command-line. -# TODO: perhaps this can share with the LINK command above? -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(LDFLAGS.$(TOOLSET)) $(GYP_LDFLAGS) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) -""" -r""" -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). -exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' -""" -""" -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$@)),\\ - $(subst $(cmd_$@),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out $|,$?) - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do C/C++ dependency munging. 
-define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p $(dir $@) $(dir $(depfile)) - @$(cmd_$(1)) - @$(call exact_echo,$(call escape_vars,cmd_$@ := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) -) -endef - -# Declare "all" target first so it is the default, even though we don't have the -# deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -""") - -ROOT_HEADER_SUFFIX_RULES = ("""\ -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# Try building from generated source, too. -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -""") - -SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\ -# Suffix rules, putting all outputs into $(obj). -""") - -SHARED_HEADER_SUFFIX_RULES_SRCDIR = { - '.c': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -"""), - '.s': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc) -"""), - '.S': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc) -"""), - '.cpp': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), - '.cc': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), - '.cxx': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), -} - -SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\ -# Try building from generated source, too. 
-""") - -SHARED_HEADER_SUFFIX_RULES_OBJDIR1 = { - '.c': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -"""), - '.cc': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), - '.cpp': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), -} - -SHARED_HEADER_SUFFIX_RULES_OBJDIR2 = { - '.c': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -"""), - '.cc': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), - '.cpp': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), -} - -SHARED_HEADER_SUFFIX_RULES = ( - SHARED_HEADER_SUFFIX_RULES_COMMENT1 + - ''.join(SHARED_HEADER_SUFFIX_RULES_SRCDIR.values()) + - SHARED_HEADER_SUFFIX_RULES_COMMENT2 + - ''.join(SHARED_HEADER_SUFFIX_RULES_OBJDIR1.values()) + - ''.join(SHARED_HEADER_SUFFIX_RULES_OBJDIR2.values()) -) - -# This gets added to the very beginning of the Makefile. -SHARED_HEADER_SRCDIR = ("""\ -# The source directory tree. -srcdir := %s - -""") - -SHARED_HEADER_BUILDDIR_NAME = ("""\ -# The name of the builddir. -builddir_name ?= %s - -""") - -SHARED_FOOTER = """\ -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. First, only consider targets that already have been -# built, as unbuilt targets will be built regardless of dependency info: -all_deps := $(wildcard $(sort $(all_deps))) -# Of those, only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif -""" - -header = """\ -# This file is generated by gyp; do not edit. - -""" - - -def Compilable(filename): - """Return true if the file is compilable (should be in OBJS).""" - for res in (filename.endswith(e) for e - in ['.c', '.cc', '.cpp', '.cxx', '.s', '.S']): - if res: - return True - return False - - -def Target(filename): - """Translate a compilable filename to its .o target.""" - return os.path.splitext(filename)[0] + '.o' - - -def EscapeShellArgument(s): - """Quotes an argument so that it will be interpreted literally by a POSIX - shell. Taken from - http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python - """ - return "'" + s.replace("'", "'\\''") + "'" - - -def EscapeMakeVariableExpansion(s): - """Make has its own variable expansion syntax using $. We must escape it for - string to be interpreted literally.""" - return s.replace('$', '$$') - - -def EscapeCppDefine(s): - """Escapes a CPP define so that it will reach the compiler unaltered.""" - s = EscapeShellArgument(s) - s = EscapeMakeVariableExpansion(s) - return s - - -def QuoteIfNecessary(string): - """TODO: Should this ideally be replaced with one or more of the above - functions?""" - if '"' in string: - string = '"' + string.replace('"', '\\"') + '"' - return string - - -srcdir_prefix = '' -def Sourceify(path): - """Convert a path to its source directory form.""" - if '$(' in path: - return path - if os.path.isabs(path): - return path - return srcdir_prefix + path - - -# Map from qualified target to path to output. -target_outputs = {} -# Map from qualified target to a list of all linker dependencies, -# transitively expanded. 
-# Used in building shared-library-based executables. -target_link_deps = {} - - -class MakefileWriter: - """MakefileWriter packages up the writing of one target-specific foobar.mk. - - Its only real entry point is Write(), and is mostly used for namespacing. - """ - - def Write(self, qualified_target, base_path, output_filename, spec, configs, - part_of_all): - """The main entry point: writes a .mk file for a single target. - - Arguments: - qualified_target: target we're generating - base_path: path relative to source root we're building in, used to resolve - target-relative paths - output_filename: output .mk file name to write - spec, configs: gyp info - part_of_all: flag indicating this target is part of 'all' - """ - print 'Generating %s' % output_filename - - ensure_directory_exists(output_filename) - - self.fp = open(output_filename, 'w') - - self.fp.write(header) - - self.path = base_path - self.target = spec['target_name'] - self.type = spec['type'] - self.toolset = spec['toolset'] - - deps, link_deps = self.ComputeDeps(spec) - - # Some of the generation below can add extra output, sources, or - # link dependencies. All of the out params of the functions that - # follow use names like extra_foo. - extra_outputs = [] - extra_sources = [] - extra_link_deps = [] - - self.output = self.ComputeOutput(spec) - self._INSTALLABLE_TARGETS = ('executable', 'loadable_module', - 'shared_library') - if self.type in self._INSTALLABLE_TARGETS: - self.alias = os.path.basename(self.output) - else: - self.alias = self.output - - self.WriteLn("TOOLSET := " + self.toolset) - self.WriteLn("TARGET := " + self.target) - - # Actions must come first, since they can generate more OBJs for use below. - if 'actions' in spec: - self.WriteActions(spec['actions'], extra_sources, extra_outputs, - part_of_all) - - # Rules must be early like actions. - if 'rules' in spec: - self.WriteRules(spec['rules'], extra_sources, extra_outputs, part_of_all) - - if 'copies' in spec: - self.WriteCopies(spec['copies'], extra_outputs, part_of_all) - - all_sources = spec.get('sources', []) + extra_sources - if all_sources: - self.WriteSources(configs, deps, all_sources, - extra_outputs, extra_link_deps, part_of_all) - sources = filter(Compilable, all_sources) - if sources: - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1) - extensions = set([os.path.splitext(s)[1] for s in sources]) - for ext in extensions: - if ext in SHARED_HEADER_SUFFIX_RULES_SRCDIR: - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_SRCDIR[ext]) - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2) - for ext in extensions: - if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR1: - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR1[ext]) - for ext in extensions: - if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR2: - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR2[ext]) - self.WriteLn('# End of this set of suffix rules') - - - self.WriteTarget(spec, configs, deps, - extra_link_deps + link_deps, extra_outputs, part_of_all) - - # Update global list of target outputs, used in dependency tracking. - target_outputs[qualified_target] = self.alias - - # Update global list of link dependencies. - if self.type == 'static_library': - target_link_deps[qualified_target] = [self.output] - elif self.type == 'shared_library': - # Anyone that uses us transitively depend on all of our link - # dependencies. 
- target_link_deps[qualified_target] = [self.output] + link_deps - - self.fp.close() - - - def WriteSubMake(self, output_filename, makefile_path, targets, build_dir): - """Write a "sub-project" Makefile. - - This is a small, wrapper Makefile that calls the top-level Makefile to build - the targets from a single gyp file (i.e. a sub-project). - - Arguments: - output_filename: sub-project Makefile name to write - makefile_path: path to the top-level Makefile - targets: list of "all" targets for this sub-project - build_dir: build output directory, relative to the sub-project - """ - print 'Generating %s' % output_filename - - ensure_directory_exists(output_filename) - self.fp = open(output_filename, 'w') - self.fp.write(header) - # For consistency with other builders, put sub-project build output in the - # sub-project dir (see test/subdirectory/gyptest-subdir-all.py). - self.WriteLn('export builddir_name ?= %s' % - os.path.join(os.path.dirname(output_filename), build_dir)) - self.WriteLn('.PHONY: all') - self.WriteLn('all:') - if makefile_path: - makefile_path = ' -C ' + makefile_path - self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets))) - self.fp.close() - - - def WriteActions(self, actions, extra_sources, extra_outputs, part_of_all): - """Write Makefile code for any 'actions' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - actions (used to make other pieces dependent on these - actions) - part_of_all: flag indicating this target is part of 'all' - """ - for action in actions: - name = self.target + '_' + action['action_name'] - self.WriteLn('### Rules for action "%s":' % action['action_name']) - inputs = action['inputs'] - outputs = action['outputs'] - - # Build up a list of outputs. - # Collect the output dirs we'll need. - dirs = set() - for out in outputs: - dir = os.path.split(out)[0] - if dir: - dirs.add(dir) - if int(action.get('process_outputs_as_sources', False)): - extra_sources += outputs - - # Write the actual command. - command = gyp.common.EncodePOSIXShellList(action['action']) - if 'message' in action: - self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message'])) - else: - self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name)) - if len(dirs) > 0: - command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command - # Set LD_LIBRARY_PATH in case the action runs an executable from this - # build which links to shared libs from this build. - if self.path: - cd_action = 'cd %s; ' % Sourceify(self.path) - else: - cd_action = '' - # actions run on the host, so they should in theory only use host - # libraries, but until everything is made cross-compile safe, also use - # target libraries. - # TODO(piman): when everything is cross-compile safe, remove lib.target - self.WriteLn('cmd_%s = export LD_LIBRARY_PATH=$(builddir)/lib.host:' - '$(builddir)/lib.target:$$LD_LIBRARY_PATH; %s%s' - % (name, cd_action, command)) - self.WriteLn() - outputs = map(self.Absolutify, outputs) - # The makefile rules are all relative to the top dir, but the gyp actions - # are defined relative to their containing dir. This replaces the obj - # variable for the action rule with an absolute version so that the output - # goes in the right place. - # Only write the 'obj' and 'builddir' rules for the "primary" output (:1); - # it's superfluous for the "extra outputs", and this avoids accidentally - # writing duplicate dummy rules for those outputs. 
- self.WriteMakeRule(outputs[:1], ['obj := $(abs_obj)']) - self.WriteMakeRule(outputs[:1], ['builddir := $(abs_builddir)']) - self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)), - part_of_all=part_of_all, command=name) - - # Stuff the outputs in a variable so we can refer to them later. - outputs_variable = 'action_%s_outputs' % name - self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs))) - extra_outputs.append('$(%s)' % outputs_variable) - self.WriteLn() - - self.WriteLn() - - - def WriteRules(self, rules, extra_sources, extra_outputs, part_of_all): - """Write Makefile code for any 'rules' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - rules (used to make other pieces dependent on these rules) - part_of_all: flag indicating this target is part of 'all' - """ - for rule in rules: - name = self.target + '_' + rule['rule_name'] - count = 0 - self.WriteLn('### Generated for rule %s:' % name) - - all_outputs = [] - - for rule_source in rule['rule_sources']: - dirs = set() - rule_source_basename = os.path.basename(rule_source) - (rule_source_root, rule_source_ext) = \ - os.path.splitext(rule_source_basename) - - outputs = [self.ExpandInputRoot(out, rule_source_root) - for out in rule['outputs']] - for out in outputs: - dir = os.path.dirname(out) - if dir: - dirs.add(dir) - if int(rule.get('process_outputs_as_sources', False)): - extra_sources.append(out) - all_outputs += outputs - inputs = map(Sourceify, map(self.Absolutify, [rule_source] + - rule.get('inputs', []))) - actions = ['$(call do_cmd,%s_%d)' % (name, count)] - - if name == 'resources_grit': - # HACK: This is ugly. Grit intentionally doesn't touch the - # timestamp of its output file when the file doesn't change, - # which is fine in hash-based dependency systems like scons - # and forge, but not kosher in the make world. After some - # discussion, hacking around it here seems like the least - # amount of pain. - actions += ['@touch --no-create $@'] - - # Only write the 'obj' and 'builddir' rules for the "primary" output - # (:1); it's superfluous for the "extra outputs", and this avoids - # accidentally writing duplicate dummy rules for those outputs. - self.WriteMakeRule(outputs[:1], ['obj := $(abs_obj)']) - self.WriteMakeRule(outputs[:1], ['builddir := $(abs_builddir)']) - self.WriteMakeRule(outputs, inputs + ['FORCE_DO_CMD'], actions) - self.WriteLn('all_deps += %s' % ' '.join(outputs)) - - action = [self.ExpandInputRoot(ac, rule_source_root) - for ac in rule['action']] - mkdirs = '' - if len(dirs) > 0: - mkdirs = 'mkdir -p %s; ' % ' '.join(dirs) - if self.path: - cd_action = 'cd %s; ' % Sourceify(self.path) - else: - cd_action = '' - # Set LD_LIBRARY_PATH in case the rule runs an executable from this - # build which links to shared libs from this build. - # rules run on the host, so they should in theory only use host - # libraries, but until everything is made cross-compile safe, also use - # target libraries. 
- # TODO(piman): when everything is cross-compile safe, remove lib.target - self.WriteLn( - "cmd_%(name)s_%(count)d = export LD_LIBRARY_PATH=" - "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; " - "%(cd_action)s%(mkdirs)s%(action)s" % { - 'action': gyp.common.EncodePOSIXShellList(action), - 'cd_action': cd_action, - 'count': count, - 'mkdirs': mkdirs, - 'name': name, - }) - self.WriteLn( - 'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % { - 'count': count, - 'name': name, - }) - self.WriteLn() - count += 1 - - outputs_variable = 'rule_%s_outputs' % name - self.WriteList(all_outputs, outputs_variable) - extra_outputs.append('$(%s)' % outputs_variable) - - self.WriteLn('### Finished generating for rule: %s' % name) - self.WriteLn() - self.WriteLn('### Finished generating for all rules') - self.WriteLn('') - - - def WriteCopies(self, copies, extra_outputs, part_of_all): - """Write Makefile code for any 'copies' from the gyp input. - - extra_outputs: a list that will be filled in with any outputs of this action - (used to make other pieces dependent on this action) - part_of_all: flag indicating this target is part of 'all' - """ - self.WriteLn('### Generated for copy rule.') - - variable = self.target + '_copies' - outputs = [] - for copy in copies: - for path in copy['files']: - path = Sourceify(self.Absolutify(path)) - filename = os.path.split(path)[1] - output = Sourceify(self.Absolutify(os.path.join(copy['destination'], - filename))) - self.WriteDoCmd([output], [path], 'copy', part_of_all) - outputs.append(output) - self.WriteLn('%s = %s' % (variable, ' '.join(outputs))) - extra_outputs.append('$(%s)' % variable) - self.WriteLn() - - - def WriteSources(self, configs, deps, sources, - extra_outputs, extra_link_deps, - part_of_all): - """Write Makefile code for any 'sources' from the gyp input. - These are source files necessary to build the current target. - - configs, deps, sources: input from gyp. - extra_outputs: a list of extra outputs this action should be dependent on; - used to serialize action/rules before compilation - extra_link_deps: a list that will be filled in with any outputs of - compilation (to be used in link lines) - part_of_all: flag indicating this target is part of 'all' - """ - - # Write configuration-specific variables for CFLAGS, etc. - for configname in sorted(configs.keys()): - config = configs[configname] - self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D', - quoter=EscapeCppDefine) - self.WriteLn("# Flags passed to both C and C++ files."); - self.WriteList(config.get('cflags'), 'CFLAGS_%s' % configname) - self.WriteLn("# Flags passed to only C (and not C++) files."); - self.WriteList(config.get('cflags_c'), 'CFLAGS_C_%s' % configname) - self.WriteLn("# Flags passed to only C++ (and not C) files."); - self.WriteList(config.get('cflags_cc'), 'CFLAGS_CC_%s' % configname) - includes = config.get('include_dirs') - if includes: - includes = map(Sourceify, map(self.Absolutify, includes)) - self.WriteList(includes, 'INCS_%s' % configname, prefix='-I') - - sources = filter(Compilable, sources) - objs = map(self.Objectify, map(self.Absolutify, map(Target, sources))) - self.WriteList(objs, 'OBJS') - - self.WriteLn('# Add to the list of files we specially track ' - 'dependencies for.') - self.WriteLn('all_deps += $(OBJS)') - self.WriteLn() - - # Make sure our dependencies are built first. 
- if deps: - self.WriteMakeRule(['$(OBJS)'], deps, - comment = 'Make sure our dependencies are built ' - 'before any of us.', - order_only = True) - - # Make sure the actions and rules run first. - # If they generate any extra headers etc., the per-.o file dep tracking - # will catch the proper rebuilds, so order only is still ok here. - if extra_outputs: - self.WriteMakeRule(['$(OBJS)'], extra_outputs, - comment = 'Make sure our actions/rules run ' - 'before any of us.', - order_only = True) - - if objs: - extra_link_deps.append('$(OBJS)') - self.WriteLn("""\ -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual.""") - self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)") - self.WriteLn("$(OBJS): GYP_CFLAGS := $(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_C_$(BUILDTYPE)) " - "$(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))") - self.WriteLn("$(OBJS): GYP_CXXFLAGS := $(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_CC_$(BUILDTYPE)) " - "$(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))") - - self.WriteLn() - - - def ComputeOutput(self, spec): - """Return the 'output' (full output path) of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - '$(obj)/baz/libfoobar.so' - """ - output = None - target = spec['target_name'] - target_prefix = '' - target_ext = '' - path = os.path.join('$(obj).' + self.toolset, self.path) - if self.type == 'static_library': - if target[:3] == 'lib': - target = target[3:] - target_prefix = 'lib' - target_ext = '.a' - elif self.type in ('loadable_module', 'shared_library'): - if target[:3] == 'lib': - target = target[3:] - target_prefix = 'lib' - target_ext = '.so' - elif self.type == 'none': - target = '%s.stamp' % target - elif self.type == 'settings': - return None - elif self.type == 'executable': - path = os.path.join('$(builddir)') - else: - print ("ERROR: What output file should be generated?", - "typ", self.type, "target", target) - - path = spec.get('product_dir', path) - target_prefix = spec.get('product_prefix', target_prefix) - target = spec.get('product_name', target) - product_ext = spec.get('product_extension') - if product_ext: - target_ext = '.' + product_ext - - return os.path.join(path, target_prefix + target + target_ext) - - - def ComputeDeps(self, spec): - """Compute the dependencies of a gyp spec. - - Returns a tuple (deps, link_deps), where each is a list of - filenames that will need to be put in front of make for either - building (deps) or linking (link_deps). - """ - deps = [] - link_deps = [] - if 'dependencies' in spec: - deps.extend([target_outputs[dep] for dep in spec['dependencies'] - if target_outputs[dep]]) - for dep in spec['dependencies']: - if dep in target_link_deps: - link_deps.extend(target_link_deps[dep]) - deps.extend(link_deps) - # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)? - # This hack makes it work: - # link_deps.extend(spec.get('libraries', [])) - return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) - - - def WriteTarget(self, spec, configs, deps, link_deps, extra_outputs, - part_of_all): - """Write Makefile code to produce the final target of the gyp spec. - - spec, configs: input from gyp. 
- deps, link_deps: dependency lists; see ComputeDeps() - extra_outputs: any extra outputs that our target should depend on - part_of_all: flag indicating this target is part of 'all' - """ - - self.WriteLn('### Rules for final target.') - - if extra_outputs: - self.WriteMakeRule([self.output], extra_outputs, - comment = 'Build our special outputs first.', - order_only = True) - self.WriteMakeRule(extra_outputs, deps, - comment=('Preserve order dependency of ' - 'special output on deps.'), - order_only = True, - multiple_output_trick = False) - - if self.type not in ('settings', 'none'): - for configname in sorted(configs.keys()): - config = configs[configname] - self.WriteList(config.get('ldflags'), 'LDFLAGS_%s' % configname) - libraries = spec.get('libraries') - if libraries: - # Remove duplicate entries - libraries = gyp.common.uniquer(libraries) - self.WriteList(libraries, 'LIBS') - self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' % self.output) - self.WriteLn('%s: LIBS := $(LIBS)' % self.output) - - if self.type == 'executable': - self.WriteDoCmd([self.output], link_deps, 'link', part_of_all) - elif self.type == 'static_library': - self.WriteDoCmd([self.output], link_deps, 'alink', part_of_all) - elif self.type in ('loadable_module', 'shared_library'): - self.WriteDoCmd([self.output], link_deps, 'solink', part_of_all) - elif self.type == 'none': - # Write a stamp line. - self.WriteDoCmd([self.output], deps, 'touch', part_of_all) - elif self.type == 'settings': - # Only used for passing flags around. - pass - else: - print "WARNING: no output for", self.type, target - - # Add an alias for each target (if there are any outputs). - # Installable target aliases are created below. - if ((self.output and self.output != self.target) and - (self.type not in self._INSTALLABLE_TARGETS)): - self.WriteMakeRule([self.target], [self.output], - comment='Add target alias', phony = True) - if part_of_all: - self.WriteMakeRule(['all'], [self.target], - comment = 'Add target alias to "all" target.', - phony = True) - - # Add special-case rules for our installable targets. - # 1) They need to install to the build dir or "product" dir. - # 2) They get shortcuts for building (e.g. "make chrome"). - # 3) They are part of "make all". - if self.type in self._INSTALLABLE_TARGETS: - if self.type in ('shared_library'): - file_desc = 'shared library' - # Install all shared libs into a common directory (per toolset) for - # convenient access with LD_LIBRARY_PATH. - binpath = '$(builddir)/lib.%s/%s' % (self.toolset, self.alias) - else: - file_desc = 'executable' - binpath = '$(builddir)/' + self.alias - installable_deps = [self.output] - # Point the target alias to the final binary output. - self.WriteMakeRule([self.target], [binpath], - comment='Add target alias', phony = True) - if binpath != self.output: - self.WriteDoCmd([binpath], [self.output], 'copy', - comment = 'Copy this to the %s output path.' % - file_desc, part_of_all=part_of_all) - installable_deps.append(binpath) - if self.output != self.alias and self.alias != self.target: - self.WriteMakeRule([self.alias], installable_deps, - comment = 'Short alias for building this %s.' % - file_desc, phony = True) - if part_of_all: - self.WriteMakeRule(['all'], [binpath], - comment = 'Add %s to "all" target.' % file_desc, - phony = True) - - - def WriteList(self, list, variable=None, prefix='', quoter=QuoteIfNecessary): - """Write a variable definition that is a list of values. - - E.g. 
WriteList(['a','b'], 'foo', prefix='blah') writes out - foo = blaha blahb - but in a pretty-printed style. - """ - self.fp.write(variable + " := ") - if list: - list = [quoter(prefix + l) for l in list] - self.fp.write(" \\\n\t".join(list)) - self.fp.write("\n\n") - - - def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None): - """Write a Makefile rule that uses do_cmd. - - This makes the outputs dependent on the command line that was run, - as well as support the V= make command line flag. - """ - self.WriteMakeRule(outputs, inputs, - actions = ['$(call do_cmd,%s)' % command], - comment = comment, - force = True) - # Add our outputs to the list of targets we read depfiles from. - self.WriteLn('all_deps += %s' % ' '.join(outputs)) - - - def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, - order_only=False, force=False, phony=False, - multiple_output_trick=True): - """Write a Makefile rule, with some extra tricks. - - outputs: a list of outputs for the rule (note: this is not directly - supported by make; see comments below) - inputs: a list of inputs for the rule - actions: a list of shell commands to run for the rule - comment: a comment to put in the Makefile above the rule (also useful - for making this Python script's code self-documenting) - order_only: if true, makes the dependency order-only - force: if true, include FORCE_DO_CMD as an order-only dep - phony: if true, the rule does not actually generate the named output, the - output is just a name to run the rule - multiple_output_trick: if true (the default), perform tricks such as dummy - rules to avoid problems with multiple outputs. - """ - if comment: - self.WriteLn('# ' + comment) - if phony: - self.WriteLn('.PHONY: ' + ' '.join(outputs)) - # TODO(evanm): just make order_only a list of deps instead of these hacks. - if order_only: - order_insert = '| ' - else: - order_insert = '' - if force: - force_append = ' FORCE_DO_CMD' - else: - force_append = '' - if actions: - self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0]) - self.WriteLn('%s: %s%s%s' % (outputs[0], order_insert, ' '.join(inputs), - force_append)) - if actions: - for action in actions: - self.WriteLn('\t%s' % action) - if multiple_output_trick and len(outputs) > 1: - # If we have more than one output, a rule like - # foo bar: baz - # that for *each* output we must run the action, potentially - # in parallel. That is not what we're trying to write -- what - # we want is that we run the action once and it generates all - # the files. - # http://www.gnu.org/software/hello/manual/automake/Multiple-Outputs.html - # discusses this problem and has this solution: - # 1) Write the naive rule that would produce parallel runs of - # the action. - # 2) Make the outputs seralized on each other, so we won't start - # a parallel run until the first run finishes, at which point - # we'll have generated all the outputs and we're done. - self.WriteLn('%s: %s' % (' '.join(outputs[1:]), outputs[0])) - # Add a dummy command to the "extra outputs" rule, otherwise make seems to - # think these outputs haven't (couldn't have?) changed, and thus doesn't - # flag them as changed (i.e. include in '$?') when evaluating dependent - # rules, which in turn causes do_cmd() to skip running dependent commands. 
- self.WriteLn('%s: ;' % (' '.join(outputs[1:]))) - self.WriteLn() - - - def WriteLn(self, text=''): - self.fp.write(text + '\n') - - - def Objectify(self, path): - """Convert a path to its output directory form.""" - if '$(' in path: - path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset) - return path - return '$(obj).%s/$(TARGET)/%s' % (self.toolset, path) - - def Absolutify(self, path): - """Convert a subdirectory-relative path into a base-relative path. - Skips over paths that contain variables.""" - if '$(' in path: - return path - return os.path.normpath(os.path.join(self.path, path)) - - - def FixupArgPath(self, arg): - if '/' in arg or '.h.' in arg: - return self.Absolutify(arg) - return arg - - - def ExpandInputRoot(self, template, expansion): - if '%(INPUT_ROOT)s' not in template: - return template - path = template % { 'INPUT_ROOT': expansion } - if not os.path.dirname(path): - # If it's just the file name, turn it into a path so FixupArgPath() - # will know to Absolutify() it. - path = os.path.join('.', path) - return path - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params['options'] - generator_flags = params.get('generator_flags', {}) - builddir_name = generator_flags.get('output_dir', 'out') - - def CalculateMakefilePath(build_file, base_name): - """Determine where to write a Makefile for a given gyp file.""" - # Paths in gyp files are relative to the .gyp file, but we want - # paths relative to the source root for the master makefile. Grab - # the path of the .gyp file as the base to relativize against. - # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.depth) - # We write the file in the base_path directory. - output_file = os.path.join(options.depth, base_path, base_name) - if options.generator_output: - output_file = os.path.join(options.generator_output, output_file) - return (base_path, output_file) - - # TODO: search for the first non-'Default' target. This can go - # away when we add verification that all targets have the - # necessary configurations. - default_configuration = None - toolsets = set([target_dicts[target]['toolset'] for target in target_list]) - for target in target_list: - spec = target_dicts[target] - if spec['default_configuration'] != 'Default': - default_configuration = spec['default_configuration'] - break - if not default_configuration: - default_configuration = 'Default' - - srcdir = '.' - makefile_name = 'Makefile' + options.suffix - makefile_path = os.path.join(options.depth, makefile_name) - if options.generator_output: - global srcdir_prefix - makefile_path = os.path.join(options.generator_output, makefile_path) - srcdir = gyp.common.RelativePath(srcdir, options.generator_output) - srcdir_prefix = '$(srcdir)/' - ensure_directory_exists(makefile_path) - root_makefile = open(makefile_path, 'w') - root_makefile.write(SHARED_HEADER_SRCDIR % srcdir) - root_makefile.write(SHARED_HEADER_BUILDDIR_NAME % builddir_name) - root_makefile.write(SHARED_HEADER.replace('__default_configuration__', - default_configuration)) - for toolset in toolsets: - root_makefile.write('TOOLSET := %s\n' % toolset) - root_makefile.write(ROOT_HEADER_SUFFIX_RULES) - - # Find the list of targets that derive from the gyp file(s) being built. 
- needed_targets = set() - for build_file in params['build_files']: - for target in gyp.common.AllTargets(target_list, target_dicts, build_file): - needed_targets.add(target) - - build_files = set() - include_list = set() - for qualified_target in target_list: - build_file, target, toolset = gyp.common.ParseQualifiedTarget( - qualified_target) - build_files.add(gyp.common.RelativePath(build_file, options.depth)) - included_files = data[build_file]['included_files'] - for included_file in included_files: - # The included_files entries are relative to the dir of the build file - # that included them, so we have to undo that and then make them relative - # to the root dir. - relative_include_file = gyp.common.RelativePath( - gyp.common.UnrelativePath(included_file, build_file), options.depth) - abs_include_file = os.path.abspath(relative_include_file) - # If the include file is from the ~/.gyp dir, we should use absolute path - # so that relocating the src dir doesn't break the path. - if (params['home_dot_gyp'] and - abs_include_file.startswith(params['home_dot_gyp'])): - build_files.add(abs_include_file) - else: - build_files.add(relative_include_file) - - (base_path, output_file) = CalculateMakefilePath(build_file, - target + '.' + toolset + options.suffix + '.mk') - - spec = target_dicts[qualified_target] - configs = spec['configurations'] - - writer = MakefileWriter() - writer.Write(qualified_target, base_path, output_file, spec, configs, - part_of_all=qualified_target in needed_targets) - - # Our root_makefile lives at the source root. Compute the relative path - # from there to the output_file for including. - mkfile_rel_path = gyp.common.RelativePath(output_file, - os.path.dirname(makefile_path)) - include_list.add('include ' + mkfile_rel_path + '\n') - - # Write out per-gyp (sub-project) Makefiles. - depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd()) - for build_file in build_files: - # The paths in build_files were relativized above, so undo that before - # testing against the non-relativized items in target_list and before - # calculating the Makefile path. - build_file = os.path.join(depth_rel_path, build_file) - gyp_targets = [target_dicts[target]['target_name'] for target in target_list - if target.startswith(build_file) and - target in needed_targets] - # Only generate Makefiles for gyp files with targets. - if not gyp_targets: - continue - (base_path, output_file) = CalculateMakefilePath(build_file, - os.path.splitext(os.path.basename(build_file))[0] + '.Makefile') - makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path), - os.path.dirname(output_file)) - writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets, - builddir_name) - - - # Write out the sorted list of includes. - root_makefile.write('\n') - for include in sorted(include_list): - root_makefile.write(include) - root_makefile.write('\n') - - # Write the target to regenerate the Makefile. 
- if generator_flags.get('auto_regeneration', True): - build_files_args = [gyp.common.RelativePath(filename, options.depth) - for filename in params['build_files_arg']] - gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'], - options.depth) - if not gyp_binary.startswith(os.sep): - gyp_binary = os.path.join('.', gyp_binary) - root_makefile.write("%s: %s\n\t%s\n" % ( - makefile_name, - ' '.join(map(Sourceify, build_files)), - gyp.common.EncodePOSIXShellList( - [gyp_binary, '-fmake'] + - gyp.RegenerateFlags(options) + - build_files_args))) - - root_makefile.write(SHARED_FOOTER) - - root_makefile.close() diff --git a/mozc_build_tools/gyp/pylib/gyp/generator/msvs.py b/mozc_build_tools/gyp/pylib/gyp/generator/msvs.py deleted file mode 100644 index 2c3ef97..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/generator/msvs.py +++ /dev/null @@ -1,1216 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import ntpath -import posixpath -import os -import re -import subprocess -import sys - -import gyp.MSVSNew as MSVSNew -import gyp.MSVSProject as MSVSProject -import gyp.MSVSToolFile as MSVSToolFile -import gyp.MSVSUserFile as MSVSUserFile -import gyp.MSVSVersion as MSVSVersion -import gyp.common - - -# Regular expression for validating Visual Studio GUIDs. If the GUID -# contains lowercase hex letters, MSVS will be fine. However, -# IncrediBuild BuildConsole will parse the solution file, but then -# silently skip building the target causing hard to track down errors. -# Note that this only happens with the BuildConsole, and does not occur -# if IncrediBuild is executed from inside Visual Studio. This regex -# validates that the string looks like a GUID with all uppercase hex -# letters. -VALID_MSVS_GUID_CHARS = re.compile('^[A-F0-9\-]+$') - - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '.exe', - 'STATIC_LIB_PREFIX': '', - 'SHARED_LIB_PREFIX': '', - 'STATIC_LIB_SUFFIX': '.lib', - 'SHARED_LIB_SUFFIX': '.dll', - 'INTERMEDIATE_DIR': '$(IntDir)', - 'SHARED_INTERMEDIATE_DIR': '$(OutDir)/obj/global_intermediate', - 'OS': 'win', - 'PRODUCT_DIR': '$(OutDir)', - 'LIB_DIR': '$(OutDir)/lib', - 'RULE_INPUT_ROOT': '$(InputName)', - 'RULE_INPUT_EXT': '$(InputExt)', - 'RULE_INPUT_NAME': '$(InputFileName)', - 'RULE_INPUT_PATH': '$(InputPath)', - 'CONFIGURATION_NAME': '$(ConfigurationName)', -} - - -# The msvs specific sections that hold paths -generator_additional_path_sections = [ - 'msvs_cygwin_dirs', - 'msvs_props', -] - -generator_additional_non_configuration_keys = [ - 'msvs_cygwin_dirs', - 'msvs_cygwin_shell', -] - -cached_username = None -cached_domain = None - -# TODO(gspencer): Switch the os.environ calls to be -# win32api.GetDomainName() and win32api.GetUserName() once the -# python version in depot_tools has been updated to work on Vista -# 64-bit. 
-def _GetDomainAndUserName(): - if sys.platform not in ('win32', 'cygwin'): - return ('DOMAIN', 'USERNAME') - global cached_username - global cached_domain - if not cached_domain or not cached_username: - domain = os.environ.get('USERDOMAIN') - username = os.environ.get('USERNAME') - if not domain or not username: - call = subprocess.Popen(['net', 'config', 'Workstation'], - stdout=subprocess.PIPE) - config = call.communicate()[0] - username_re = re.compile('^User name\s+(\S+)', re.MULTILINE) - username_match = username_re.search(config) - if username_match: - username = username_match.group(1) - domain_re = re.compile('^Logon domain\s+(\S+)', re.MULTILINE) - domain_match = domain_re.search(config) - if domain_match: - domain = domain_match.group(1) - cached_domain = domain - cached_username = username - return (cached_domain, cached_username) - -fixpath_prefix = None - -def _FixPath(path): - """Convert paths to a form that will make sense in a vcproj file. - - Arguments: - path: The path to convert, may contain / etc. - Returns: - The path with all slashes made into backslashes. - """ - if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$': - path = os.path.join(fixpath_prefix, path) - path = path.replace('/', '\\') - if len(path) > 0 and path[-1] == '\\': - path = path[:-1] - return path - - -def _SourceInFolders(sources, prefix=None, excluded=None): - """Converts a list split source file paths into a vcproj folder hierarchy. - - Arguments: - sources: A list of source file paths split. - prefix: A list of source file path layers meant to apply to each of sources. - Returns: - A hierarchy of filenames and MSVSProject.Filter objects that matches the - layout of the source tree. - For example: - _SourceInFolders([['a', 'bob1.c'], ['b', 'bob2.c']], prefix=['joe']) - --> - [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']), - MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])] - """ - if not prefix: prefix = [] - result = [] - excluded_result = [] - folders = dict() - # Gather files into the final result, excluded, or folders. - for s in sources: - if len(s) == 1: - filename = '\\'.join(prefix + s) - if filename in excluded: - excluded_result.append(filename) - else: - result.append(filename) - else: - if not folders.get(s[0]): - folders[s[0]] = [] - folders[s[0]].append(s[1:]) - # Add a folder for excluded files. - if excluded_result: - excluded_folder = MSVSProject.Filter('_excluded_files', - contents=excluded_result) - result.append(excluded_folder) - # Populate all the folders. - for f in folders: - contents = _SourceInFolders(folders[f], prefix=prefix + [f], - excluded=excluded) - contents = MSVSProject.Filter(f, contents=contents) - result.append(contents) - - return result - - -def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False): - if not value: return - # TODO(bradnelson): ugly hack, fix this more generally!!! 
- if 'Directories' in setting or 'Dependencies' in setting: - if type(value) == str: - value = value.replace('/', '\\') - else: - value = [i.replace('/', '\\') for i in value] - if not tools.get(tool_name): - tools[tool_name] = dict() - tool = tools[tool_name] - if tool.get(setting): - if only_if_unset: return - if type(tool[setting]) == list: - tool[setting] += value - else: - raise TypeError( - 'Appending "%s" to a non-list setting "%s" for tool "%s" is ' - 'not allowed, previous value: %s' % ( - value, setting, tool_name, str(tool[setting]))) - else: - tool[setting] = value - - -def _ConfigPlatform(config_data): - return config_data.get('msvs_configuration_platform', 'Win32') - - -def _ConfigBaseName(config_name, platform_name): - if config_name.endswith('_' + platform_name): - return config_name[0:-len(platform_name)-1] - else: - return config_name - - -def _ConfigFullName(config_name, config_data): - platform_name = _ConfigPlatform(config_data) - return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name) - - -def _PrepareActionRaw(spec, cmd, cygwin_shell, has_input_path, quote_cmd): - if cygwin_shell: - # Find path to cygwin. - cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0]) - # Prepare command. - direct_cmd = cmd - direct_cmd = [i.replace('$(IntDir)', - '`cygpath -m "${INTDIR}"`') for i in direct_cmd] - direct_cmd = [i.replace('$(OutDir)', - '`cygpath -m "${OUTDIR}"`') for i in direct_cmd] - if has_input_path: - direct_cmd = [i.replace('$(InputPath)', - '`cygpath -m "${INPUTPATH}"`') - for i in direct_cmd] - direct_cmd = ['"%s"' % i for i in direct_cmd] - direct_cmd = [i.replace('"', '\\"') for i in direct_cmd] - #direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd) - direct_cmd = ' '.join(direct_cmd) - # TODO(quote): regularize quoting path names throughout the module - cmd = ( - '"$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && ' - 'set CYGWIN=nontsec&& ') - if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0: - cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& ' - if direct_cmd.find('INTDIR') >= 0: - cmd += 'set INTDIR=$(IntDir)&& ' - if direct_cmd.find('OUTDIR') >= 0: - cmd += 'set OUTDIR=$(OutDir)&& ' - if has_input_path and direct_cmd.find('INPUTPATH') >= 0: - cmd += 'set INPUTPATH=$(InputPath) && ' - cmd += ( - 'bash -c "%(cmd)s"') - cmd = cmd % {'cygwin_dir': cygwin_dir, - 'cmd': direct_cmd} - return cmd - else: - # Convert cat --> type to mimic unix. - if cmd[0] == 'cat': - cmd = ['type'] + cmd[1:] - if quote_cmd: - # Support a mode for using cmd directly. - # Convert any paths to native form (first element is used directly). - # TODO(quote): regularize quoting path names throughout the module - direct_cmd = ([cmd[0].replace('/', '\\')] + - ['"%s"' % _FixPath(i) for i in cmd[1:]]) - else: - direct_cmd = ([cmd[0].replace('/', '\\')] + - [_FixPath(i) for i in cmd[1:]]) - # Collapse into a single command. - return ' '.join(direct_cmd) - -def _PrepareAction(spec, rule, has_input_path): - # Find path to cygwin. - cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0]) - - # Currently this weird argument munging is used to duplicate the way a - # python script would need to be run as part of the chrome tree. - # Eventually we should add some sort of rule_default option to set this - # per project. For now the behavior chrome needs is the default. 
- mcs = rule.get('msvs_cygwin_shell') - if mcs is None: - mcs = int(spec.get('msvs_cygwin_shell', 1)) - elif isinstance(mcs, str): - mcs = int(mcs) - quote_cmd = int(rule.get('msvs_quote_cmd', 1)) - return _PrepareActionRaw(spec, rule['action'], mcs, - has_input_path, quote_cmd) - - -def _PickPrimaryInput(inputs): - # Pick second input as the primary one, unless there's only one. - # TODO(bradnelson): this is a bit of a hack, - # find something more general. - if len(inputs) > 1: - return inputs[1] - else: - return inputs[0] - -def _SetRunAs(user_file, config_name, c_data, command, - environment={}, working_directory=""): - """Add a run_as rule to the user file. - - Arguments: - user_file: The MSVSUserFile to add the command to. - config_name: The name of the configuration to add it to - c_data: The dict of the configuration to add it to - command: The path to the command to execute. - args: An array of arguments to the command. (optional) - working_directory: Directory to run the command in. (optional) - """ - user_file.AddDebugSettings(_ConfigFullName(config_name, c_data), - command, environment, working_directory) - -def _AddCustomBuildTool(p, spec, inputs, outputs, description, cmd): - """Add a custom build tool to execute something. - - Arguments: - p: the target project - spec: the target project dict - inputs: list of inputs - outputs: list of outputs - description: description of the action - cmd: command line to execute - """ - inputs = [_FixPath(i) for i in inputs] - outputs = [_FixPath(i) for i in outputs] - tool = MSVSProject.Tool( - 'VCCustomBuildTool', { - 'Description': description, - 'AdditionalDependencies': ';'.join(inputs), - 'Outputs': ';'.join(outputs), - 'CommandLine': cmd, - }) - primary_input = _PickPrimaryInput(inputs) - # Add to the properties of primary input for each config. - for config_name, c_data in spec['configurations'].iteritems(): - p.AddFileConfig(primary_input, - _ConfigFullName(config_name, c_data), tools=[tool]) - - -def _RuleExpandPath(path, input_file): - """Given the input file to which a rule applied, string substitute a path. - - Arguments: - path: a path to string expand - input_file: the file to which the rule applied. - Returns: - The string substituted path. - """ - path = path.replace('$(InputName)', - os.path.splitext(os.path.split(input_file)[1])[0]) - path = path.replace('$(InputExt)', - os.path.splitext(os.path.split(input_file)[1])[1]) - path = path.replace('$(InputFileName)', os.path.split(input_file)[1]) - path = path.replace('$(InputPath)', input_file) - return path - - -def _FindRuleTriggerFiles(rule, sources): - """Find the list of files which a particular rule applies to. - - Arguments: - rule: the rule in question - sources: the set of all known source files for this project - Returns: - The list of sources that trigger a particular rule. - """ - rule_ext = rule['extension'] - return [s for s in sources if s.endswith('.' + rule_ext)] - - -def _RuleInputsAndOutputs(rule, trigger_file): - """Find the inputs and outputs generated by a rule. - - Arguments: - rule: the rule in question - sources: the set of all known source files for this project - Returns: - The pair of (inputs, outputs) involved in this rule. 
- """ - raw_inputs = rule.get('inputs', []) - raw_outputs = rule.get('outputs', []) - inputs = set() - outputs = set() - inputs.add(trigger_file) - for i in raw_inputs: - inputs.add(_RuleExpandPath(i, trigger_file)) - for o in raw_outputs: - outputs.add(_RuleExpandPath(o, trigger_file)) - return (inputs, outputs) - - -def _GenerateNativeRules(p, rules, output_dir, spec, options): - """Generate a native rules file. - - Arguments: - p: the target project - rules: the set of rules to include - output_dir: the directory in which the project/gyp resides - spec: the project dict - options: global generator options - """ - rules_filename = '%s%s.rules' % (spec['target_name'], - options.suffix) - rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename)) - rules_file.Create(spec['target_name']) - # Add each rule. - for r in rules: - rule_name = r['rule_name'] - rule_ext = r['extension'] - inputs = [_FixPath(i) for i in r.get('inputs', [])] - outputs = [_FixPath(i) for i in r.get('outputs', [])] - cmd = _PrepareAction(spec, r, has_input_path=True) - rules_file.AddCustomBuildRule(name=rule_name, - description=r.get('message', rule_name), - extensions=[rule_ext], - additional_dependencies=inputs, - outputs=outputs, - cmd=cmd) - # Write out rules file. - rules_file.Write() - - # Add rules file to project. - p.AddToolFile(rules_filename) - - -def _Cygwinify(path): - path = path.replace('$(OutDir)', '$(OutDirCygwin)') - path = path.replace('$(IntDir)', '$(IntDirCygwin)') - return path - - -def _GenerateExternalRules(p, rules, output_dir, spec, - sources, options, actions_to_add): - """Generate an external makefile to do a set of rules. - - Arguments: - p: the target project - rules: the list of rules to include - output_dir: path containing project and gyp files - spec: project specification data - sources: set of sources known - options: global generator options - """ - filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix) - file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename)) - # Find cygwin style versions of some paths. - file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n') - file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n') - # Gather stuff needed to emit all: target. - all_inputs = set() - all_outputs = set() - all_output_dirs = set() - first_outputs = [] - for rule in rules: - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - inputs, outputs = _RuleInputsAndOutputs(rule, tf) - all_inputs.update(set(inputs)) - all_outputs.update(set(outputs)) - # Only use one target from each rule as the dependency for - # 'all' so we don't try to build each rule multiple times. - first_outputs.append(list(outputs)[0]) - # Get the unique output directories for this rule. - output_dirs = [os.path.split(i)[0] for i in outputs] - for od in output_dirs: - all_output_dirs.add(od) - first_outputs_cyg = [_Cygwinify(i) for i in first_outputs] - # Write out all: target, including mkdir for each output directory. - file.write('all: %s\n' % ' '.join(first_outputs_cyg)) - for od in all_output_dirs: - file.write('\tmkdir -p %s\n' % od) - file.write('\n') - # Define how each output is generated. - for rule in rules: - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - # Get all the inputs and outputs for this rule for this trigger file. 
- inputs, outputs = _RuleInputsAndOutputs(rule, tf) - inputs = [_Cygwinify(i) for i in inputs] - outputs = [_Cygwinify(i) for i in outputs] - # Prepare the command line for this rule. - cmd = [_RuleExpandPath(c, tf) for c in rule['action']] - cmd = ['"%s"' % i for i in cmd] - cmd = ' '.join(cmd) - # Add it to the makefile. - file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs))) - file.write('\t%s\n\n' % cmd) - # Close up the file. - file.close() - - # Add makefile to list of sources. - sources.add(filename) - # Add a build action to call makefile. - cmd = ['make', - 'OutDir=$(OutDir)', - 'IntDir=$(IntDir)', - '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}', - '-f', filename] - cmd = _PrepareActionRaw(spec, cmd, True, False, True) - # TODO(bradnelson): this won't be needed if we have a better way to pick - # the primary input. - all_inputs = list(all_inputs) - all_inputs.insert(1, filename) - actions_to_add.append({ - 'inputs': [_FixPath(i) for i in all_inputs], - 'outputs': [_FixPath(i) for i in all_outputs], - 'description': 'Running %s' % cmd, - 'cmd': cmd, - }) - - -def _EscapeEnvironmentVariableExpansion(s): - """Escapes any % characters so that Windows-style environment variable - expansions will leave them alone. - See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile - to understand why we have to do this.""" - s = s.replace('%', '%%') - return s - - -quote_replacer_regex = re.compile(r'(\\*)"') -def _EscapeCommandLineArgument(s): - """Escapes a Windows command-line argument, so that the Win32 - CommandLineToArgv function will turn the escaped result back into the - original string. See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx - ("Parsing C++ Command-Line Arguments") to understand why we have to do - this.""" - def replace(match): - # For a literal quote, CommandLineToArgv requires an odd number of - # backslashes preceding it, and it produces half as many literal backslashes - # (rounded down). So we need to produce 2n+1 backslashes. - return 2 * match.group(1) + '\\"' - # Escape all quotes so that they are interpreted literally. - s = quote_replacer_regex.sub(replace, s) - # Now add unescaped quotes so that any whitespace is interpreted literally. - s = '"' + s + '"' - return s - - -delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)') -def _EscapeVCProjCommandLineArgListItem(s): - """The VCProj format stores string lists in a single string using commas and - semi-colons as separators, which must be quoted if they are to be - interpreted literally. However, command-line arguments may already have - quotes, and the VCProj parser is ignorant of the backslash escaping - convention used by CommandLineToArgv, so the command-line quotes and the - VCProj quotes may not be the same quotes. So to store a general - command-line argument in a VCProj list, we need to parse the existing - quoting according to VCProj's convention and quote any delimiters that are - not already quoted by that convention. The quotes that we add will also be - seen by CommandLineToArgv, so if backslashes precede them then we also have - to escape those backslashes according to the CommandLineToArgv - convention.""" - def replace(match): - # For a non-literal quote, CommandLineToArgv requires an even number of - # backslashes preceding it, and it produces half as many literal - # backslashes. So we need to produce 2n backslashes. 
- return 2 * match.group(1) + '"' + match.group(2) + '"' - list = s.split('"') - # The unquoted segments are at the even-numbered indices. - for i in range(0, len(list), 2): - list[i] = delimiters_replacer_regex.sub(replace, list[i]) - # Concatenate back into a single string - s = '"'.join(list) - if len(list) % 2 == 0: - # String ends while still quoted according to VCProj's convention. This - # means the delimiter and the next list item that follow this one in the - # .vcproj file will be misinterpreted as part of this item. There is nothing - # we can do about this. Adding an extra quote would correct the problem in - # the VCProj but cause the same problem on the final command-line. Moving - # the item to the end of the list does works, but that's only possible if - # there's only one such item. Let's just warn the user. - print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' + - 'quotes in ' + s) - return s - - -def _EscapeCppDefine(s): - """Escapes a CPP define so that it will reach the compiler unaltered.""" - s = _EscapeEnvironmentVariableExpansion(s) - s = _EscapeCommandLineArgument(s) - s = _EscapeVCProjCommandLineArgListItem(s) - return s - - -def _GenerateRules(p, output_dir, options, spec, - sources, excluded_sources, - actions_to_add): - """Generate all the rules for a particular project. - - Arguments: - output_dir: directory to emit rules to - options: global options passed to the generator - spec: the specification for this project - sources: the set of all known source files in this project - excluded_sources: the set of sources excluded from normal processing - actions_to_add: deferred list of actions to add in - """ - rules = spec.get('rules', []) - rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))] - rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))] - - # Handle rules that use a native rules file. - if rules_native: - _GenerateNativeRules(p, rules_native, output_dir, spec, options) - - # Handle external rules (non-native rules). - if rules_external: - _GenerateExternalRules(p, rules_external, output_dir, spec, - sources, options, actions_to_add) - - # Add outputs generated by each rule (if applicable). - for rule in rules: - # Done if not processing outputs as sources. - if int(rule.get('process_outputs_as_sources', False)): - # Add in the outputs from this rule. - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - inputs, outputs = _RuleInputsAndOutputs(rule, tf) - inputs.remove(tf) - sources.update(inputs) - excluded_sources.update(inputs) - sources.update(outputs) - - -def _GenerateProject(vcproj_filename, build_file, spec, options, version): - """Generates a vcproj file. - - Arguments: - vcproj_filename: Filename of the vcproj file to generate. - build_file: Filename of the .gyp file that the vcproj file comes from. - spec: The target dictionary containing the properties of the target. - """ - # Pluck out the default configuration. - default_config = spec['configurations'][spec['default_configuration']] - # Decide the guid of the project. - guid = default_config.get('msvs_guid') - if guid: - if VALID_MSVS_GUID_CHARS.match(guid) == None: - raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' % - (guid, VALID_MSVS_GUID_CHARS.pattern)) - guid = '{%s}' % guid - - # Skip emitting anything if told to with msvs_existing_vcproj option. 
- if default_config.get('msvs_existing_vcproj'): - return guid - - #print 'Generating %s' % vcproj_filename - - vcproj_dir = os.path.dirname(vcproj_filename) - if vcproj_dir and not os.path.exists(vcproj_dir): - os.makedirs(vcproj_dir) - - # Gather list of unique platforms. - platforms = set() - for configuration in spec['configurations']: - platforms.add(_ConfigPlatform(spec['configurations'][configuration])) - platforms = list(platforms) - - p = MSVSProject.Writer(vcproj_filename, version=version) - p.Create(spec['target_name'], guid=guid, platforms=platforms) - - # Create the user file. - (domain, username) = _GetDomainAndUserName() - vcuser_filename = '.'.join([vcproj_filename, domain, username, 'user']) - user_file = MSVSUserFile.Writer(vcuser_filename, version=version) - user_file.Create(spec['target_name']) - - # Get directory project file is in. - gyp_dir = os.path.split(vcproj_filename)[0] - - # Pick target configuration type. - try: - config_type = { - 'executable': '1', # .exe - 'shared_library': '2', # .dll - 'loadable_module': '2', # .dll - 'static_library': '4', # .lib - 'none': '10', # Utility type - 'dummy_executable': '1', # .exe - }[spec['type']] - except KeyError, e: - if spec.get('type'): - raise Exception('Target type %s is not a valid target type for ' - 'target %s in %s.' % - (spec['type'], spec['target_name'], build_file)) - else: - raise Exception('Missing type field for target %s in %s.' % - (spec['target_name'], build_file)) - - for config_name, c in spec['configurations'].iteritems(): - # Process each configuration. - vsprops_dirs = c.get('msvs_props', []) - vsprops_dirs = [_FixPath(i) for i in vsprops_dirs] - - # Prepare the list of tools as a dictionary. - tools = dict() - - # Add in msvs_settings. - for tool in c.get('msvs_settings', {}): - settings = c['msvs_settings'][tool] - for setting in settings: - _ToolAppend(tools, tool, setting, settings[setting]) - - # Add in includes. - # TODO(bradnelson): include_dirs should really be flexible enough not to - # require this sort of thing. - include_dirs = ( - c.get('include_dirs', []) + - c.get('msvs_system_include_dirs', [])) - resource_include_dirs = c.get('resource_include_dirs', include_dirs) - include_dirs = [_FixPath(i) for i in include_dirs] - resource_include_dirs = [_FixPath(i) for i in resource_include_dirs] - _ToolAppend(tools, 'VCCLCompilerTool', - 'AdditionalIncludeDirectories', include_dirs) - _ToolAppend(tools, 'VCResourceCompilerTool', - 'AdditionalIncludeDirectories', resource_include_dirs) - - # Add in libraries. - libraries = spec.get('libraries', []) - # Strip out -l, as it is not used on windows (but is needed so we can pass - # in libraries that are assumed to be in the default library path). - libraries = [re.sub('^(\-l)', '', lib) for lib in libraries] - # Add them. - _ToolAppend(tools, 'VCLinkerTool', - 'AdditionalDependencies', libraries) - - # Select a name for the output file. 
- output_file_map = { - 'executable': ('VCLinkerTool', '$(OutDir)\\', '.exe'), - 'shared_library': ('VCLinkerTool', '$(OutDir)\\', '.dll'), - 'loadable_module': ('VCLinkerTool', '$(OutDir)\\', '.dll'), - 'static_library': ('VCLibrarianTool', '$(OutDir)\\lib\\', '.lib'), - 'dummy_executable': ('VCLinkerTool', '$(IntDir)\\', '.junk'), - } - output_file_props = output_file_map.get(spec['type']) - if output_file_props and int(spec.get('msvs_auto_output_file', 1)): - vc_tool, out_dir, suffix = output_file_props - out_dir = spec.get('product_dir', out_dir) - product_extension = spec.get('product_extension') - if product_extension: - suffix = '.' + product_extension - prefix = spec.get('product_prefix', '') - product_name = spec.get('product_name', '$(ProjectName)') - out_file = ntpath.join(out_dir, prefix + product_name + suffix) - _ToolAppend(tools, vc_tool, 'OutputFile', out_file, - only_if_unset=True) - - # Add defines. - defines = [] - for d in c.get('defines', []): - if type(d) == list: - fd = '='.join([str(dpart) for dpart in d]) - else: - fd = str(d) - fd = _EscapeCppDefine(fd) - defines.append(fd) - - _ToolAppend(tools, 'VCCLCompilerTool', - 'PreprocessorDefinitions', defines) - _ToolAppend(tools, 'VCResourceCompilerTool', - 'PreprocessorDefinitions', defines) - - # Change program database directory to prevent collisions. - _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName', - '$(IntDir)\\$(ProjectName)\\vc80.pdb') - - # Add disabled warnings. - disabled_warnings = [str(i) for i in c.get('msvs_disabled_warnings', [])] - _ToolAppend(tools, 'VCCLCompilerTool', - 'DisableSpecificWarnings', disabled_warnings) - - # Add Pre-build. - prebuild = c.get('msvs_prebuild') - _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild) - - # Add Post-build. - postbuild = c.get('msvs_postbuild') - _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild) - - # Turn on precompiled headers if appropriate. - header = c.get('msvs_precompiled_header') - if header: - header = os.path.split(header)[1] - _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2') - _ToolAppend(tools, 'VCCLCompilerTool', - 'PrecompiledHeaderThrough', header) - _ToolAppend(tools, 'VCCLCompilerTool', - 'ForcedIncludeFiles', header) - - # Loadable modules don't generate import libraries; - # tell dependent projects to not expect one. - if spec['type'] == 'loadable_module': - _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true') - - # Set the module definition file if any. - if spec['type'] in ['shared_library', 'loadable_module']: - def_files = [s for s in spec.get('sources', []) if s.endswith('.def')] - if len(def_files) == 1: - _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', - _FixPath(def_files[0])) - elif def_files: - raise ValueError('Multiple module definition files in one target, ' - 'target %s lists multiple .def files: %s' % ( - spec['target_name'], ' '.join(def_files))) - - # Convert tools to expected form. - tool_list = [] - for tool, settings in tools.iteritems(): - # Collapse settings with lists. - settings_fixed = {} - for setting, value in settings.iteritems(): - if type(value) == list: - if ((tool == 'VCLinkerTool' and - setting == 'AdditionalDependencies') or - setting == 'AdditionalOptions'): - settings_fixed[setting] = ' '.join(value) - else: - settings_fixed[setting] = ';'.join(value) - else: - settings_fixed[setting] = value - # Add in this tool. - tool_list.append(MSVSProject.Tool(tool, settings_fixed)) - - # Prepare configuration attributes. 
- prepared_attrs = {} - source_attrs = c.get('msvs_configuration_attributes', {}) - for a in source_attrs: - prepared_attrs[a] = source_attrs[a] - # Add props files. - if vsprops_dirs: - prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs) - # Set configuration type. - prepared_attrs['ConfigurationType'] = config_type - if not prepared_attrs.has_key('OutputDirectory'): - prepared_attrs['OutputDirectory'] = '$(SolutionDir)$(ConfigurationName)' - if not prepared_attrs.has_key('IntermediateDirectory'): - intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)' - prepared_attrs['IntermediateDirectory'] = intermediate - - # Add in this configuration. - p.AddConfig(_ConfigFullName(config_name, c), - attrs=prepared_attrs, tools=tool_list) - - # Prepare list of sources and excluded sources. - sources = set(spec.get('sources', [])) - excluded_sources = set() - # Add in the gyp file. - gyp_file = os.path.split(build_file)[1] - sources.add(gyp_file) - # Add in 'action' inputs and outputs. - for a in spec.get('actions', []): - inputs = a.get('inputs') - if not inputs: - # This is an action with no inputs. Make the primary input - # by the .gyp file itself so Visual Studio has a place to - # hang the custom build rule. - inputs = [gyp_file] - a['inputs'] = inputs - primary_input = _PickPrimaryInput(inputs) - inputs = set(inputs) - sources.update(inputs) - inputs.remove(primary_input) - excluded_sources.update(inputs) - if int(a.get('process_outputs_as_sources', False)): - outputs = set(a.get('outputs', [])) - sources.update(outputs) - # Add in 'copies' inputs and outputs. - for cpy in spec.get('copies', []): - files = set(cpy.get('files', [])) - sources.update(files) - - # Add rules. - actions_to_add = [] - _GenerateRules(p, gyp_dir, options, spec, - sources, excluded_sources, - actions_to_add) - - # Exclude excluded sources coming into the generator. - excluded_sources.update(set(spec.get('sources_excluded', []))) - # Add excluded sources into sources for good measure. - sources.update(excluded_sources) - # Convert to proper windows form. - # NOTE: sources goes from being a set to a list here. - # NOTE: excluded_sources goes from being a set to a list here. - sources = [_FixPath(i) for i in sources] - # Convert to proper windows form. - excluded_sources = [_FixPath(i) for i in excluded_sources] - - # If any non-native rules use 'idl' as an extension exclude idl files. - # Gather a list here to use later. - using_idl = False - for rule in spec.get('rules', []): - if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)): - using_idl = True - break - if using_idl: - excluded_idl = [i for i in sources if i.endswith('.idl')] - else: - excluded_idl = [] - - # List of precompiled header related keys. - precomp_keys = [ - 'msvs_precompiled_header', - 'msvs_precompiled_source', - ] - - # Gather a list of precompiled header related sources. - precompiled_related = [] - for config_name, c in spec['configurations'].iteritems(): - for k in precomp_keys: - f = c.get(k) - if f: - precompiled_related.append(_FixPath(f)) - - # Find the excluded ones, minus the precompiled header related ones. - fully_excluded = [i for i in excluded_sources if i not in precompiled_related] - - # Convert to folders and the right slashes. - sources = [i.split('\\') for i in sources] - sources = _SourceInFolders(sources, excluded=fully_excluded) - # Add in dummy file for type none. - if spec['type'] == 'dummy_executable': - # Pull in a dummy main so it can link successfully. 
- dummy_relpath = gyp.common.RelativePath( - options.depth + '\\tools\\gyp\\gyp_dummy.c', gyp_dir) - sources.append(dummy_relpath) - # Add in files. - p.AddFiles(sources) - - # Add deferred actions to add. - for a in actions_to_add: - _AddCustomBuildTool(p, spec, - inputs=a['inputs'], - outputs=a['outputs'], - description=a['description'], - cmd=a['cmd']) - - # Exclude excluded sources from being built. - for f in excluded_sources: - for config_name, c in spec['configurations'].iteritems(): - precomped = [_FixPath(c.get(i, '')) for i in precomp_keys] - # Don't do this for ones that are precompiled header related. - if f not in precomped: - p.AddFileConfig(f, _ConfigFullName(config_name, c), - {'ExcludedFromBuild': 'true'}) - - # If any non-native rules use 'idl' as an extension exclude idl files. - # Exclude them now. - for config_name, c in spec['configurations'].iteritems(): - for f in excluded_idl: - p.AddFileConfig(f, _ConfigFullName(config_name, c), - {'ExcludedFromBuild': 'true'}) - - # Add in tool files (rules). - tool_files = set() - for config_name, c in spec['configurations'].iteritems(): - for f in c.get('msvs_tool_files', []): - tool_files.add(f) - for f in tool_files: - p.AddToolFile(f) - - # Handle pre-compiled headers source stubs specially. - for config_name, c in spec['configurations'].iteritems(): - source = c.get('msvs_precompiled_source') - if source: - source = _FixPath(source) - # UsePrecompiledHeader=1 for if using precompiled headers. - tool = MSVSProject.Tool('VCCLCompilerTool', - {'UsePrecompiledHeader': '1'}) - p.AddFileConfig(source, _ConfigFullName(config_name, c), - {}, tools=[tool]) - - # Add actions. - actions = spec.get('actions', []) - for a in actions: - cmd = _PrepareAction(spec, a, has_input_path=False) - _AddCustomBuildTool(p, spec, - inputs=a.get('inputs', []), - outputs=a.get('outputs', []), - description=a.get('message', a['action_name']), - cmd=cmd) - - # Add run_as and test targets. - has_run_as = False - if spec.get('run_as') or int(spec.get('test', 0)): - has_run_as = True - run_as = spec.get('run_as', { - 'action' : ['$(TargetPath)', '--gtest_print_time'], - }) - working_directory = run_as.get('working_directory', '.') - action = run_as.get('action', []) - environment = run_as.get('environment', []) - for config_name, c_data in spec['configurations'].iteritems(): - _SetRunAs(user_file, config_name, c_data, - action, environment, working_directory) - - # Add copies. - for cpy in spec.get('copies', []): - for src in cpy.get('files', []): - dst = os.path.join(cpy['destination'], os.path.basename(src)) - # _AddCustomBuildTool() will call _FixPath() on the inputs and - # outputs, so do the same for our generated command line. - if src.endswith('/'): - src_bare = src[:-1] - base_dir = posixpath.split(src_bare)[0] - outer_dir = posixpath.split(src_bare)[1] - cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % ( - _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir) - _AddCustomBuildTool(p, spec, - inputs=[src, build_file], - outputs=['dummy_copies', dst], - description='Copying %s to %s' % (src, dst), - cmd=cmd) - else: - cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % ( - _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst)) - _AddCustomBuildTool(p, spec, - inputs=[src], outputs=[dst], - description='Copying %s to %s' % (src, dst), - cmd=cmd) - - # Write it out. - p.Write() - - # Write out the user file, but only if we need to. - if has_run_as: - user_file.Write() - - # Return the guid so we can refer to it elsewhere. 
- return p.guid - - -def _GetPathDict(root, path): - if path == '': - return root - parent, folder = os.path.split(path) - parent_dict = _GetPathDict(root, parent) - if folder not in parent_dict: - parent_dict[folder] = dict() - return parent_dict[folder] - - -def _DictsToFolders(base_path, bucket, flat): - # Convert to folders recursively. - children = [] - for folder, contents in bucket.iteritems(): - if type(contents) == dict: - folder_children = _DictsToFolders(os.path.join(base_path, folder), - contents, flat) - if flat: - children += folder_children - else: - folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder), - name='(' + folder + ')', - entries=folder_children) - children.append(folder_children) - else: - children.append(contents) - return children - - -def _CollapseSingles(parent, node): - # Recursively explorer the tree of dicts looking for projects which are - # the sole item in a folder which has the same name as the project. Bring - # such projects up one level. - if (type(node) == dict and - len(node) == 1 and - node.keys()[0] == parent + '.vcproj'): - return node[node.keys()[0]] - if type(node) != dict: - return node - for child in node.keys(): - node[child] = _CollapseSingles(child, node[child]) - return node - - -def _GatherSolutionFolders(project_objs, flat): - root = {} - # Convert into a tree of dicts on path. - for p in project_objs.keys(): - gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2] - gyp_dir = os.path.dirname(gyp_file) - path_dict = _GetPathDict(root, gyp_dir) - path_dict[target + '.vcproj'] = project_objs[p] - # Walk down from the top until we hit a folder that has more than one entry. - # In practice, this strips the top-level "src/" dir from the hierarchy in - # the solution. - while len(root) == 1 and type(root[root.keys()[0]]) == dict: - root = root[root.keys()[0]] - # Collapse singles. - root = _CollapseSingles('', root) - # Merge buckets until everything is a root entry. - return _DictsToFolders('', root, flat) - - -def _ProjectObject(sln, qualified_target, project_objs, projects): - # Done if this project has an object. - if project_objs.get(qualified_target): - return project_objs[qualified_target] - # Get dependencies for this project. - spec = projects[qualified_target]['spec'] - deps = spec.get('dependencies', []) - # Get objects for each dependency. - deps = [_ProjectObject(sln, d, project_objs, projects) for d in deps] - # Find relative path to vcproj from sln. - vcproj_rel_path = gyp.common.RelativePath( - projects[qualified_target]['vcproj_path'], os.path.split(sln)[0]) - vcproj_rel_path = _FixPath(vcproj_rel_path) - # Prepare a dict indicating which project configurations are used for which - # solution configurations for this target. - config_platform_overrides = {} - for config_name, c in spec['configurations'].iteritems(): - config_fullname = _ConfigFullName(config_name, c) - platform = c.get('msvs_target_platform', _ConfigPlatform(c)) - fixed_config_fullname = '%s|%s' % ( - _ConfigBaseName(config_name, _ConfigPlatform(c)), platform) - config_platform_overrides[config_fullname] = fixed_config_fullname - # Create object for this project. - obj = MSVSNew.MSVSProject( - vcproj_rel_path, - name=spec['target_name'], - guid=projects[qualified_target]['guid'], - dependencies=deps, - config_platform_overrides=config_platform_overrides) - # Store it to the list of objects. - project_objs[qualified_target] = obj - # Return project object. 
- return obj - - -def CalculateVariables(default_variables, params): - """Generated variables that require params to be known.""" - - generator_flags = params.get('generator_flags', {}) - - # Select project file format version (if unset, default to auto detecting). - msvs_version = \ - MSVSVersion.SelectVisualStudioVersion(generator_flags.get('msvs_version', - 'auto')) - # Stash msvs_version for later (so we don't have to probe the system twice). - params['msvs_version'] = msvs_version - - # Set a variable so conditions can be based on msvs_version. - default_variables['MSVS_VERSION'] = msvs_version.ShortName() - - # To determine processor word size on Windows, in addition to checking - # PROCESSOR_ARCHITECTURE (which reflects the word size of the current - # process), it is also necessary to check PROCESSOR_ARCITEW6432 (which - # contains the actual word size of the system when running thru WOW64). - if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or - os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0): - default_variables['MSVS_OS_BITS'] = 64 - else: - default_variables['MSVS_OS_BITS'] = 32 - - -def GenerateOutput(target_list, target_dicts, data, params): - """Generate .sln and .vcproj files. - - This is the entry point for this generator. - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - data: Dictionary containing per .gyp data. - """ - global fixpath_prefix - - options = params['options'] - generator_flags = params.get('generator_flags', {}) - - # Get the project file format version back out of where we stashed it in - # GeneratorCalculatedVariables. - msvs_version = params['msvs_version'] - - # Prepare the set of configurations. - configs = set() - for qualified_target in target_list: - build_file = gyp.common.BuildFile(qualified_target) - spec = target_dicts[qualified_target] - for config_name, c in spec['configurations'].iteritems(): - configs.add(_ConfigFullName(config_name, c)) - configs = list(configs) - - # Generate each project. - projects = {} - for qualified_target in target_list: - build_file = gyp.common.BuildFile(qualified_target) - spec = target_dicts[qualified_target] - if spec['toolset'] != 'target': - raise Exception( - 'Multiple toolsets not supported in msvs build (target %s)' % - qualified_target) - default_config = spec['configurations'][spec['default_configuration']] - vcproj_filename = default_config.get('msvs_existing_vcproj') - if not vcproj_filename: - vcproj_filename = spec['target_name'] + options.suffix + '.vcproj' - vcproj_path = os.path.join(os.path.split(build_file)[0], vcproj_filename) - if options.generator_output: - projectDirPath = os.path.dirname(os.path.abspath(vcproj_path)) - vcproj_path = os.path.join(options.generator_output, vcproj_path) - fixpath_prefix = gyp.common.RelativePath(projectDirPath, - os.path.dirname(vcproj_path)) - projects[qualified_target] = { - 'vcproj_path': vcproj_path, - 'guid': _GenerateProject(vcproj_path, build_file, - spec, options, version=msvs_version), - 'spec': spec, - } - - fixpath_prefix = None - - for build_file in data.keys(): - # Validate build_file extension - if build_file[-4:] != '.gyp': - continue - sln_path = build_file[:-4] + options.suffix + '.sln' - if options.generator_output: - sln_path = os.path.join(options.generator_output, sln_path) - #print 'Generating %s' % sln_path - # Get projects in the solution, and their dependents. 
- sln_projects = gyp.common.BuildFileTargets(target_list, build_file) - sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects) - # Convert projects to Project Objects. - project_objs = {} - for p in sln_projects: - _ProjectObject(sln_path, p, project_objs, projects) - # Create folder hierarchy. - root_entries = _GatherSolutionFolders( - project_objs, flat=msvs_version.FlatSolution()) - # Create solution. - sln = MSVSNew.MSVSSolution(sln_path, - entries=root_entries, - variants=configs, - websiteProperties=False, - version=msvs_version) - sln.Write() diff --git a/mozc_build_tools/gyp/pylib/gyp/generator/scons.py b/mozc_build_tools/gyp/pylib/gyp/generator/scons.py deleted file mode 100644 index c5338e9..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/generator/scons.py +++ /dev/null @@ -1,1047 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import gyp -import gyp.common -import gyp.SCons as SCons -import os.path -import pprint -import re - - -# TODO: remove when we delete the last WriteList() call in this module -WriteList = SCons.WriteList - - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': '${LIBPREFIX}', - 'SHARED_LIB_PREFIX': '${SHLIBPREFIX}', - 'STATIC_LIB_SUFFIX': '${LIBSUFFIX}', - 'SHARED_LIB_SUFFIX': '${SHLIBSUFFIX}', - 'INTERMEDIATE_DIR': '${INTERMEDIATE_DIR}', - 'SHARED_INTERMEDIATE_DIR': '${SHARED_INTERMEDIATE_DIR}', - 'OS': 'linux', - 'PRODUCT_DIR': '$TOP_BUILDDIR', - 'SHARED_LIB_DIR': '$LIB_DIR', - 'LIB_DIR': '$LIB_DIR', - 'RULE_INPUT_ROOT': '${SOURCE.filebase}', - 'RULE_INPUT_EXT': '${SOURCE.suffix}', - 'RULE_INPUT_NAME': '${SOURCE.file}', - 'RULE_INPUT_PATH': '${SOURCE.abspath}', - 'CONFIGURATION_NAME': '${CONFIG_NAME}', -} - -# Tell GYP how to process the input for us. -generator_handles_variants = True -generator_wants_absolute_build_file_paths = True - - -def FixPath(path, prefix): - if not os.path.isabs(path) and not path[0] == '$': - path = prefix + path - return path - - -header = """\ -# This file is generated; do not edit. -""" - - -_alias_template = """ -if GetOption('verbose'): - _action = Action([%(action)s]) -else: - _action = Action([%(action)s], %(message)s) -_outputs = env.Alias( - ['_%(target_name)s_action'], - %(inputs)s, - _action -) -env.AlwaysBuild(_outputs) -""" - -_run_as_template = """ -if GetOption('verbose'): - _action = Action([%(action)s]) -else: - _action = Action([%(action)s], %(message)s) -""" - -_run_as_template_suffix = """ -_run_as_target = env.Alias('run_%(target_name)s', target_files, _action) -env.Requires(_run_as_target, [ - Alias('%(target_name)s'), -]) -env.AlwaysBuild(_run_as_target) -""" - -_command_template = """ -if GetOption('verbose'): - _action = Action([%(action)s]) -else: - _action = Action([%(action)s], %(message)s) -_outputs = env.Command( - %(outputs)s, - %(inputs)s, - _action -) -""" - -# This is copied from the default SCons action, updated to handle symlinks. 
-_copy_action_template = """ -import shutil -import SCons.Action - -def _copy_files_or_dirs_or_symlinks(dest, src): - SCons.Node.FS.invalidate_node_memos(dest) - if SCons.Util.is_List(src) and os.path.isdir(dest): - for file in src: - shutil.copy2(file, dest) - return 0 - elif os.path.islink(src): - linkto = os.readlink(src) - os.symlink(linkto, dest) - return 0 - elif os.path.isfile(src): - return shutil.copy2(src, dest) - else: - return shutil.copytree(src, dest, 1) - -def _copy_files_or_dirs_or_symlinks_str(dest, src): - return 'Copying %s to %s ...' % (src, dest) - -GYPCopy = SCons.Action.ActionFactory(_copy_files_or_dirs_or_symlinks, - _copy_files_or_dirs_or_symlinks_str, - convert=str) -""" - -_rule_template = """ -%(name)s_additional_inputs = %(inputs)s -%(name)s_outputs = %(outputs)s -def %(name)s_emitter(target, source, env): - return (%(name)s_outputs, source + %(name)s_additional_inputs) -if GetOption('verbose'): - %(name)s_action = Action([%(action)s]) -else: - %(name)s_action = Action([%(action)s], %(message)s) -env['BUILDERS']['%(name)s'] = Builder(action=%(name)s_action, - emitter=%(name)s_emitter) - -_outputs = [] -_processed_input_files = [] -for infile in input_files: - if (type(infile) == type('') - and not os.path.isabs(infile) - and not infile[0] == '$'): - infile = %(src_dir)r + infile - if str(infile).endswith('.%(extension)s'): - _generated = env.%(name)s(infile) - env.Precious(_generated) - _outputs.append(_generated) - %(process_outputs_as_sources_line)s - else: - _processed_input_files.append(infile) -prerequisites.extend(_outputs) -input_files = _processed_input_files -""" - -_spawn_hack = """ -import re -import SCons.Platform.posix -needs_shell = re.compile('["\\'>= 2.5: - return os.sysconf('SC_NPROCESSORS_ONLN') - else: # Mac OS X with Python < 2.5: - return int(os.popen2("sysctl -n hw.ncpu")[1].read()) - # Windows: - if os.environ.has_key('NUMBER_OF_PROCESSORS'): - return max(int(os.environ.get('NUMBER_OF_PROCESSORS', '1')), 1) - return 1 # Default - -# Support PROGRESS= to show progress in different ways. -p = ARGUMENTS.get('PROGRESS') -if p == 'spinner': - Progress(['/\\r', '|\\r', '\\\\\\r', '-\\r'], - interval=5, - file=open('/dev/tty', 'w')) -elif p == 'name': - Progress('$TARGET\\r', overwrite=True, file=open('/dev/tty', 'w')) - -# Set the default -j value based on the number of processors. -SetOption('num_jobs', GetProcessorCount() + 1) - -# Have SCons use its cached dependency information. -SetOption('implicit_cache', 1) - -# Only re-calculate MD5 checksums if a timestamp has changed. -Decider('MD5-timestamp') - -# Since we set the -j value by default, suppress SCons warnings about being -# unable to support parallel build on versions of Python with no threading. -default_warnings = ['no-no-parallel-support'] -SetOption('warn', default_warnings + GetOption('warn')) - -AddOption('--mode', nargs=1, dest='conf_list', default=[], - action='append', help='Configuration to build.') - -AddOption('--verbose', dest='verbose', default=False, - action='store_true', help='Verbose command-line output.') - - -# -sconscript_file_map = %(sconscript_files)s - -class LoadTarget: - ''' - Class for deciding if a given target sconscript is to be included - based on a list of included target names, optionally prefixed with '-' - to exclude a target name. - ''' - def __init__(self, load): - ''' - Initialize a class with a list of names for possible loading. 
- - Arguments: - load: list of elements in the LOAD= specification - ''' - self.included = set([c for c in load if not c.startswith('-')]) - self.excluded = set([c[1:] for c in load if c.startswith('-')]) - - if not self.included: - self.included = set(['all']) - - def __call__(self, target): - ''' - Returns True if the specified target's sconscript file should be - loaded, based on the initialized included and excluded lists. - ''' - return (target in self.included or - ('all' in self.included and not target in self.excluded)) - -if 'LOAD' in ARGUMENTS: - load = ARGUMENTS['LOAD'].split(',') -else: - load = [] -load_target = LoadTarget(load) - -sconscript_files = [] -for target, sconscript in sconscript_file_map.iteritems(): - if load_target(target): - sconscript_files.append(sconscript) - - -target_alias_list= [] - -conf_list = GetOption('conf_list') -if conf_list: - # In case the same --mode= value was specified multiple times. - conf_list = list(set(conf_list)) -else: - conf_list = [%(default_configuration)r] - -sconsbuild_dir = Dir(%(sconsbuild_dir)s) - - -def FilterOut(self, **kw): - kw = SCons.Environment.copy_non_reserved_keywords(kw) - for key, val in kw.items(): - envval = self.get(key, None) - if envval is None: - # No existing variable in the environment, so nothing to delete. - continue - - for vremove in val: - # Use while not if, so we can handle duplicates. - while vremove in envval: - envval.remove(vremove) - - self[key] = envval - - # TODO(sgk): SCons.Environment.Append() has much more logic to deal - # with various types of values. We should handle all those cases in here - # too. (If variable is a dict, etc.) - - -non_compilable_suffixes = { - 'LINUX' : set([ - '.bdic', - '.css', - '.dat', - '.fragment', - '.gperf', - '.h', - '.hh', - '.hpp', - '.html', - '.hxx', - '.idl', - '.in', - '.in0', - '.in1', - '.js', - '.mk', - '.rc', - '.sigs', - '', - ]), - 'WINDOWS' : set([ - '.h', - '.hh', - '.hpp', - '.dat', - '.idl', - '.in', - '.in0', - '.in1', - ]), -} - -def compilable(env, file): - base, ext = os.path.splitext(str(file)) - if ext in non_compilable_suffixes[env['TARGET_PLATFORM']]: - return False - return True - -def compilable_files(env, sources): - return [x for x in sources if compilable(env, x)] - -def GypProgram(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.Program(target, source, *args, **kw) - if env.get('INCREMENTAL'): - env.Precious(result) - return result - -def GypTestProgram(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.Program(target, source, *args, **kw) - if env.get('INCREMENTAL'): - env.Precious(*result) - return result - -def GypLibrary(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.Library(target, source, *args, **kw) - return result - -def GypLoadableModule(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.LoadableModule(target, source, *args, **kw) - return result - -def GypStaticLibrary(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.StaticLibrary(target, source, *args, **kw) - return result - -def GypSharedLibrary(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.SharedLibrary(target, source, *args, **kw) - if env.get('INCREMENTAL'): - env.Precious(result) - return result - -def add_gyp_methods(env): - env.AddMethod(GypProgram) - env.AddMethod(GypTestProgram) - env.AddMethod(GypLibrary) - 
env.AddMethod(GypLoadableModule) - env.AddMethod(GypStaticLibrary) - env.AddMethod(GypSharedLibrary) - - env.AddMethod(FilterOut) - - env.AddMethod(compilable) - - -base_env = Environment( - tools = %(scons_tools)s, - INTERMEDIATE_DIR='$OBJ_DIR/${COMPONENT_NAME}/_${TARGET_NAME}_intermediate', - LIB_DIR='$TOP_BUILDDIR/lib', - OBJ_DIR='$TOP_BUILDDIR/obj', - SCONSBUILD_DIR=sconsbuild_dir.abspath, - SHARED_INTERMEDIATE_DIR='$OBJ_DIR/_global_intermediate', - SRC_DIR=Dir(%(src_dir)r), - TARGET_PLATFORM='LINUX', - TOP_BUILDDIR='$SCONSBUILD_DIR/$CONFIG_NAME', - LIBPATH=['$LIB_DIR'], -) - -if not GetOption('verbose'): - base_env.SetDefault( - ARCOMSTR='Creating library $TARGET', - ASCOMSTR='Assembling $TARGET', - CCCOMSTR='Compiling $TARGET', - CONCATSOURCECOMSTR='ConcatSource $TARGET', - CXXCOMSTR='Compiling $TARGET', - LDMODULECOMSTR='Building loadable module $TARGET', - LINKCOMSTR='Linking $TARGET', - MANIFESTCOMSTR='Updating manifest for $TARGET', - MIDLCOMSTR='Compiling IDL $TARGET', - PCHCOMSTR='Precompiling $TARGET', - RANLIBCOMSTR='Indexing $TARGET', - RCCOMSTR='Compiling resource $TARGET', - SHCCCOMSTR='Compiling $TARGET', - SHCXXCOMSTR='Compiling $TARGET', - SHLINKCOMSTR='Linking $TARGET', - SHMANIFESTCOMSTR='Updating manifest for $TARGET', - ) - -add_gyp_methods(base_env) - -for conf in conf_list: - env = base_env.Clone(CONFIG_NAME=conf) - SConsignFile(env.File('$TOP_BUILDDIR/.sconsign').abspath) - for sconscript in sconscript_files: - target_alias = env.SConscript(sconscript, exports=['env']) - if target_alias: - target_alias_list.extend(target_alias) - -Default(Alias('all', target_alias_list)) - -help_fmt = ''' -Usage: hammer [SCONS_OPTIONS] [VARIABLES] [TARGET] ... - -Local command-line build options: - --mode=CONFIG Configuration to build: - --mode=Debug [default] - --mode=Release - --verbose Print actual executed command lines. - -Supported command-line build variables: - LOAD=[module,...] 
Comma-separated list of components to load in the - dependency graph ('-' prefix excludes) - PROGRESS=type Display a progress indicator: - name: print each evaluated target name - spinner: print a spinner every 5 targets - -The following TARGET names can also be used as LOAD= module names: - -%%s -''' - -if GetOption('help'): - def columnar_text(items, width=78, indent=2, sep=2): - result = [] - colwidth = max(map(len, items)) + sep - cols = (width - indent) / colwidth - if cols < 1: - cols = 1 - rows = (len(items) + cols - 1) / cols - indent = '%%*s' %% (indent, '') - sep = indent - for row in xrange(0, rows): - result.append(sep) - for i in xrange(row, len(items), rows): - result.append('%%-*s' %% (colwidth, items[i])) - sep = '\\n' + indent - result.append('\\n') - return ''.join(result) - - load_list = set(sconscript_file_map.keys()) - target_aliases = set(map(str, target_alias_list)) - - common = load_list and target_aliases - load_only = load_list - common - target_only = target_aliases - common - help_text = [help_fmt %% columnar_text(sorted(list(common)))] - if target_only: - fmt = "The following are additional TARGET names:\\n\\n%%s\\n" - help_text.append(fmt %% columnar_text(sorted(list(target_only)))) - if load_only: - fmt = "The following are additional LOAD= module names:\\n\\n%%s\\n" - help_text.append(fmt %% columnar_text(sorted(list(load_only)))) - Help(''.join(help_text)) -""" - -# TEMPLATE END -############################################################################# - - -def GenerateSConscriptWrapper(build_file, build_file_data, name, - output_filename, sconscript_files, - default_configuration): - """ - Generates the "wrapper" SConscript file (analogous to the Visual Studio - solution) that calls all the individual target SConscript files. - """ - output_dir = os.path.dirname(output_filename) - src_dir = build_file_data['_DEPTH'] - src_dir_rel = gyp.common.RelativePath(src_dir, output_dir) - if not src_dir_rel: - src_dir_rel = '.' - scons_settings = build_file_data.get('scons_settings', {}) - sconsbuild_dir = scons_settings.get('sconsbuild_dir', '#') - scons_tools = scons_settings.get('tools', ['default']) - - sconscript_file_lines = ['dict('] - for target in sorted(sconscript_files.keys()): - sconscript = sconscript_files[target] - sconscript_file_lines.append(' %s = %r,' % (target, sconscript)) - sconscript_file_lines.append(')') - - fp = open(output_filename, 'w') - fp.write(header) - fp.write(_wrapper_template % { - 'default_configuration' : default_configuration, - 'name' : name, - 'scons_tools' : repr(scons_tools), - 'sconsbuild_dir' : repr(sconsbuild_dir), - 'sconscript_files' : '\n'.join(sconscript_file_lines), - 'src_dir' : src_dir_rel, - }) - fp.close() - - # Generate the SConstruct file that invokes the wrapper SConscript. - dir, fname = os.path.split(output_filename) - SConstruct = os.path.join(dir, 'SConstruct') - fp = open(SConstruct, 'w') - fp.write(header) - fp.write('SConscript(%s)\n' % repr(fname)) - fp.close() - - -def TargetFilename(target, build_file=None, output_suffix=''): - """Returns the .scons file name for the specified target. - """ - if build_file is None: - build_file, target = gyp.common.ParseQualifiedTarget(target)[:2] - output_file = os.path.join(os.path.dirname(build_file), - target + output_suffix + '.scons') - return output_file - - -def GenerateOutput(target_list, target_dicts, data, params): - """ - Generates all the output files for the specified targets. 
- """ - options = params['options'] - - if options.generator_output: - def output_path(filename): - return filename.replace(params['cwd'], options.generator_output) - else: - def output_path(filename): - return filename - - default_configuration = None - - for qualified_target in target_list: - spec = target_dicts[qualified_target] - if spec['toolset'] != 'target': - raise Exception( - 'Multiple toolsets not supported in scons build (target %s)' % - qualified_target) - scons_target = SCons.Target(spec) - if scons_target.is_ignored: - continue - - # TODO: assumes the default_configuration of the first target - # non-Default target is the correct default for all targets. - # Need a better model for handle variation between targets. - if (not default_configuration and - spec['default_configuration'] != 'Default'): - default_configuration = spec['default_configuration'] - - build_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[:2] - output_file = TargetFilename(target, build_file, options.suffix) - if options.generator_output: - output_file = output_path(output_file) - - if not spec.has_key('libraries'): - spec['libraries'] = [] - - # Add dependent static library targets to the 'libraries' value. - deps = spec.get('dependencies', []) - spec['scons_dependencies'] = [] - for d in deps: - td = target_dicts[d] - target_name = td['target_name'] - spec['scons_dependencies'].append("Alias('%s')" % target_name) - if td['type'] in ('static_library', 'shared_library'): - libname = td.get('product_name', target_name) - spec['libraries'].append('lib' + libname) - if td['type'] == 'loadable_module': - prereqs = spec.get('scons_prerequisites', []) - # TODO: parameterize with <(SHARED_LIBRARY_*) variables? - td_target = SCons.Target(td) - td_target.target_prefix = '${SHLIBPREFIX}' - td_target.target_suffix = '${SHLIBSUFFIX}' - - GenerateSConscript(output_file, spec, build_file, data[build_file]) - - if not default_configuration: - default_configuration = 'Default' - - for build_file in sorted(data.keys()): - path, ext = os.path.splitext(build_file) - if ext != '.gyp': - continue - output_dir, basename = os.path.split(path) - output_filename = path + '_main' + options.suffix + '.scons' - - all_targets = gyp.common.AllTargets(target_list, target_dicts, build_file) - sconscript_files = {} - for t in all_targets: - scons_target = SCons.Target(target_dicts[t]) - if scons_target.is_ignored: - continue - bf, target = gyp.common.ParseQualifiedTarget(t)[:2] - target_filename = TargetFilename(target, bf, options.suffix) - tpath = gyp.common.RelativePath(target_filename, output_dir) - sconscript_files[target] = tpath - - output_filename = output_path(output_filename) - if sconscript_files: - GenerateSConscriptWrapper(build_file, data[build_file], basename, - output_filename, sconscript_files, - default_configuration) diff --git a/mozc_build_tools/gyp/pylib/gyp/generator/xcode.py b/mozc_build_tools/gyp/pylib/gyp/generator/xcode.py deleted file mode 100644 index ff28ef2..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/generator/xcode.py +++ /dev/null @@ -1,1139 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import filecmp -import gyp.common -import gyp.xcodeproj_file -import errno -import os -import posixpath -import re -import shutil -import subprocess -import tempfile - - -# Project files generated by this module will use _intermediate_var as a -# custom Xcode setting whose value is a DerivedSources-like directory that's -# project-specific and configuration-specific. The normal choice, -# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive -# as it is likely that multiple targets within a single project file will want -# to access the same set of generated files. The other option, -# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific, -# it is not configuration-specific. INTERMEDIATE_DIR is defined as -# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION). -_intermediate_var = 'INTERMEDIATE_DIR' - -# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all -# targets that share the same BUILT_PRODUCTS_DIR. -_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR' - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': 'lib', - 'SHARED_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'SHARED_LIB_SUFFIX': '.dylib', - # INTERMEDIATE_DIR is a place for targets to build up intermediate products. - # It is specific to each build environment. It is only guaranteed to exist - # and be constant within the context of a project, corresponding to a single - # input file. Some build environments may allow their intermediate directory - # to be shared on a wider scale, but this is not guaranteed. - 'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var, - 'OS': 'mac', - 'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)', - 'LIB_DIR': '$(BUILT_PRODUCTS_DIR)', - 'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)', - 'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)', - 'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)', - 'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)', - 'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var, - 'CONFIGURATION_NAME': '$(CONFIGURATION)', -} - -# The Xcode-specific sections that hold paths. -generator_additional_path_sections = [ - 'mac_bundle_resources', - # 'mac_framework_dirs', input already handles _dirs endings. -] - -# The Xcode-specific keys that exist on targets and aren't moved down to -# configurations. -generator_additional_non_configuration_keys = [ - 'mac_bundle', - 'mac_bundle_resources', - 'xcode_create_dependents_test_runner', -] - -# We want to let any rules apply to files that are resources also. 
-generator_extra_sources_for_rules = [ - 'mac_bundle_resources', -] - - -def CreateXCConfigurationList(configuration_names): - xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []}) - for configuration_name in configuration_names: - xcbc = gyp.xcodeproj_file.XCBuildConfiguration({ - 'name': configuration_name}) - xccl.AppendProperty('buildConfigurations', xcbc) - xccl.SetProperty('defaultConfigurationName', configuration_names[0]) - return xccl - - -class XcodeProject(object): - def __init__(self, gyp_path, path, build_file_dict): - self.gyp_path = gyp_path - self.path = path - self.project = gyp.xcodeproj_file.PBXProject(path=path) - projectDirPath = gyp.common.RelativePath( - os.path.dirname(os.path.abspath(self.gyp_path)), - os.path.dirname(path) or '.') - self.project.SetProperty('projectDirPath', projectDirPath) - self.project_file = \ - gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project}) - self.build_file_dict = build_file_dict - - # TODO(mark): add destructor that cleans up self.path if created_dir is - # True and things didn't complete successfully. Or do something even - # better with "try"? - self.created_dir = False - try: - os.makedirs(self.path) - self.created_dir = True - except OSError, e: - if e.errno != errno.EEXIST: - raise - - def Finalize1(self, xcode_targets, serialize_all_tests): - # Collect a list of all of the build configuration names used by the - # various targets in the file. It is very heavily advised to keep each - # target in an entire project (even across multiple project files) using - # the same set of configuration names. - configurations = [] - for xct in self.project.GetProperty('targets'): - xccl = xct.GetProperty('buildConfigurationList') - xcbcs = xccl.GetProperty('buildConfigurations') - for xcbc in xcbcs: - name = xcbc.GetProperty('name') - if name not in configurations: - configurations.append(name) - - # Replace the XCConfigurationList attached to the PBXProject object with - # a new one specifying all of the configuration names used by the various - # targets. - try: - xccl = CreateXCConfigurationList(configurations) - self.project.SetProperty('buildConfigurationList', xccl) - except: - import sys - sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path) - raise - - # The need for this setting is explained above where _intermediate_var is - # defined. The comments below about wanting to avoid project-wide build - # settings apply here too, but this needs to be set on a project-wide basis - # so that files relative to the _intermediate_var setting can be displayed - # properly in the Xcode UI. - # - # Note that for configuration-relative files such as anything relative to - # _intermediate_var, for the purposes of UI tree view display, Xcode will - # only resolve the configuration name once, when the project file is - # opened. If the active build configuration is changed, the project file - # must be closed and reopened if it is desired for the tree view to update. - # This is filed as Apple radar 6588391. - xccl.SetBuildSetting(_intermediate_var, - '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)') - xccl.SetBuildSetting(_shared_intermediate_var, - '$(SYMROOT)/DerivedSources/$(CONFIGURATION)') - - # Set user-specified project-wide build settings. This is intended to be - # used very sparingly. Really, almost everything should go into - # target-specific build settings sections. 
The project-wide settings are - # only intended to be used in cases where Xcode attempts to resolve - # variable references in a project context as opposed to a target context, - # such as when resolving sourceTree references while building up the tree - # tree view for UI display. - for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems(): - xccl.SetBuildSetting(xck, xcv) - - # Sort the targets based on how they appeared in the input. - # TODO(mark): Like a lot of other things here, this assumes internal - # knowledge of PBXProject - in this case, of its "targets" property. - - # ordinary_targets are ordinary targets that are already in the project - # file. run_test_targets are the targets that run unittests and should be - # used for the Run All Tests target. support_targets are the action/rule - # targets used by GYP file targets, just kept for the assert check. - ordinary_targets = [] - run_test_targets = [] - support_targets = [] - - # targets is full list of targets in the project. - targets = [] - - # does the it define it's own "all"? - has_custom_all = False - - # targets_for_all is the list of ordinary_targets that should be listed - # in this project's "All" target. It includes each non_runtest_target - # that does not have suppress_wildcard set. - targets_for_all = [] - - for target in self.build_file_dict['targets']: - target_name = target['target_name'] - toolset = target['toolset'] - qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name, - toolset) - xcode_target = xcode_targets[qualified_target] - # Make sure that the target being added to the sorted list is already in - # the unsorted list. - assert xcode_target in self.project._properties['targets'] - targets.append(xcode_target) - ordinary_targets.append(xcode_target) - if xcode_target.support_target: - support_targets.append(xcode_target.support_target) - targets.append(xcode_target.support_target) - - if not int(target.get('suppress_wildcard', False)): - targets_for_all.append(xcode_target) - - if target_name.lower() == 'all': - has_custom_all = True; - - # If this target has a 'run_as' attribute, or is a test, add its - # target to the targets, and (if it's a test) add it the to the - # test targets. - is_test = int(target.get('test', 0)) - if target.get('run_as') or is_test: - # Make a target to run something. It should have one - # dependency, the parent xcode target. - xccl = CreateXCConfigurationList(configurations) - run_target = gyp.xcodeproj_file.PBXAggregateTarget({ - 'name': 'Run ' + target_name, - 'productName': xcode_target.GetProperty('productName'), - 'buildConfigurationList': xccl, - }, - parent=self.project) - run_target.AddDependency(xcode_target) - - # The test runner target has a build phase that executes the - # test, if this has the 'test' attribute. If the 'run_as' tag - # doesn't exist (meaning that this must be a test), then we - # define a default test command line. - command = target.get('run_as', { - 'action': ['${BUILT_PRODUCTS_DIR}/${PRODUCT_NAME}'] - }) - - script = '' - if command.get('working_directory'): - script = script + 'cd "%s"\n' % \ - gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - command.get('working_directory')) - - if command.get('environment'): - script = script + "\n".join( - ['export %s="%s"' % - (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val)) - for (key, val) in command.get('environment').iteritems()]) + "\n" - - # Some test end up using sockets, files on disk, etc. 
and can get - # confused if more then one test runs at a time. The generator - # flag 'xcode_serialize_all_test_runs' controls the forcing of all - # tests serially. It defaults to True. To get serial runs this - # little bit of python does the same as the linux flock utility to - # make sure only one runs at a time. - command_prefix = '' - if is_test and serialize_all_tests: - command_prefix = \ -"""python -c "import fcntl, subprocess, sys -file = open('$TMPDIR/GYP_serialize_test_runs', 'a') -fcntl.flock(file.fileno(), fcntl.LOCK_EX) -sys.exit(subprocess.call(sys.argv[1:]))" """ - - # If we were unable to exec for some reason, we want to exit - # with an error, and fixup variable references to be shell - # syntax instead of xcode syntax. - script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \ - gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - gyp.common.EncodePOSIXShellList(command.get('action'))) - - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - run_target.AppendProperty('buildPhases', ssbp) - - # Add the run target to the project file. - targets.append(run_target) - if is_test: - run_test_targets.append(run_target) - xcode_target.test_runner = run_target - - - # Make sure that the list of targets being replaced is the same length as - # the one replacing it, but allow for the added test runner targets. - assert len(self.project._properties['targets']) == \ - len(ordinary_targets) + len(support_targets) - - self.project._properties['targets'] = targets - - # Get rid of unnecessary levels of depth in groups like the Source group. - self.project.RootGroupsTakeOverOnlyChildren(True) - - # Sort the groups nicely. Do this after sorting the targets, because the - # Products group is sorted based on the order of the targets. - self.project.SortGroups() - - # Create an "All" target if there's more than one target in this project - # file and the project didn't define its own "All" target. Put a generated - # "All" target first so that people opening up the project for the first - # time will build everything by default. - if len(targets_for_all) > 1 and not has_custom_all: - xccl = CreateXCConfigurationList(configurations) - all_target = gyp.xcodeproj_file.PBXAggregateTarget( - { - 'buildConfigurationList': xccl, - 'name': 'All', - }, - parent=self.project) - - for target in targets_for_all: - all_target.AddDependency(target) - - # TODO(mark): This is evil because it relies on internal knowledge of - # PBXProject._properties. It's important to get the "All" target first, - # though. - self.project._properties['targets'].insert(0, all_target) - - # The same, but for run_test_targets. - if len(run_test_targets) > 1: - xccl = CreateXCConfigurationList(configurations) - run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget( - { - 'buildConfigurationList': xccl, - 'name': 'Run All Tests', - }, - parent=self.project) - for run_test_target in run_test_targets: - run_all_tests_target.AddDependency(run_test_target) - - # Insert after the "All" target, which must exist if there is more than - # one run_test_target. - self.project._properties['targets'].insert(1, run_all_tests_target) - - def Finalize2(self, xcode_targets, xcode_target_to_target_dict): - # Finalize2 needs to happen in a separate step because the process of - # updating references to other projects depends on the ordering of targets - # within remote project files. 
Finalize1 is responsible for sorting duty, - # and once all project files are sorted, Finalize2 can come in and update - # these references. - - # To support making a "test runner" target that will run all the tests - # that are direct dependents of any given target, we look for - # xcode_create_dependents_test_runner being set on an Aggregate target, - # and generate a second target that will run the tests runners found under - # the marked target. - for bf_tgt in self.build_file_dict['targets']: - if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)): - tgt_name = bf_tgt['target_name'] - toolset = bf_tgt['toolset'] - qualified_target = gyp.common.QualifiedTarget(self.gyp_path, - tgt_name, toolset) - xcode_target = xcode_targets[qualified_target] - if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget): - # Collect all the run test targets. - all_run_tests = [] - pbxtds = xcode_target.GetProperty('dependencies') - for pbxtd in pbxtds: - pbxcip = pbxtd.GetProperty('targetProxy') - dependency_xct = pbxcip.GetProperty('remoteGlobalIDString') - target_dict = xcode_target_to_target_dict[dependency_xct] - if target_dict and int(target_dict.get('test', 0)): - assert dependency_xct.test_runner - all_run_tests.append(dependency_xct.test_runner) - - # Directly depend on all the runners as they depend on the target - # that builds them. - if len(all_run_tests) > 0: - run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({ - 'name': 'Run %s Tests' % tgt_name, - 'productName': tgt_name, - }, - parent=self.project) - for run_test_target in all_run_tests: - run_all_target.AddDependency(run_test_target) - - # Insert the test runner after the related target. - idx = self.project._properties['targets'].index(xcode_target) - self.project._properties['targets'].insert(idx + 1, run_all_target) - - # Update all references to other projects, to make sure that the lists of - # remote products are complete. Otherwise, Xcode will fill them in when - # it opens the project file, which will result in unnecessary diffs. - # TODO(mark): This is evil because it relies on internal knowledge of - # PBXProject._other_pbxprojects. - for other_pbxproject in self.project._other_pbxprojects.keys(): - self.project.AddOrGetProjectReference(other_pbxproject) - - self.project.SortRemoteProductReferences() - - # Give everything an ID. - self.project_file.ComputeIDs() - - # Make sure that no two objects in the project file have the same ID. If - # multiple objects wind up with the same ID, upon loading the file, Xcode - # will only recognize one object (the last one in the file?) and the - # results are unpredictable. - self.project_file.EnsureNoIDCollisions() - - def Write(self): - # Write the project file to a temporary location first. Xcode watches for - # changes to the project file and presents a UI sheet offering to reload - # the project when it does change. However, in some cases, especially when - # multiple projects are open or when Xcode is busy, things don't work so - # seamlessly. Sometimes, Xcode is able to detect that a project file has - # changed but can't unload it because something else is referencing it. - # To mitigate this problem, and to avoid even having Xcode present the UI - # sheet when an open project is rewritten for inconsequential changes, the - # project file is written to a temporary file in the xcodeproj directory - # first. The new temporary file is then compared to the existing project - # file, if any. 
If they differ, the new file replaces the old; otherwise, - # the new project file is simply deleted. Xcode properly detects a file - # being renamed over an open project file as a change and so it remains - # able to present the "project file changed" sheet under this system. - # Writing to a temporary file first also avoids the possible problem of - # Xcode rereading an incomplete project file. - (output_fd, new_pbxproj_path) = \ - tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.', - dir=self.path) - - try: - output_file = os.fdopen(output_fd, 'wb') - - self.project_file.Print(output_file) - output_file.close() - - pbxproj_path = os.path.join(self.path, 'project.pbxproj') - - same = False - try: - same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False) - except OSError, e: - if e.errno != errno.ENOENT: - raise - - if same: - # The new file is identical to the old one, just get rid of the new - # one. - os.unlink(new_pbxproj_path) - else: - # The new file is different from the old one, or there is no old one. - # Rename the new file to the permanent name. - # - # tempfile.mkstemp uses an overly restrictive mode, resulting in a - # file that can only be read by the owner, regardless of the umask. - # There's no reason to not respect the umask here, which means that - # an extra hoop is required to fetch it and reset the new file's mode. - # - # No way to get the umask without setting a new one? Set a safe one - # and then set it back to the old value. - umask = os.umask(077) - os.umask(umask) - - os.chmod(new_pbxproj_path, 0666 & ~umask) - os.rename(new_pbxproj_path, pbxproj_path) - - except Exception: - # Don't leave turds behind. In fact, if this code was responsible for - # creating the xcodeproj directory, get rid of that too. - os.unlink(new_pbxproj_path) - if self.created_dir: - shutil.rmtree(self.path, True) - raise - - -cached_xcode_version = None -def InstalledXcodeVersion(): - """Fetches the installed version of Xcode, returns empty string if it is - unable to figure it out.""" - - global cached_xcode_version - if not cached_xcode_version is None: - return cached_xcode_version - - # Default to an empty string - cached_xcode_version = '' - - # Collect the xcodebuild's version information. - try: - import subprocess - cmd = ['/usr/bin/xcodebuild', '-version'] - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) - xcodebuild_version_info = proc.communicate()[0] - # Any error, return empty string - if proc.returncode: - xcodebuild_version_info = '' - except OSError: - # We failed to launch the tool - xcodebuild_version_info = '' - - # Pull out the Xcode version itself. - match_line = re.search('^Xcode (.*)$', xcodebuild_version_info, re.MULTILINE) - if match_line: - cached_xcode_version = match_line.group(1) - # Done! - return cached_xcode_version - - -def AddSourceToTarget(source, pbxp, xct): - # TODO(mark): Perhaps this can be made a little bit fancier. - source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's'] - basename = posixpath.basename(source) - (root, ext) = posixpath.splitext(basename) - if ext != '': - ext = ext[1:].lower() - - if ext in source_extensions: - xct.SourcesPhase().AddFile(source) - else: - # Files that aren't added to a sources build phase can still go into - # the project file, just not as part of a build phase. - pbxp.AddOrGetFileInRootGroup(source) - - -def AddResourceToTarget(resource, pbxp, xct): - # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call - # where it's used. 
- xct.ResourcesPhase().AddFile(resource) - - -_xcode_variable_re = re.compile('(\$\((.*?)\))') -def ExpandXcodeVariables(string, expansions): - """Expands Xcode-style $(VARIABLES) in string per the expansions dict. - - In some rare cases, it is appropriate to expand Xcode variables when a - project file is generated. For any substring $(VAR) in string, if VAR is a - key in the expansions dict, $(VAR) will be replaced with expansions[VAR]. - Any $(VAR) substring in string for which VAR is not a key in the expansions - dict will remain in the returned string. - """ - - matches = _xcode_variable_re.findall(string) - if matches == None: - return string - - matches.reverse() - for match in matches: - (to_replace, variable) = match - if not variable in expansions: - continue - - replacement = expansions[variable] - string = re.sub(re.escape(to_replace), replacement, string) - - return string - - -def EscapeXCodeArgument(s): - """We must escape the arguments that we give to XCode so that it knows not to - split on spaces and to respect backslash and quote literals.""" - s = s.replace('\\', '\\\\') - s = s.replace('"', '\\"') - return '"' + s + '"' - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params['options'] - generator_flags = params.get('generator_flags', {}) - parallel_builds = generator_flags.get('xcode_parallel_builds', True) - serialize_all_tests = \ - generator_flags.get('xcode_serialize_all_test_runs', True) - xcode_projects = {} - for build_file, build_file_dict in data.iteritems(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != '.gyp': - continue - xcodeproj_path = build_file_root + options.suffix + '.xcodeproj' - if options.generator_output: - xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) - xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict) - xcode_projects[build_file] = xcp - pbxp = xcp.project - - if parallel_builds: - pbxp.SetProperty('attributes', - {'BuildIndependentTargetsInParallel': 'YES'}) - - main_group = pbxp.GetProperty('mainGroup') - build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'}) - main_group.AppendChild(build_group) - for included_file in build_file_dict['included_files']: - build_group.AddOrGetFileByPath(included_file, False) - - xcode_targets = {} - xcode_target_to_target_dict = {} - for qualified_target in target_list: - [build_file, target_name, toolset] = \ - gyp.common.ParseQualifiedTarget(qualified_target) - - spec = target_dicts[qualified_target] - if spec['toolset'] != 'target': - raise Exception( - 'Multiple toolsets not supported in xcode build (target %s)' % - qualified_target) - configuration_names = [spec['default_configuration']] - for configuration_name in sorted(spec['configurations'].keys()): - if configuration_name not in configuration_names: - configuration_names.append(configuration_name) - xcp = xcode_projects[build_file] - pbxp = xcp.project - - # Set up the configurations for the target according to the list of names - # supplied. - xccl = CreateXCConfigurationList(configuration_names) - - # Create an XCTarget subclass object for the target. We use the type - # with "+bundle" appended if the target has "mac_bundle" set. 
- _types = { - 'executable': 'com.apple.product-type.tool', - 'loadable_module': 'com.apple.product-type.library.dynamic', - 'shared_library': 'com.apple.product-type.library.dynamic', - 'static_library': 'com.apple.product-type.library.static', - 'executable+bundle': 'com.apple.product-type.application', - 'loadable_module+bundle': 'com.apple.product-type.bundle', - 'shared_library+bundle': 'com.apple.product-type.framework', - } - - target_properties = { - 'buildConfigurationList': xccl, - 'name': target_name, - } - - type = spec['type'] - is_bundle = int(spec.get('mac_bundle', 0)) - if type != 'none': - type_bundle_key = type - if is_bundle: - type_bundle_key += '+bundle' - xctarget_type = gyp.xcodeproj_file.PBXNativeTarget - try: - target_properties['productType'] = _types[type_bundle_key] - except KeyError, e: - gyp.common.ExceptionAppend(e, "-- unknown product type while " - "writing target %s" % target_name) - raise - else: - xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget - - target_product_name = spec.get('product_name') - if target_product_name is not None: - target_properties['productName'] = target_product_name - - xct = xctarget_type(target_properties, parent=pbxp, - force_outdir=spec.get('product_dir'), - force_prefix=spec.get('product_prefix'), - force_extension=spec.get('product_extension')) - pbxp.AppendProperty('targets', xct) - xcode_targets[qualified_target] = xct - xcode_target_to_target_dict[xct] = spec - - # Xcode does not have a distinct type for loadable_modules that are pure - # BSD targets (ie-unbundled). It uses the same setup as a shared_library - # but the mach-o type is explictly set in the settings. So before we do - # anything else, for this one case, we stuff in that one setting. This - # would allow the other data in the spec to change it if need be. - if type == 'loadable_module' and not is_bundle: - xccl.SetBuildSetting('MACH_O_TYPE', 'mh_bundle') - - spec_actions = spec.get('actions', []) - spec_rules = spec.get('rules', []) - - # Xcode has some "issues" with checking dependencies for the "Compile - # sources" step with any source files/headers generated by actions/rules. - # To work around this, if a target is building anything directly (not - # type "none"), then a second target as used to run the GYP actions/rules - # and is made a dependency of this target. This way the work is done - # before the dependency checks for what should be recompiled. - support_xct = None - if type != 'none' and (spec_actions or spec_rules): - support_xccl = CreateXCConfigurationList(configuration_names); - support_target_properties = { - 'buildConfigurationList': support_xccl, - 'name': target_name + ' Support', - } - if target_product_name: - support_target_properties['productName'] = \ - target_product_name + ' Support' - support_xct = \ - gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties, - parent=pbxp) - pbxp.AppendProperty('targets', support_xct) - xct.AddDependency(support_xct) - # Hang the support target off the main target so it can be tested/found - # by the generator during Finalize. - xct.support_target = support_xct - - prebuild_index = 0 - - # Add custom shell script phases for "actions" sections. - for action in spec_actions: - # There's no need to write anything into the script to ensure that the - # output directories already exist, because Xcode will look at the - # declared outputs and automatically ensure that they exist for us. - - # Do we have a message to print when this action runs? 
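The type-to-productType table above is the crux of how a GYP target becomes a native Xcode target. A small lookup helper, shown only to illustrate the "+bundle" key convention (the values mirror the deleted table):

PRODUCT_TYPES = {
    'executable':              'com.apple.product-type.tool',
    'loadable_module':         'com.apple.product-type.library.dynamic',
    'shared_library':          'com.apple.product-type.library.dynamic',
    'static_library':          'com.apple.product-type.library.static',
    'executable+bundle':       'com.apple.product-type.application',
    'loadable_module+bundle':  'com.apple.product-type.bundle',
    'shared_library+bundle':   'com.apple.product-type.framework',
}

def xcode_product_type(gyp_type, is_bundle):
    """Map a GYP 'type' (plus the mac_bundle flag) to an Xcode productType string."""
    key = gyp_type + ('+bundle' if is_bundle else '')
    return PRODUCT_TYPES[key]   # KeyError signals an unknown/unsupported product type

# xcode_product_type('shared_library', is_bundle=True)
#   == 'com.apple.product-type.framework'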
- message = action.get('message') - if message: - message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message) - else: - message = '' - - # Turn the list into a string that can be passed to a shell. - action_string = gyp.common.EncodePOSIXShellList(action['action']) - - # Convert Xcode-type variable references to sh-compatible environment - # variable references. - message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message) - action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - action_string) - - script = '' - # Include the optional message - if message_sh: - script += message_sh + '\n' - # Be sure the script runs in exec, and that if exec fails, the script - # exits signalling an error. - script += 'exec ' + action_string_sh + '\nexit 1\n' - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'inputPaths': action['inputs'], - 'name': 'Action "' + action['action_name'] + '"', - 'outputPaths': action['outputs'], - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - - if support_xct: - support_xct.AppendProperty('buildPhases', ssbp) - else: - # TODO(mark): this assumes too much knowledge of the internals of - # xcodeproj_file; some of these smarts should move into xcodeproj_file - # itself. - xct._properties['buildPhases'].insert(prebuild_index, ssbp) - prebuild_index = prebuild_index + 1 - - # TODO(mark): Should verify that at most one of these is specified. - if int(action.get('process_outputs_as_sources', False)): - for output in action['outputs']: - AddSourceToTarget(output, pbxp, xct) - - if int(action.get('process_outputs_as_mac_bundle_resources', False)): - for output in action['outputs']: - AddResourceToTarget(output, pbxp, xct) - - # tgt_mac_bundle_resources holds the list of bundle resources so - # the rule processing can check against it. - if is_bundle: - tgt_mac_bundle_resources = spec.get('mac_bundle_resources', []) - else: - tgt_mac_bundle_resources = [] - - # Add custom shell script phases driving "make" for "rules" sections. - # - # Xcode's built-in rule support is almost powerful enough to use directly, - # but there are a few significant deficiencies that render them unusable. - # There are workarounds for some of its inadequacies, but in aggregate, - # the workarounds added complexity to the generator, and some workarounds - # actually require input files to be crafted more carefully than I'd like. - # Consequently, until Xcode rules are made more capable, "rules" input - # sections will be handled in Xcode output by shell script build phases - # performed prior to the compilation phase. - # - # The following problems with Xcode rules were found. The numbers are - # Apple radar IDs. I hope that these shortcomings are addressed, I really - # liked having the rules handled directly in Xcode during the period that - # I was prototyping this. - # - # 6588600 Xcode compiles custom script rule outputs too soon, compilation - # fails. This occurs when rule outputs from distinct inputs are - # interdependent. The only workaround is to put rules and their - # inputs in a separate target from the one that compiles the rule - # outputs. This requires input file cooperation and it means that - # process_outputs_as_sources is unusable. - # 6584932 Need to declare that custom rule outputs should be excluded from - # compilation. A possible workaround is to lie to Xcode about a - # rule's output, giving it a dummy file it doesn't know how to - # compile. The rule action script would need to touch the dummy. 
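The shell script assembled for each action phase above has a fixed shape: an optional echoed message, an exec of the action, and a trailing exit 1 so that a failed exec still reports an error. A hedged sketch, with shlex.quote standing in for GYP's POSIX argument encoders:

import shlex

def action_shell_script(action_args, message=None):
    """Build the script body for a shell-script build phase driving one action."""
    lines = []
    if message:
        lines.append('echo note: ' + shlex.quote(message))
    lines.append('exec ' + ' '.join(shlex.quote(arg) for arg in action_args))
    lines.append('exit 1')                 # only reached if exec itself fails
    return '\n'.join(lines) + '\n'

# action_shell_script(['python', 'gen.py', '--out', 'foo.cc'], 'Generating foo.cc')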
- # 6584839 I need a way to declare additional inputs to a custom rule. - # A possible workaround is a shell script phase prior to - # compilation that touches a rule's primary input files if any - # would-be additional inputs are newer than the output. Modifying - # the source tree - even just modification times - feels dirty. - # 6564240 Xcode "custom script" build rules always dump all environment - # variables. This is a low-prioroty problem and is not a - # show-stopper. - rules_by_ext = {} - for rule in spec_rules: - rules_by_ext[rule['extension']] = rule - - # First, some definitions: - # - # A "rule source" is a file that was listed in a target's "sources" - # list and will have a rule applied to it on the basis of matching the - # rule's "extensions" attribute. Rule sources are direct inputs to - # rules. - # - # Rule definitions may specify additional inputs in their "inputs" - # attribute. These additional inputs are used for dependency tracking - # purposes. - # - # A "concrete output" is a rule output with input-dependent variables - # resolved. For example, given a rule with: - # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'], - # if the target's "sources" list contained "one.ext" and "two.ext", - # the "concrete output" for rule input "two.ext" would be "two.cc". If - # a rule specifies multiple outputs, each input file that the rule is - # applied to will have the same number of concrete outputs. - # - # If any concrete outputs are outdated or missing relative to their - # corresponding rule_source or to any specified additional input, the - # rule action must be performed to generate the concrete outputs. - - # concrete_outputs_by_rule_source will have an item at the same index - # as the rule['rule_sources'] that it corresponds to. Each item is a - # list of all of the concrete outputs for the rule_source. - concrete_outputs_by_rule_source = [] - - # concrete_outputs_all is a flat list of all concrete outputs that this - # rule is able to produce, given the known set of input files - # (rule_sources) that apply to it. - concrete_outputs_all = [] - - # messages & actions are keyed by the same indices as rule['rule_sources'] - # and concrete_outputs_by_rule_source. They contain the message and - # action to perform after resolving input-dependent variables. The - # message is optional, in which case None is stored for each rule source. - messages = [] - actions = [] - - for rule_source in rule.get('rule_sources', []): - rule_source_basename = posixpath.basename(rule_source) - (rule_source_root, rule_source_ext) = \ - posixpath.splitext(rule_source_basename) - - # These are the same variable names that Xcode uses for its own native - # rule support. Because Xcode's rule engine is not being used, they - # need to be expanded as they are written to the makefile. - rule_input_dict = { - 'INPUT_FILE_BASE': rule_source_root, - 'INPUT_FILE_SUFFIX': rule_source_ext, - 'INPUT_FILE_NAME': rule_source_basename, - 'INPUT_FILE_PATH': rule_source, - } - - concrete_outputs_for_this_rule_source = [] - for output in rule.get('outputs', []): - # Fortunately, Xcode and make both use $(VAR) format for their - # variables, so the expansion is the only transformation necessary. - # Any remaning $(VAR)-type variables in the string can be given - # directly to make, which will pick up the correct settings from - # what Xcode puts into the environment. 
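Putting the INPUT_FILE_* definitions above together, the "concrete outputs" for one rule source can be computed roughly as follows. This is an illustration rather than the deleted code, and it reuses the expand_xcode_variables sketch shown earlier:

import posixpath

def concrete_outputs_for(rule_source, rule_outputs):
    """Resolve a rule's output templates against a single rule source."""
    basename = posixpath.basename(rule_source)
    root, ext = posixpath.splitext(basename)
    expansions = {
        'INPUT_FILE_BASE': root,
        'INPUT_FILE_SUFFIX': ext,
        'INPUT_FILE_NAME': basename,
        'INPUT_FILE_PATH': rule_source,
    }
    return [expand_xcode_variables(output, expansions) for output in rule_outputs]

# concrete_outputs_for('protos/two.ext', ['$(INPUT_FILE_BASE).cc']) == ['two.cc']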
- concrete_output = ExpandXcodeVariables(output, rule_input_dict) - concrete_outputs_for_this_rule_source.append(concrete_output) - - # Add all concrete outputs to the project. - pbxp.AddOrGetFileInRootGroup(concrete_output) - - concrete_outputs_by_rule_source.append( \ - concrete_outputs_for_this_rule_source) - concrete_outputs_all.extend(concrete_outputs_for_this_rule_source) - - # TODO(mark): Should verify that at most one of these is specified. - if int(rule.get('process_outputs_as_sources', False)): - for output in concrete_outputs_for_this_rule_source: - AddSourceToTarget(output, pbxp, xct) - - # If the file came from the mac_bundle_resources list or if the rule - # is marked to process outputs as bundle resource, do so. - was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources - if was_mac_bundle_resource or \ - int(rule.get('process_outputs_as_mac_bundle_resources', False)): - for output in concrete_outputs_for_this_rule_source: - AddResourceToTarget(output, pbxp, xct) - - # Do we have a message to print when this rule runs? - message = rule.get('message') - if message: - message = gyp.common.EncodePOSIXShellArgument(message) - message = '@echo note: ' + ExpandXcodeVariables(message, - rule_input_dict) - messages.append(message) - - # Turn the list into a string that can be passed to a shell. - action_string = gyp.common.EncodePOSIXShellList(rule['action']) - - action = ExpandXcodeVariables(action_string, rule_input_dict) - actions.append(action) - - if len(concrete_outputs_all) > 0: - # TODO(mark): There's a possibilty for collision here. Consider - # target "t" rule "A_r" and target "t_A" rule "r". - makefile_name = '%s_%s.make' % (target_name, rule['rule_name']) - makefile_path = os.path.join(xcode_projects[build_file].path, - makefile_name) - # TODO(mark): try/close? Write to a temporary file and swap it only - # if it's got changes? - makefile = open(makefile_path, 'wb') - - # make will build the first target in the makefile by default. By - # convention, it's called "all". List all (or at least one) - # concrete output for each rule source as a prerequisite of the "all" - # target. - makefile.write('all: \\\n') - for concrete_output_index in \ - xrange(0, len(concrete_outputs_by_rule_source)): - # Only list the first (index [0]) concrete output of each input - # in the "all" target. Otherwise, a parallel make (-j > 1) would - # attempt to process each input multiple times simultaneously. - # Otherwise, "all" could just contain the entire list of - # concrete_outputs_all. - concrete_output = \ - concrete_outputs_by_rule_source[concrete_output_index][0] - if concrete_output_index == len(concrete_outputs_by_rule_source) - 1: - eol = '' - else: - eol = ' \\' - makefile.write(' %s%s\n' % (concrete_output, eol)) - - for (rule_source, concrete_outputs, message, action) in \ - zip(rule['rule_sources'], concrete_outputs_by_rule_source, - messages, actions): - makefile.write('\n') - - # Add a rule that declares it can build each concrete output of a - # rule source. Collect the names of the directories that are - # required. 
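The "all" target written above deliberately lists only the first concrete output of each rule source, so a parallel make does not run the same recipe once per declared output. A compact sketch of that stanza writer (an assumed helper, not the original):

def write_all_target(makefile, concrete_outputs_by_rule_source):
    """Emit 'all:' with one representative output per rule source."""
    makefile.write('all: \\\n')
    last = len(concrete_outputs_by_rule_source) - 1
    for index, outputs in enumerate(concrete_outputs_by_rule_source):
        eol = '' if index == last else ' \\'
        makefile.write('    %s%s\n' % (outputs[0], eol))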
- concrete_output_dirs = [] - for concrete_output_index in xrange(0, len(concrete_outputs)): - concrete_output = concrete_outputs[concrete_output_index] - if concrete_output_index == 0: - bol = '' - else: - bol = ' ' - makefile.write('%s%s \\\n' % (bol, concrete_output)) - - concrete_output_dir = posixpath.dirname(concrete_output) - if (concrete_output_dir and - concrete_output_dir not in concrete_output_dirs): - concrete_output_dirs.append(concrete_output_dir) - - makefile.write(' : \\\n') - - # The prerequisites for this rule are the rule source itself and - # the set of additional rule inputs, if any. - prerequisites = [rule_source] - prerequisites.extend(rule.get('inputs', [])) - for prerequisite_index in xrange(0, len(prerequisites)): - prerequisite = prerequisites[prerequisite_index] - if prerequisite_index == len(prerequisites) - 1: - eol = '' - else: - eol = ' \\' - makefile.write(' %s%s\n' % (prerequisite, eol)) - - # Make sure that output directories exist before executing the rule - # action. - # TODO(mark): quote the list of concrete_output_dirs. - if len(concrete_output_dirs) > 0: - makefile.write('\tmkdir -p %s\n' % ' '.join(concrete_output_dirs)) - - # The rule message and action have already had the necessary variable - # substitutions performed. - if message: - makefile.write('\t%s\n' % message) - makefile.write('\t%s\n' % action) - - makefile.close() - - # It might be nice to ensure that needed output directories exist - # here rather than in each target in the Makefile, but that wouldn't - # work if there ever was a concrete output that had an input-dependent - # variable anywhere other than in the leaf position. - - # Don't declare any inputPaths or outputPaths. If they're present, - # Xcode will provide a slight optimization by only running the script - # phase if any output is missing or outdated relative to any input. - # Unfortunately, it will also assume that all outputs are touched by - # the script, and if the outputs serve as files in a compilation - # phase, they will be unconditionally rebuilt. Since make might not - # rebuild everything that could be declared here as an output, this - # extra compilation activity is unnecessary. With inputPaths and - # outputPaths not supplied, make will always be called, but it knows - # enough to not do anything when everything is up-to-date. - - # To help speed things up, pass -j COUNT to make so it does some work - # in parallel. Don't use ncpus because Xcode will build ncpus targets - # in parallel and if each target happens to have a rules step, there - # would be ncpus^2 things going. With a machine that has 2 quad-core - # Xeons, a build can quickly run out of processes based on - # scheduling/other tasks, and randomly failing builds are no good. - script = \ -"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)" -if [ "${JOB_COUNT}" -gt 4 ]; then - JOB_COUNT=4 -fi -exec "${DEVELOPER_BIN_DIR}/make" -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}" -exit 1 -""" % makefile_name - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'name': 'Rule "' + rule['rule_name'] + '"', - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - - if support_xct: - support_xct.AppendProperty('buildPhases', ssbp) - else: - # TODO(mark): this assumes too much knowledge of the internals of - # xcodeproj_file; some of these smarts should move into xcodeproj_file - # itself. - xct._properties['buildPhases'].insert(prebuild_index, ssbp) - prebuild_index = prebuild_index + 1 - - # Extra rule inputs also go into the project file. 
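Each rule source then gets its own stanza in the generated makefile: all concrete outputs on the left, the rule source plus any extra rule inputs as prerequisites, a mkdir -p for the output directories, and the already-expanded message and action as the recipe. A hedged sketch of one such stanza; directory de-duplication and quoting are simplified here:

import posixpath

def write_rule_stanza(makefile, rule_source, outputs, extra_inputs, action, message=None):
    """Emit one makefile rule: outputs : rule_source extra_inputs ... <recipe>."""
    makefile.write('\n')
    makefile.write(' \\\n '.join(outputs))
    makefile.write(' : \\\n')
    prerequisites = [rule_source] + list(extra_inputs)
    makefile.write(' \\\n'.join('    ' + p for p in prerequisites) + '\n')
    output_dirs = []
    for output in outputs:
        directory = posixpath.dirname(output)
        if directory and directory not in output_dirs:
            output_dirs.append(directory)
    if output_dirs:
        makefile.write('\tmkdir -p %s\n' % ' '.join(output_dirs))
    if message:
        makefile.write('\t%s\n' % message)
    makefile.write('\t%s\n' % action)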
Concrete outputs were - # already added when they were computed. - for group in ['inputs', 'inputs_excluded']: - for item in rule.get(group, []): - pbxp.AddOrGetFileInRootGroup(item) - - # Add "sources". - for source in spec.get('sources', []): - (source_root, source_extension) = posixpath.splitext(source) - if source_extension[1:] not in rules_by_ext: - # AddSourceToTarget will add the file to a root group if it's not - # already there. - AddSourceToTarget(source, pbxp, xct) - else: - pbxp.AddOrGetFileInRootGroup(source) - - # Add "mac_bundle_resources" if it's a bundle of any type. - if is_bundle: - for resource in tgt_mac_bundle_resources: - (resource_root, resource_extension) = posixpath.splitext(resource) - if resource_extension[1:] not in rules_by_ext: - AddResourceToTarget(resource, pbxp, xct) - else: - pbxp.AddOrGetFileInRootGroup(resource) - - # Add "copies". - for copy_group in spec.get('copies', []): - pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({ - 'name': 'Copy to ' + copy_group['destination'] - }, - parent=xct) - dest = copy_group['destination'] - if dest[0] not in ('/', '$'): - # Relative paths are relative to $(SRCROOT). - dest = '$(SRCROOT)/' + dest - pbxcp.SetDestination(dest) - - # TODO(mark): The usual comment about this knowing too much about - # gyp.xcodeproj_file internals applies. - xct._properties['buildPhases'].insert(prebuild_index, pbxcp) - - for file in copy_group['files']: - pbxcp.AddFile(file) - - # Excluded files can also go into the project file. - for key in ['sources', 'mac_bundle_resources']: - excluded_key = key + '_excluded' - for item in spec.get(excluded_key, []): - pbxp.AddOrGetFileInRootGroup(item) - - # So can "inputs" and "outputs" sections of "actions" groups. - for action in spec.get('actions', []): - groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded'] - for group in groups: - for item in action.get(group, []): - # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not - # sources. - if not item.startswith('$(BUILT_PRODUCTS_DIR)/'): - pbxp.AddOrGetFileInRootGroup(item) - - for postbuild in spec.get('postbuilds', []): - action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action']) - script = 'exec ' + action_string_sh + '\nexit 1\n' - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"', - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - xct.AppendProperty('buildPhases', ssbp) - - # Add dependencies before libraries, because adding a dependency may imply - # adding a library. It's preferable to keep dependencies listed first - # during a link phase so that they can override symbols that would - # otherwise be provided by libraries, which will usually include system - # libraries. On some systems, ld is finicky and even requires the - # libraries to be ordered in such a way that unresolved symbols in - # earlier-listed libraries may only be resolved by later-listed libraries. - # The Mac linker doesn't work that way, but other platforms do, and so - # their linker invocations need to be constructed in this way. There's - # no compelling reason for Xcode's linker invocations to differ. - - if 'dependencies' in spec: - for dependency in spec['dependencies']: - xct.AddDependency(xcode_targets[dependency]) - # The support project also gets the dependencies (in case they are - # needed for the actions/rules to work). 
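One small but easy-to-miss rule above is how "copies" destinations are normalized: anything that is not absolute and does not already start with an Xcode variable reference is treated as relative to $(SRCROOT). As a one-liner sketch:

def normalize_copy_destination(dest):
    """Relative copy destinations are interpreted relative to $(SRCROOT)."""
    if dest[0] not in ('/', '$'):
        dest = '$(SRCROOT)/' + dest
    return dest

# normalize_copy_destination('resources/icons') == '$(SRCROOT)/resources/icons'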
- if support_xct: - support_xct.AddDependency(xcode_targets[dependency]) - - if 'libraries' in spec: - for library in spec['libraries']: - xct.FrameworksPhase().AddFile(library) - # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary. - # I wish Xcode handled this automatically. - # TODO(mark): this logic isn't right. There are certain directories - # that are always searched, we should check to see if the library is - # in one of those directories, and if not, we should do the - # AppendBuildSetting thing. - if not posixpath.isabs(library) and not library.startswith('$'): - # TODO(mark): Need to check to see if library_dir is already in - # LIBRARY_SEARCH_PATHS. - library_dir = posixpath.dirname(library) - xct.AppendBuildSetting('LIBRARY_SEARCH_PATHS', library_dir) - - for configuration_name in configuration_names: - configuration = spec['configurations'][configuration_name] - xcbc = xct.ConfigurationNamed(configuration_name) - for include_dir in configuration.get('mac_framework_dirs', []): - xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir) - for include_dir in configuration.get('include_dirs', []): - xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir) - if 'defines' in configuration: - for define in configuration['defines']: - set_define = EscapeXCodeArgument(define) - xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define) - if 'xcode_settings' in configuration: - for xck, xcv in configuration['xcode_settings'].iteritems(): - xcbc.SetBuildSetting(xck, xcv) - - build_files = [] - for build_file, build_file_dict in data.iteritems(): - if build_file.endswith('.gyp'): - build_files.append(build_file) - - for build_file in build_files: - xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests) - - for build_file in build_files: - xcode_projects[build_file].Finalize2(xcode_targets, - xcode_target_to_target_dict) - - for build_file in build_files: - xcode_projects[build_file].Write() diff --git a/mozc_build_tools/gyp/pylib/gyp/input.py b/mozc_build_tools/gyp/pylib/gyp/input.py deleted file mode 100644 index 4163c6d..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/input.py +++ /dev/null @@ -1,2195 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from compiler.ast import Const -from compiler.ast import Dict -from compiler.ast import Discard -from compiler.ast import List -from compiler.ast import Module -from compiler.ast import Node -from compiler.ast import Stmt -import compiler -import copy -import gyp.common -import optparse -import os.path -import re -import shlex -import subprocess -import sys - - -# A list of types that are treated as linkable. -linkable_types = ['executable', 'shared_library', 'loadable_module'] - -# A list of sections that contain links to other targets. -dependency_sections = ['dependencies', 'export_dependent_settings'] - -# base_path_sections is a list of sections defined by GYP that contain -# pathnames. The generators can provide more keys, the two lists are merged -# into path_sections, but you should call IsPathSection instead of using either -# list directly. -base_path_sections = [ - 'destination', - 'files', - 'include_dirs', - 'inputs', - 'libraries', - 'outputs', - 'sources', -] -path_sections = [] - - -def IsPathSection(section): - # If section ends in one of these characters, it's applied to a section - # without the trailing characters. 
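The LIBRARY_SEARCH_PATHS handling above only considers libraries referenced by relative path; absolute paths and $(...)-style references are left to Xcode. A sketch of that filter, with the de-duplication the original marks as a TODO added here for illustration:

import posixpath

def library_search_dirs(libraries):
    """Directories that should be appended to LIBRARY_SEARCH_PATHS."""
    dirs = []
    for library in libraries:
        if posixpath.isabs(library) or library.startswith('$'):
            continue                        # Xcode can already resolve these
        directory = posixpath.dirname(library)
        if directory and directory not in dirs:
            dirs.append(directory)
    return dirs

# library_search_dirs(['third_party/lib/libfoo.a', '$(SDKROOT)/usr/lib/libz.dylib'])
#   == ['third_party/lib']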
'/' is notably absent from this list, - # because there's no way for a regular expression to be treated as a path. - while section[-1:] in ('=', '+', '?', '!'): - section = section[0:-1] - - if section in path_sections or \ - section.endswith('_dir') or section.endswith('_dirs') or \ - section.endswith('_file') or section.endswith('_files') or \ - section.endswith('_path') or section.endswith('_paths'): - return True - return False - - -# base_non_configuraiton_keys is a list of key names that belong in the target -# itself and should not be propagated into its configurations. It is merged -# with a list that can come from the generator to -# create non_configuration_keys. -base_non_configuration_keys = [ - # Sections that must exist inside targets and not configurations. - 'actions', - 'configurations', - 'copies', - 'default_configuration', - 'dependencies', - 'dependencies_original', - 'link_languages', - 'libraries', - 'postbuilds', - 'product_dir', - 'product_extension', - 'product_name', - 'product_prefix', - 'rules', - 'run_as', - 'sources', - 'suppress_wildcard', - 'target_name', - 'test', - 'toolset', - 'toolsets', - 'type', - 'variants', - - # Sections that can be found inside targets or configurations, but that - # should not be propagated from targets into their configurations. - 'variables', -] -non_configuration_keys = [] - -# Controls how the generator want the build file paths. -absolute_build_file_paths = False - -# Controls whether or not the generator supports multiple toolsets. -multiple_toolsets = False - - -def GetIncludedBuildFiles(build_file_path, aux_data, included=None): - """Return a list of all build files included into build_file_path. - - The returned list will contain build_file_path as well as all other files - that it included, either directly or indirectly. Note that the list may - contain files that were included into a conditional section that evaluated - to false and was not merged into build_file_path's dict. - - aux_data is a dict containing a key for each build file or included build - file. Those keys provide access to dicts whose "included" keys contain - lists of all other files included by the build file. - - included should be left at its default None value by external callers. It - is used for recursion. - - The returned list will not contain any duplicate entries. Each build file - in the list will be relative to the current directory. - """ - - if included == None: - included = [] - - if build_file_path in included: - return included - - included.append(build_file_path) - - for included_build_file in aux_data[build_file_path].get('included', []): - GetIncludedBuildFiles(included_build_file, aux_data, included) - - return included - - -def CheckedEval(file_contents): - """Return the eval of a gyp file. - - The gyp file is restricted to dictionaries and lists only, and - repeated keys are not allowed. - - Note that this is slower than eval() is. 
- """ - - ast = compiler.parse(file_contents) - assert isinstance(ast, Module) - c1 = ast.getChildren() - assert c1[0] is None - assert isinstance(c1[1], Stmt) - c2 = c1[1].getChildren() - assert isinstance(c2[0], Discard) - c3 = c2[0].getChildren() - assert len(c3) == 1 - return CheckNode(c3[0], []) - - -def CheckNode(node, keypath): - if isinstance(node, Dict): - c = node.getChildren() - dict = {} - for n in range(0, len(c), 2): - assert isinstance(c[n], Const) - key = c[n].getChildren()[0] - if key in dict: - raise KeyError, "Key '" + key + "' repeated at level " + \ - repr(len(keypath) + 1) + " with key path '" + \ - '.'.join(keypath) + "'" - kp = list(keypath) # Make a copy of the list for descending this node. - kp.append(key) - dict[key] = CheckNode(c[n + 1], kp) - return dict - elif isinstance(node, List): - c = node.getChildren() - children = [] - for index, child in enumerate(c): - kp = list(keypath) # Copy list. - kp.append(repr(index)) - children.append(CheckNode(child, kp)) - return children - elif isinstance(node, Const): - return node.getChildren()[0] - else: - raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \ - "': " + repr(node) - - -def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes, - is_target, check): - if build_file_path in data: - return data[build_file_path] - - if os.path.exists(build_file_path): - build_file_contents = open(build_file_path).read() - else: - raise Exception("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) - - build_file_data = None - try: - if check: - build_file_data = CheckedEval(build_file_contents) - else: - build_file_data = eval(build_file_contents, {'__builtins__': None}, - None) - except SyntaxError, e: - e.filename = build_file_path - raise - except Exception, e: - gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) - raise - - data[build_file_path] = build_file_data - aux_data[build_file_path] = {} - - # Scan for includes and merge them in. - try: - if is_target: - LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, - aux_data, variables, includes, check) - else: - LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, - aux_data, variables, None, check) - except Exception, e: - gyp.common.ExceptionAppend(e, - 'while reading includes of ' + build_file_path) - raise - - return build_file_data - - -def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, - variables, includes, check): - includes_list = [] - if includes != None: - includes_list.extend(includes) - if 'includes' in subdict: - for include in subdict['includes']: - # "include" is specified relative to subdict_path, so compute the real - # path to include by appending the provided "include" to the directory - # in which subdict_path resides. - relative_include = \ - os.path.normpath(os.path.join(os.path.dirname(subdict_path), include)) - includes_list.append(relative_include) - # Unhook the includes list, it's no longer needed. - del subdict['includes'] - - # Merge in the included files. - for include in includes_list: - if not 'included' in aux_data[subdict_path]: - aux_data[subdict_path]['included'] = [] - aux_data[subdict_path]['included'].append(include) - - gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'" % include) - - MergeDicts(subdict, - LoadOneBuildFile(include, data, aux_data, variables, None, - False, check), - subdict_path, include) - - # Recurse into subdictionaries. 
- for k, v in subdict.iteritems(): - if v.__class__ == dict: - LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables, - None, check) - elif v.__class__ == list: - LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables, - check) - - -# This recurses into lists so that it can look for dicts. -def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, - variables, check): - for item in sublist: - if item.__class__ == dict: - LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data, - variables, None, check) - elif item.__class__ == list: - LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, - variables, check) - -# Processes toolsets in all the targets. This recurses into condition entries -# since they can contain toolsets as well. -def ProcessToolsetsInDict(data): - if 'targets' in data: - target_list = data['targets'] - new_target_list = [] - for target in target_list: - global multiple_toolsets - if multiple_toolsets: - toolsets = target.get('toolsets', ['target']) - else: - toolsets = ['target'] - if len(toolsets) > 0: - # Optimization: only do copies if more than one toolset is specified. - for build in toolsets[1:]: - new_target = copy.deepcopy(target) - new_target['toolset'] = build - new_target_list.append(new_target) - target['toolset'] = toolsets[0] - new_target_list.append(target) - data['targets'] = new_target_list - if 'conditions' in data: - for condition in data['conditions']: - if isinstance(condition, list): - for condition_dict in condition[1:]: - ProcessToolsetsInDict(condition_dict) - - -# TODO(mark): I don't love this name. It just means that it's going to load -# a build file that contains targets and is expected to provide a targets dict -# that contains the targets... -def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, - depth, check): - global absolute_build_file_paths - - # If depth is set, predefine the DEPTH variable to be a relative path from - # this build file's directory to the directory identified by depth. - if depth: - # TODO(dglazkov) The backslash/forward-slash replacement at the end is a - # temporary measure. This should really be addressed by keeping all paths - # in POSIX until actual project generation. - d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path)) - if d == '': - variables['DEPTH'] = '.' - else: - variables['DEPTH'] = d.replace('\\', '/') - - # If the generator needs absolue paths, then do so. - if absolute_build_file_paths: - build_file_path = os.path.abspath(build_file_path) - - if build_file_path in data['target_build_files']: - # Already loaded. - return - data['target_build_files'].add(build_file_path) - - gyp.DebugOutput(gyp.DEBUG_INCLUDES, - "Loading Target Build File '%s'" % build_file_path) - - build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables, - includes, True, check) - - # Store DEPTH for later use in generators. - build_file_data['_DEPTH'] = depth - - # Set up the included_files key indicating which .gyp files contributed to - # this target dict. - if 'included_files' in build_file_data: - raise KeyError, build_file_path + ' must not contain included_files key' - - included = GetIncludedBuildFiles(build_file_path, aux_data) - build_file_data['included_files'] = [] - for included_file in included: - # included_file is relative to the current directory, but it needs to - # be made relative to build_file_path's directory. 
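ProcessToolsetsInDict above duplicates each target once per requested toolset. A condensed sketch of just that copying behaviour (conditions handling is omitted):

import copy

def expand_toolsets(targets, multiple_toolsets):
    """Return the target list with one (deep-copied) entry per toolset."""
    expanded = []
    for target in targets:
        toolsets = target.get('toolsets', ['target']) if multiple_toolsets else ['target']
        for toolset in toolsets[1:]:        # copies are only made when >1 toolset is requested
            clone = copy.deepcopy(target)
            clone['toolset'] = toolset
            expanded.append(clone)
        target['toolset'] = toolsets[0]
        expanded.append(target)
    return expanded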
- included_relative = \ - gyp.common.RelativePath(included_file, - os.path.dirname(build_file_path)) - build_file_data['included_files'].append(included_relative) - - ProcessToolsetsInDict(build_file_data) - - # Apply "pre"/"early" variable expansions and condition evaluations. - ProcessVariablesAndConditionsInDict(build_file_data, False, variables, - build_file_path) - - # Look at each project's target_defaults dict, and merge settings into - # targets. - if 'target_defaults' in build_file_data: - index = 0 - if 'targets' in build_file_data: - while index < len(build_file_data['targets']): - # This procedure needs to give the impression that target_defaults is - # used as defaults, and the individual targets inherit from that. - # The individual targets need to be merged into the defaults. Make - # a deep copy of the defaults for each target, merge the target dict - # as found in the input file into that copy, and then hook up the - # copy with the target-specific data merged into it as the replacement - # target dict. - old_target_dict = build_file_data['targets'][index] - new_target_dict = copy.deepcopy(build_file_data['target_defaults']) - MergeDicts(new_target_dict, old_target_dict, - build_file_path, build_file_path) - build_file_data['targets'][index] = new_target_dict - index = index + 1 - else: - raise Exception, \ - "Unable to find targets in build file %s" % build_file_path - - # No longer needed. - del build_file_data['target_defaults'] - - # Look for dependencies. This means that dependency resolution occurs - # after "pre" conditionals and variable expansion, but before "post" - - # in other words, you can't put a "dependencies" section inside a "post" - # conditional within a target. - - if 'targets' in build_file_data: - for target_dict in build_file_data['targets']: - if 'dependencies' not in target_dict: - continue - for dependency in target_dict['dependencies']: - other_build_file = \ - gyp.common.ResolveTarget(build_file_path, dependency, None)[0] - try: - LoadTargetBuildFile(other_build_file, data, aux_data, variables, - includes, depth, check) - except Exception, e: - gyp.common.ExceptionAppend( - e, 'while loading dependencies of %s' % build_file_path) - raise - - return data - - -# Look for the bracket that matches the first bracket seen in a -# string, and return the start and end as a tuple. For example, if -# the input is something like "<(foo <(bar)) blah", then it would -# return (1, 13), indicating the entire string except for the leading -# "<" and trailing " blah". -def FindEnclosingBracketGroup(input): - brackets = { '}': '{', - ']': '[', - ')': '(', } - stack = [] - count = 0 - start = -1 - for char in input: - if char in brackets.values(): - stack.append(char) - if start == -1: - start = count - if char in brackets.keys(): - try: - last_bracket = stack.pop() - except IndexError: - return (-1, -1) - if last_bracket != brackets[char]: - return (-1, -1) - if len(stack) == 0: - return (start, count + 1) - count = count + 1 - return (-1, -1) - - -canonical_int_re = re.compile('^(0|-?[1-9][0-9]*)$') - - -def IsStrCanonicalInt(string): - """Returns True if |string| is in its canonical integer form. - - The canonical form is such that str(int(string)) == string. 
- """ - if not isinstance(string, str) or not canonical_int_re.match(string): - return False - - return True - - -early_variable_re = re.compile('(?P(?P<((!?@?)|\|)?)' - '\((?P\s*\[?)' - '(?P.*?)(\]?)\))') -late_variable_re = re.compile('(?P(?P>((!?@?)|\|)?)' - '\((?P\s*\[?)' - '(?P.*?)(\]?)\))') - -# Global cache of results from running commands so they don't have to be run -# more then once. -cached_command_results = {} - - -def FixupPlatformCommand(cmd): - if sys.platform == 'win32': - if type(cmd) == list: - cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:] - else: - cmd = re.sub('^cat ', 'type ', cmd) - return cmd - - -def ExpandVariables(input, is_late, variables, build_file): - # Look for the pattern that gets expanded into variables - if not is_late: - variable_re = early_variable_re - expansion_symbol = '<' - else: - variable_re = late_variable_re - expansion_symbol = '>' - - input_str = str(input) - # Do a quick scan to determine if an expensive regex search is warranted. - if expansion_symbol in input_str: - # Get the entire list of matches as a list of MatchObject instances. - # (using findall here would return strings instead of MatchObjects). - matches = [match for match in variable_re.finditer(input_str)] - else: - matches = None - - output = input_str - if matches: - # Reverse the list of matches so that replacements are done right-to-left. - # That ensures that earlier replacements won't mess up the string in a - # way that causes later calls to find the earlier substituted text instead - # of what's intended for replacement. - matches.reverse() - for match_group in matches: - match = match_group.groupdict() - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Matches: %s" % repr(match)) - # match['replace'] is the substring to look for, match['type'] - # is the character code for the replacement type (< > ! <| >| <@ - # >@ !@), match['is_array'] contains a '[' for command - # arrays, and match['content'] is the name of the variable (< >) - # or command to run (!). - - # run_command is true if a ! variant is used. - run_command = '!' in match['type'] - - # file_list is true if a | variant is used. - file_list = '|' in match['type'] - - # Capture these now so we can adjust them later. - replace_start = match_group.start('replace') - replace_end = match_group.end('replace') - - # Find the ending paren, and re-evaluate the contained string. - (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:]) - - # Adjust the replacement range to match the entire command - # found by FindEnclosingBracketGroup (since the variable_re - # probably doesn't match the entire command if it contained - # nested variables). - replace_end = replace_start + c_end - - # Find the "real" replacement, matching the appropriate closing - # paren, and adjust the replacement start and end. - replacement = input_str[replace_start:replace_end] - - # Figure out what the contents of the variable parens are. - contents_start = replace_start + c_start + 1 - contents_end = replace_end - 1 - contents = input_str[contents_start:contents_end] - - # Do filter substitution now for <|(). - # Admittedly, this is different than the evaluation order in other - # contexts. However, since filtration has no chance to run on <|(), - # this seems like the only obvious way to give them access to filters. 
- if file_list: - processed_variables = copy.deepcopy(variables) - ProcessListFiltersInDict(contents, processed_variables) - # Recurse to expand variables in the contents - contents = ExpandVariables(contents, is_late, - processed_variables, build_file) - else: - # Recurse to expand variables in the contents - contents = ExpandVariables(contents, is_late, variables, build_file) - - # Strip off leading/trailing whitespace so that variable matches are - # simpler below (and because they are rarely needed). - contents = contents.strip() - - # expand_to_list is true if an @ variant is used. In that case, - # the expansion should result in a list. Note that the caller - # is to be expecting a list in return, and not all callers do - # because not all are working in list context. Also, for list - # expansions, there can be no other text besides the variable - # expansion in the input string. - expand_to_list = '@' in match['type'] and input_str == replacement - - if run_command or file_list: - # Find the build file's directory, so commands can be run or file lists - # generated relative to it. - build_file_dir = os.path.dirname(build_file) - if build_file_dir == '': - # If build_file is just a leaf filename indicating a file in the - # current directory, build_file_dir might be an empty string. Set - # it to None to signal to subprocess.Popen that it should run the - # command in the current directory. - build_file_dir = None - - # Support <|(listfile.txt ...) which generates a file - # containing items from a gyp list, generated at gyp time. - # This works around actions/rules which have more inputs than will - # fit on the command line. - if file_list: - if type(contents) == list: - contents_list = contents - else: - contents_list = contents.split(' ') - replacement = contents_list[0] - path = replacement - if not os.path.isabs(path): - path = os.path.join(build_file_dir, path) - f = gyp.common.WriteOnDiff(path) - for i in contents_list[1:]: - f.write('%s\n' % i) - f.close() - - elif run_command: - use_shell = True - if match['is_array']: - contents = eval(contents) - use_shell = False - - # Check for a cached value to avoid executing commands, or generating - # file lists more than once. - # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is - # possible that the command being invoked depends on the current - # directory. For that case the syntax needs to be extended so that the - # directory is also used in cache_key (it becomes a tuple). - # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory, - # someone could author a set of GYP files where each time the command - # is invoked it produces different output by design. When the need - # arises, the syntax should be extended to support no caching off a - # command's output so it is run every time. - cache_key = str(contents) - cached_value = cached_command_results.get(cache_key, None) - if cached_value is None: - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Executing command '%s' in directory '%s'" % - (contents,build_file_dir)) - - # Fix up command with platform specific workarounds. - contents = FixupPlatformCommand(contents) - p = subprocess.Popen(contents, shell=use_shell, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - stdin=subprocess.PIPE, - cwd=build_file_dir) - - (p_stdout, p_stderr) = p.communicate('') - - if p.wait() != 0 or p_stderr: - sys.stderr.write(p_stderr) - # Simulate check_call behavior, since check_call only exists - # in python 2.5 and later. 
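The command-expansion path above caches results so identical '<!(...)' invocations are only run once per gyp run. A simplified Python 3 sketch of that behaviour; unlike the original, the cache key here also includes the working directory, which the deleted code only notes as a TODO:

import subprocess

_command_cache = {}

def run_command_expansion(command, cwd=None, use_shell=False):
    """Run a <!(...) command once and reuse its stripped stdout afterwards."""
    cache_key = (str(command), cwd)
    if cache_key in _command_cache:
        return _command_cache[cache_key]
    proc = subprocess.run(command, shell=use_shell, cwd=cwd,
                          capture_output=True, text=True)
    if proc.returncode != 0 or proc.stderr:
        raise RuntimeError("Call to %r returned exit status %d."
                           % (command, proc.returncode))
    result = proc.stdout.rstrip()
    _command_cache[cache_key] = result
    return result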
- raise Exception("Call to '%s' returned exit status %d." % - (contents, p.returncode)) - replacement = p_stdout.rstrip() - - cached_command_results[cache_key] = replacement - else: - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Had cache value for command '%s' in directory '%s'" % - (contents,build_file_dir)) - replacement = cached_value - - else: - if not contents in variables: - raise KeyError, 'Undefined variable ' + contents + \ - ' in ' + build_file - replacement = variables[contents] - - if isinstance(replacement, list): - for item in replacement: - if not isinstance(item, str) and not isinstance(item, int): - raise TypeError, 'Variable ' + contents + \ - ' must expand to a string or list of strings; ' + \ - 'list contains a ' + \ - item.__class__.__name__ - # Run through the list and handle variable expansions in it. Since - # the list is guaranteed not to contain dicts, this won't do anything - # with conditions sections. - ProcessVariablesAndConditionsInList(replacement, is_late, variables, - build_file) - elif not isinstance(replacement, str) and \ - not isinstance(replacement, int): - raise TypeError, 'Variable ' + contents + \ - ' must expand to a string or list of strings; ' + \ - 'found a ' + replacement.__class__.__name__ - - if expand_to_list: - # Expanding in list context. It's guaranteed that there's only one - # replacement to do in |input_str| and that it's this replacement. See - # above. - if isinstance(replacement, list): - # If it's already a list, make a copy. - output = replacement[:] - else: - # Split it the same way sh would split arguments. - output = shlex.split(str(replacement)) - else: - # Expanding in string context. - encoded_replacement = '' - if isinstance(replacement, list): - # When expanding a list into string context, turn the list items - # into a string in a way that will work with a subprocess call. - # - # TODO(mark): This isn't completely correct. This should - # call a generator-provided function that observes the - # proper list-to-argument quoting rules on a specific - # platform instead of just calling the POSIX encoding - # routine. - encoded_replacement = gyp.common.EncodePOSIXShellList(replacement) - else: - encoded_replacement = replacement - - output = output[:replace_start] + str(encoded_replacement) + \ - output[replace_end:] - # Prepare for the next match iteration. - input_str = output - - # Look for more matches now that we've replaced some, to deal with - # expanding local variables (variables defined in the same - # variables block as this one). - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Found output %s, recursing." % repr(output)) - if isinstance(output, list): - new_output = [] - for item in output: - new_output.append(ExpandVariables(item, is_late, variables, build_file)) - output = new_output - else: - output = ExpandVariables(output, is_late, variables, build_file) - - # Convert all strings that are canonically-represented integers into integers. - if isinstance(output, list): - for index in xrange(0, len(output)): - if IsStrCanonicalInt(output[index]): - output[index] = int(output[index]) - elif IsStrCanonicalInt(output): - output = int(output) - - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Expanding %s to %s" % (repr(input), repr(output))) - return output - - -def ProcessConditionsInDict(the_dict, is_late, variables, build_file): - # Process a 'conditions' or 'target_conditions' section in the_dict, - # depending on is_late. If is_late is False, 'conditions' is used. 
- # - # Each item in a conditions list consists of cond_expr, a string expression - # evaluated as the condition, and true_dict, a dict that will be merged into - # the_dict if cond_expr evaluates to true. Optionally, a third item, - # false_dict, may be present. false_dict is merged into the_dict if - # cond_expr evaluates to false. - # - # Any dict merged into the_dict will be recursively processed for nested - # conditionals and other expansions, also according to is_late, immediately - # prior to being merged. - - if not is_late: - conditions_key = 'conditions' - else: - conditions_key = 'target_conditions' - - if not conditions_key in the_dict: - return - - conditions_list = the_dict[conditions_key] - # Unhook the conditions list, it's no longer needed. - del the_dict[conditions_key] - - for condition in conditions_list: - if not isinstance(condition, list): - raise TypeError, conditions_key + ' must be a list' - if len(condition) != 2 and len(condition) != 3: - # It's possible that condition[0] won't work in which case this - # attempt will raise its own IndexError. That's probably fine. - raise IndexError, conditions_key + ' ' + condition[0] + \ - ' must be length 2 or 3, not ' + len(condition) - - [cond_expr, true_dict] = condition[0:2] - false_dict = None - if len(condition) == 3: - false_dict = condition[2] - - # Do expansions on the condition itself. Since the conditon can naturally - # contain variable references without needing to resort to GYP expansion - # syntax, this is of dubious value for variables, but someone might want to - # use a command expansion directly inside a condition. - cond_expr_expanded = ExpandVariables(cond_expr, is_late, variables, - build_file) - if not isinstance(cond_expr_expanded, str) and \ - not isinstance(cond_expr_expanded, int): - raise ValueError, \ - 'Variable expansion in this context permits str and int ' + \ - 'only, found ' + expanded.__class__.__name__ - - try: - ast_code = compile(cond_expr_expanded, '', 'eval') - - if eval(ast_code, {'__builtins__': None}, variables): - merge_dict = true_dict - else: - merge_dict = false_dict - except SyntaxError, e: - syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s ' - 'at character %d.' % - (str(e.args[0]), e.text, build_file, e.offset), - e.filename, e.lineno, e.offset, e.text) - raise syntax_error - except NameError, e: - gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' % - (cond_expr_expanded, build_file)) - raise - - if merge_dict != None: - # Expand variables and nested conditinals in the merge_dict before - # merging it. - ProcessVariablesAndConditionsInDict(merge_dict, is_late, - variables, build_file) - - MergeDicts(the_dict, merge_dict, build_file, build_file) - - -def LoadAutomaticVariablesFromDict(variables, the_dict): - # Any keys with plain string values in the_dict become automatic variables. - # The variable name is the key name with a "_" character prepended. - for key, value in the_dict.iteritems(): - if isinstance(value, str) or isinstance(value, int) or \ - isinstance(value, list): - variables['_' + key] = value - - -def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): - # Any keys in the_dict's "variables" dict, if it has one, becomes a - # variable. The variable name is the key name in the "variables" dict. - # Variables that end with the % character are set only if they are unset in - # the variables dict. the_dict_key is the name of the key that accesses - # the_dict in the_dict's parent dict. 
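Condition evaluation in ProcessConditionsInDict above boils down to compiling the expression and evaluating it against the variables dict with builtins disabled, then merging whichever dict matched. A small sketch of just that decision step, with illustrative names:

def evaluate_condition(condition, variables):
    """Pick true_dict or false_dict from one ['expr', {...}, {...}] condition entry."""
    cond_expr, true_dict = condition[0:2]
    false_dict = condition[2] if len(condition) == 3 else None
    ast_code = compile(cond_expr, '<string>', 'eval')
    if eval(ast_code, {'__builtins__': None}, variables):
        return true_dict
    return false_dict

# evaluate_condition(['OS=="mac"', {'defines': ['IS_MAC']}], {'OS': 'mac'})
#   == {'defines': ['IS_MAC']}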
If the_dict's parent is not a dict - # (it could be a list or it could be parentless because it is a root dict), - # the_dict_key will be None. - for key, value in the_dict.get('variables', {}).iteritems(): - if not isinstance(value, str) and not isinstance(value, int) and \ - not isinstance(value, list): - continue - - if key.endswith('%'): - variable_name = key[:-1] - if variable_name in variables: - # If the variable is already set, don't set it. - continue - if the_dict_key is 'variables' and variable_name in the_dict: - # If the variable is set without a % in the_dict, and the_dict is a - # variables dict (making |variables| a varaibles sub-dict of a - # variables dict), use the_dict's definition. - value = the_dict[variable_name] - else: - variable_name = key - - variables[variable_name] = value - - -def ProcessVariablesAndConditionsInDict(the_dict, is_late, variables_in, - build_file, the_dict_key=None): - """Handle all variable and command expansion and conditional evaluation. - - This function is the public entry point for all variable expansions and - conditional evaluations. The variables_in dictionary will not be modified - by this function. - """ - - # Make a copy of the variables_in dict that can be modified during the - # loading of automatics and the loading of the variables dict. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - - if 'variables' in the_dict: - # Make sure all the local variables are added to the variables - # list before we process them so that you can reference one - # variable from another. They will be fully expanded by recursion - # in ExpandVariables. - for key, value in the_dict['variables'].iteritems(): - variables[key] = value - - # Handle the associated variables dict first, so that any variable - # references within can be resolved prior to using them as variables. - # Pass a copy of the variables dict to avoid having it be tainted. - # Otherwise, it would have extra automatics added for everything that - # should just be an ordinary variable in this scope. - ProcessVariablesAndConditionsInDict(the_dict['variables'], is_late, - variables, build_file, 'variables') - - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - for key, value in the_dict.iteritems(): - # Skip "variables", which was already processed if present. - if key != 'variables' and isinstance(value, str): - expanded = ExpandVariables(value, is_late, variables, build_file) - if not isinstance(expanded, str) and not isinstance(expanded, int): - raise ValueError, \ - 'Variable expansion in this context permits str and int ' + \ - 'only, found ' + expanded.__class__.__name__ + ' for ' + key - the_dict[key] = expanded - - # Variable expansion may have resulted in changes to automatics. Reload. - # TODO(mark): Optimization: only reload if no changes were made. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - # Process conditions in this dict. This is done after variable expansion - # so that conditions may take advantage of expanded variables. For example, - # if the_dict contains: - # {'type': '<(library_type)', - # 'conditions': [['_type=="static_library"', { ... }]]}, - # _type, as used in the condition, will only be set to the value of - # library_type if variable expansion is performed before condition - # processing. 
However, condition processing should occur prior to recursion - # so that variables (both automatic and "variables" dict type) may be - # adjusted by conditions sections, merged into the_dict, and have the - # intended impact on contained dicts. - # - # This arrangement means that a "conditions" section containing a "variables" - # section will only have those variables effective in subdicts, not in - # the_dict. The workaround is to put a "conditions" section within a - # "variables" section. For example: - # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]], - # 'defines': ['<(define)'], - # 'my_subdict': {'defines': ['<(define)']}}, - # will not result in "IS_MAC" being appended to the "defines" list in the - # current scope but would result in it being appended to the "defines" list - # within "my_subdict". By comparison: - # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]}, - # 'defines': ['<(define)'], - # 'my_subdict': {'defines': ['<(define)']}}, - # will append "IS_MAC" to both "defines" lists. - - # Evaluate conditions sections, allowing variable expansions within them - # as well as nested conditionals. This will process a 'conditions' or - # 'target_conditions' section, perform appropriate merging and recursive - # conditional and variable processing, and then remove the conditions section - # from the_dict if it is present. - ProcessConditionsInDict(the_dict, is_late, variables, build_file) - - # Conditional processing may have resulted in changes to automatics or the - # variables dict. Reload. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - # Recurse into child dicts, or process child lists which may result in - # further recursion into descendant dicts. - for key, value in the_dict.iteritems(): - # Skip "variables" and string values, which were already processed if - # present. - if key == 'variables' or isinstance(value, str): - continue - if isinstance(value, dict): - # Pass a copy of the variables dict so that subdicts can't influence - # parents. - ProcessVariablesAndConditionsInDict(value, is_late, variables, - build_file, key) - elif isinstance(value, list): - # The list itself can't influence the variables dict, and - # ProcessVariablesAndConditionsInList will make copies of the variables - # dict if it needs to pass it to something that can influence it. No - # copy is necessary here. - ProcessVariablesAndConditionsInList(value, is_late, variables, - build_file) - elif not isinstance(value, int): - raise TypeError, 'Unknown type ' + value.__class__.__name__ + \ - ' for ' + key - - -def ProcessVariablesAndConditionsInList(the_list, is_late, variables, - build_file): - # Iterate using an index so that new values can be assigned into the_list. - index = 0 - while index < len(the_list): - item = the_list[index] - if isinstance(item, dict): - # Make a copy of the variables dict so that it won't influence anything - # outside of its own scope. 
- ProcessVariablesAndConditionsInDict(item, is_late, variables, build_file) - elif isinstance(item, list): - ProcessVariablesAndConditionsInList(item, is_late, variables, build_file) - elif isinstance(item, str): - expanded = ExpandVariables(item, is_late, variables, build_file) - if isinstance(expanded, str) or isinstance(expanded, int): - the_list[index] = expanded - elif isinstance(expanded, list): - del the_list[index] - for expanded_item in expanded: - the_list.insert(index, expanded_item) - index = index + 1 - - # index now identifies the next item to examine. Continue right now - # without falling into the index increment below. - continue - else: - raise ValueError, \ - 'Variable expansion in this context permits strings and ' + \ - 'lists only, found ' + expanded.__class__.__name__ + ' at ' + \ - index - elif not isinstance(item, int): - raise TypeError, 'Unknown type ' + item.__class__.__name__ + \ - ' at index ' + index - index = index + 1 - - -def BuildTargetsDict(data): - """Builds a dict mapping fully-qualified target names to their target dicts. - - |data| is a dict mapping loaded build files by pathname relative to the - current directory. Values in |data| are build file contents. For each - |data| value with a "targets" key, the value of the "targets" key is taken - as a list containing target dicts. Each target's fully-qualified name is - constructed from the pathname of the build file (|data| key) and its - "target_name" property. These fully-qualified names are used as the keys - in the returned dict. These keys provide access to the target dicts, - the dicts in the "targets" lists. - """ - - targets = {} - for build_file in data['target_build_files']: - for target in data[build_file].get('targets', []): - target_name = gyp.common.QualifiedTarget(build_file, - target['target_name'], - target['toolset']) - if target_name in targets: - raise KeyError, 'Duplicate target definitions for ' + target_name - targets[target_name] = target - - return targets - - -def QualifyDependencies(targets): - """Make dependency links fully-qualified relative to the current directory. - - |targets| is a dict mapping fully-qualified target names to their target - dicts. For each target in this dict, keys known to contain dependency - links are examined, and any dependencies referenced will be rewritten - so that they are fully-qualified and relative to the current directory. - All rewritten dependencies are suitable for use as keys to |targets| or a - similar dict. - """ - - for target, target_dict in targets.iteritems(): - target_build_file = gyp.common.BuildFile(target) - toolset = target_dict['toolset'] - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - for index in xrange(0, len(dependencies)): - dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( - target_build_file, dependencies[index], toolset) - global multiple_toolsets - if not multiple_toolsets: - # Ignore toolset specification in the dependency if it is specified. - dep_toolset = toolset - dependency = gyp.common.QualifiedTarget(dep_file, - dep_target, - dep_toolset) - dependencies[index] = dependency - - # Make sure anything appearing in a list other than "dependencies" also - # appears in the "dependencies" list. 
- if dependency_key != 'dependencies' and \ - dependency not in target_dict['dependencies']: - raise KeyError, 'Found ' + dependency + ' in ' + dependency_key + \ - ' of ' + target + ', but not in dependencies' - - -def ExpandWildcardDependencies(targets, data): - """Expands dependencies specified as build_file:*. - - For each target in |targets|, examines sections containing links to other - targets. If any such section contains a link of the form build_file:*, it - is taken as a wildcard link, and is expanded to list each target in - build_file. The |data| dict provides access to build file dicts. - - Any target that does not wish to be included by wildcard can provide an - optional "suppress_wildcard" key in its target dict. When present and - true, a wildcard dependency link will not include such targets. - - All dependency names, including the keys to |targets| and the values in each - dependency list, must be qualified when this function is called. - """ - - for target, target_dict in targets.iteritems(): - toolset = target_dict['toolset'] - target_build_file = gyp.common.BuildFile(target) - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - - # Loop this way instead of "for dependency in" or "for index in xrange" - # because the dependencies list will be modified within the loop body. - index = 0 - while index < len(dependencies): - (dependency_build_file, dependency_target, dependency_toolset) = \ - gyp.common.ParseQualifiedTarget(dependencies[index]) - if dependency_target != '*' and dependency_toolset != '*': - # Not a wildcard. Keep it moving. - index = index + 1 - continue - - if dependency_build_file == target_build_file: - # It's an error for a target to depend on all other targets in - # the same file, because a target cannot depend on itself. - raise KeyError, 'Found wildcard in ' + dependency_key + ' of ' + \ - target + ' referring to same build file' - - # Take the wildcard out and adjust the index so that the next - # dependency in the list will be processed the next time through the - # loop. - del dependencies[index] - index = index - 1 - - # Loop through the targets in the other build file, adding them to - # this target's list of dependencies in place of the removed - # wildcard. - dependency_target_dicts = data[dependency_build_file]['targets'] - for dependency_target_dict in dependency_target_dicts: - if int(dependency_target_dict.get('suppress_wildcard', False)): - continue - dependency_target_name = dependency_target_dict['target_name'] - if (dependency_target != '*' and - dependency_target != dependency_target_name): - continue - dependency_target_toolset = dependency_target_dict['toolset'] - if (dependency_toolset != '*' and - dependency_toolset != dependency_target_toolset): - continue - dependency = gyp.common.QualifiedTarget(dependency_build_file, - dependency_target_name, - dependency_target_toolset) - index = index + 1 - dependencies.insert(index, dependency) - - index = index + 1 - - -class DependencyGraphNode(object): - """ - - Attributes: - ref: A reference to an object that this DependencyGraphNode represents. - dependencies: List of DependencyGraphNodes on which this one depends. - dependents: List of DependencyGraphNodes that depend on this one. 
- """ - - class CircularException(Exception): - pass - - def __init__(self, ref): - self.ref = ref - self.dependencies = [] - self.dependents = [] - - def FlattenToList(self): - # flat_list is the sorted list of dependencies - actually, the list items - # are the "ref" attributes of DependencyGraphNodes. Every target will - # appear in flat_list after all of its dependencies, and before all of its - # dependents. - flat_list = [] - - # in_degree_zeros is the list of DependencyGraphNodes that have no - # dependencies not in flat_list. Initially, it is a copy of the children - # of this node, because when the graph was built, nodes with no - # dependencies were made implicit dependents of the root node. - in_degree_zeros = self.dependents[:] - - while in_degree_zeros: - # Nodes in in_degree_zeros have no dependencies not in flat_list, so they - # can be appended to flat_list. Take these nodes out of in_degree_zeros - # as work progresses, so that the next node to process from the list can - # always be accessed at a consistent position. - node = in_degree_zeros.pop(0) - flat_list.append(node.ref) - - # Look at dependents of the node just added to flat_list. Some of them - # may now belong in in_degree_zeros. - for node_dependent in node.dependents: - is_in_degree_zero = True - for node_dependent_dependency in node_dependent.dependencies: - if not node_dependent_dependency.ref in flat_list: - # The dependent one or more dependencies not in flat_list. There - # will be more chances to add it to flat_list when examining - # it again as a dependent of those other dependencies, provided - # that there are no cycles. - is_in_degree_zero = False - break - - if is_in_degree_zero: - # All of the dependent's dependencies are already in flat_list. Add - # it to in_degree_zeros where it will be processed in a future - # iteration of the outer loop. - in_degree_zeros.append(node_dependent) - - return flat_list - - def DirectDependencies(self, dependencies=None): - """Returns a list of just direct dependencies.""" - if dependencies == None: - dependencies = [] - - for dependency in self.dependencies: - # Check for None, corresponding to the root node. - if dependency.ref != None and dependency.ref not in dependencies: - dependencies.append(dependency.ref) - - return dependencies - - def _AddImportedDependencies(self, targets, dependencies=None): - """Given a list of direct dependencies, adds indirect dependencies that - other dependencies have declared to export their settings. - - This method does not operate on self. Rather, it operates on the list - of dependencies in the |dependencies| argument. For each dependency in - that list, if any declares that it exports the settings of one of its - own dependencies, those dependencies whose settings are "passed through" - are added to the list. As new items are added to the list, they too will - be processed, so it is possible to import settings through multiple levels - of dependencies. - - This method is not terribly useful on its own, it depends on being - "primed" with a list of direct dependencies such as one provided by - DirectDependencies. DirectAndImportedDependencies is intended to be the - public entry point. - """ - - if dependencies == None: - dependencies = [] - - index = 0 - while index < len(dependencies): - dependency = dependencies[index] - dependency_dict = targets[dependency] - # Add any dependencies whose settings should be imported to the list - # if not already present. 
Newly-added items will be checked for - # their own imports when the list iteration reaches them. - # Rather than simply appending new items, insert them after the - # dependency that exported them. This is done to more closely match - # the depth-first method used by DeepDependencies. - add_index = 1 - for imported_dependency in \ - dependency_dict.get('export_dependent_settings', []): - if imported_dependency not in dependencies: - dependencies.insert(index + add_index, imported_dependency) - add_index = add_index + 1 - index = index + 1 - - return dependencies - - def DirectAndImportedDependencies(self, targets, dependencies=None): - """Returns a list of a target's direct dependencies and all indirect - dependencies that a dependency has advertised settings should be exported - through the dependency for. - """ - - dependencies = self.DirectDependencies(dependencies) - return self._AddImportedDependencies(targets, dependencies) - - def DeepDependencies(self, dependencies=None): - """Returns a list of all of a target's dependencies, recursively.""" - if dependencies == None: - dependencies = [] - - for dependency in self.dependencies: - # Check for None, corresponding to the root node. - if dependency.ref != None and dependency.ref not in dependencies: - dependencies.append(dependency.ref) - dependency.DeepDependencies(dependencies) - - return dependencies - - def LinkDependencies(self, targets, dependencies=None, initial=True): - """Returns a list of dependency targets that are linked into this target. - - This function has a split personality, depending on the setting of - |initial|. Outside callers should always leave |initial| at its default - setting. - - When adding a target to the list of dependencies, this function will - recurse into itself with |initial| set to False, to collect depenedencies - that are linked into the linkable target for which the list is being built. - """ - if dependencies == None: - dependencies = [] - - # Check for None, corresponding to the root node. - if self.ref == None: - return dependencies - - # It's kind of sucky that |targets| has to be passed into this function, - # but that's presently the easiest way to access the target dicts so that - # this function can find target types. - - if not 'target_name' in targets[self.ref]: - raise Exception("Missing 'target_name' field in target.") - - try: - target_type = targets[self.ref]['type'] - except KeyError, e: - raise Exception("Missing 'type' field in target %s" % - targets[self.ref]['target_name']) - - is_linkable = target_type in linkable_types - - if initial and not is_linkable: - # If this is the first target being examined and it's not linkable, - # return an empty list of link dependencies, because the link - # dependencies are intended to apply to the target itself (initial is - # True) and this target won't be linked. - return dependencies - - # Executables and loadable modules are already fully and finally linked. - # Nothing else can be a link dependency of them, there can only be - # dependencies in the sense that a dependent target might run an - # executable or load the loadable_module. - if not initial and target_type in ('executable', 'loadable_module'): - return dependencies - - # The target is linkable, add it to the list of link dependencies. 
- if self.ref not in dependencies: - if target_type != 'none': - # Special case: "none" type targets don't produce any linkable products - # and shouldn't be exposed as link dependencies, although dependencies - # of "none" type targets may still be link dependencies. - dependencies.append(self.ref) - if initial or not is_linkable: - # If this is a subsequent target and it's linkable, don't look any - # further for linkable dependencies, as they'll already be linked into - # this target linkable. Always look at dependencies of the initial - # target, and always look at dependencies of non-linkables. - for dependency in self.dependencies: - dependency.LinkDependencies(targets, dependencies, False) - - return dependencies - - -def BuildDependencyList(targets): - # Create a DependencyGraphNode for each target. Put it into a dict for easy - # access. - dependency_nodes = {} - for target, spec in targets.iteritems(): - if not target in dependency_nodes: - dependency_nodes[target] = DependencyGraphNode(target) - - # Set up the dependency links. Targets that have no dependencies are treated - # as dependent on root_node. - root_node = DependencyGraphNode(None) - for target, spec in targets.iteritems(): - target_node = dependency_nodes[target] - target_build_file = gyp.common.BuildFile(target) - if not 'dependencies' in spec or len(spec['dependencies']) == 0: - target_node.dependencies = [root_node] - root_node.dependents.append(target_node) - else: - dependencies = spec['dependencies'] - for index in xrange(0, len(dependencies)): - try: - dependency = dependencies[index] - dependency_node = dependency_nodes[dependency] - target_node.dependencies.append(dependency_node) - dependency_node.dependents.append(target_node) - except KeyError, e: - gyp.common.ExceptionAppend(e, - 'while trying to load target %s' % target) - raise - - flat_list = root_node.FlattenToList() - - # If there's anything left unvisited, there must be a circular dependency - # (cycle). If you need to figure out what's wrong, look for elements of - # targets that are not in flat_list. - if len(flat_list) != len(targets): - raise DependencyGraphNode.CircularException, \ - 'Some targets not reachable, cycle in dependency graph detected' - - return [dependency_nodes, flat_list] - - -def VerifyNoGYPFileCircularDependencies(targets): - # Create a DependencyGraphNode for each gyp file containing a target. Put - # it into a dict for easy access. - dependency_nodes = {} - for target in targets.iterkeys(): - build_file = gyp.common.BuildFile(target) - if not build_file in dependency_nodes: - dependency_nodes[build_file] = DependencyGraphNode(build_file) - - # Set up the dependency links. - for target, spec in targets.iteritems(): - build_file = gyp.common.BuildFile(target) - build_file_node = dependency_nodes[build_file] - target_dependencies = spec.get('dependencies', []) - for dependency in target_dependencies: - try: - dependency_build_file = gyp.common.BuildFile(dependency) - if dependency_build_file == build_file: - # A .gyp file is allowed to refer back to itself. - continue - dependency_node = dependency_nodes[dependency_build_file] - if dependency_node not in build_file_node.dependencies: - build_file_node.dependencies.append(dependency_node) - dependency_node.dependents.append(build_file_node) - except KeyError, e: - gyp.common.ExceptionAppend( - e, 'while computing dependencies of .gyp file %s' % build_file) - raise - - # Files that have no dependencies are treated as dependent on root_node. 
- root_node = DependencyGraphNode(None) - for build_file_node in dependency_nodes.itervalues(): - if len(build_file_node.dependencies) == 0: - build_file_node.dependencies.append(root_node) - root_node.dependents.append(build_file_node) - - flat_list = root_node.FlattenToList() - - # If there's anything left unvisited, there must be a circular dependency - # (cycle). - if len(flat_list) != len(dependency_nodes): - bad_files = [] - for file in dependency_nodes.iterkeys(): - if not file in flat_list: - bad_files.append(file) - raise DependencyGraphNode.CircularException, \ - 'Some files not reachable, cycle in .gyp file dependency graph ' + \ - 'detected involving some or all of: ' + \ - ' '.join(bad_files) - - -def DoDependentSettings(key, flat_list, targets, dependency_nodes): - # key should be one of all_dependent_settings, direct_dependent_settings, - # or link_settings. - - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - - if key == 'all_dependent_settings': - dependencies = dependency_nodes[target].DeepDependencies() - elif key == 'direct_dependent_settings': - dependencies = \ - dependency_nodes[target].DirectAndImportedDependencies(targets) - elif key == 'link_settings': - dependencies = dependency_nodes[target].LinkDependencies(targets) - else: - raise KeyError, "DoDependentSettings doesn't know how to determine " + \ - 'dependencies for ' + key - - for dependency in dependencies: - dependency_dict = targets[dependency] - if not key in dependency_dict: - continue - dependency_build_file = gyp.common.BuildFile(dependency) - MergeDicts(target_dict, dependency_dict[key], - build_file, dependency_build_file) - - -def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes): - # Recompute target "dependencies" properties. For each static library - # target, remove "dependencies" entries referring to other static libraries, - # unless the dependency has the "hard_dependency" attribute set. For each - # linkable target, add a "dependencies" entry referring to all of the - # target's computed list of link dependencies (including static libraries - # if no such entry is already present. - for target in flat_list: - target_dict = targets[target] - target_type = target_dict['type'] - - if target_type == 'static_library': - if not 'dependencies' in target_dict: - continue - - target_dict['dependencies_original'] = target_dict.get( - 'dependencies', [])[:] - - index = 0 - while index < len(target_dict['dependencies']): - dependency = target_dict['dependencies'][index] - dependency_dict = targets[dependency] - if dependency_dict['type'] == 'static_library' and \ - (not 'hard_dependency' in dependency_dict or \ - not dependency_dict['hard_dependency']): - # A static library should not depend on another static library unless - # the dependency relationship is "hard," which should only be done - # when a dependent relies on some side effect other than just the - # build product, like a rule or action output. Take the dependency - # out of the list, and don't increment index because the next - # dependency to analyze will shift into the index formerly occupied - # by the one being removed. - del target_dict['dependencies'][index] - else: - index = index + 1 - - # If the dependencies list is empty, it's not needed, so unhook it. - if len(target_dict['dependencies']) == 0: - del target_dict['dependencies'] - - elif target_type in linkable_types: - # Get a list of dependency targets that should be linked into this - # target. 
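A toy rendering of the static-library pruning described above, assuming a bare targets dict with invented names; the real code also records dependencies_original, deletes emptied lists, and runs inside the flat_list walk.

def prune_static_deps(targets):
  # Drop static_library -> static_library edges unless marked hard_dependency.
  for spec in targets.values():
    if spec.get('type') != 'static_library' or 'dependencies' not in spec:
      continue
    spec['dependencies'] = [
        dep for dep in spec['dependencies']
        if not (targets[dep]['type'] == 'static_library' and
                not targets[dep].get('hard_dependency'))]
  return targets

targets = {'a': {'type': 'static_library', 'dependencies': ['b', 'c']},
           'b': {'type': 'static_library'},
           'c': {'type': 'static_library', 'hard_dependency': 1}}
print(prune_static_deps(targets)['a']['dependencies'])  # ['c']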
Add them to the dependencies list if they're not already - # present. - - link_dependencies = dependency_nodes[target].LinkDependencies(targets) - for dependency in link_dependencies: - if dependency == target: - continue - if not 'dependencies' in target_dict: - target_dict['dependencies'] = [] - if not dependency in target_dict['dependencies']: - target_dict['dependencies'].append(dependency) - -# Initialize this here to speed up MakePathRelative. -exception_re = re.compile(r'''["']?[-/$<>]''') - - -def MakePathRelative(to_file, fro_file, item): - # If item is a relative path, it's relative to the build file dict that it's - # coming from. Fix it up to make it relative to the build file dict that - # it's going into. - # Exception: any |item| that begins with these special characters is - # returned without modification. - # / Used when a path is already absolute (shortcut optimization; - # such paths would be returned as absolute anyway) - # $ Used for build environment variables - # - Used for some build environment flags (such as -lapr-1 in a - # "libraries" section) - # < Used for our own variable and command expansions (see ExpandVariables) - # > Used for our own variable and command expansions (see ExpandVariables) - # - # "/' Used when a value is quoted. If these are present, then we - # check the second character instead. - # - if to_file == fro_file or exception_re.match(item): - return item - else: - # TODO(dglazkov) The backslash/forward-slash replacement at the end is a - # temporary measure. This should really be addressed by keeping all paths - # in POSIX until actual project generation. - return os.path.normpath(os.path.join( - gyp.common.RelativePath(os.path.dirname(fro_file), - os.path.dirname(to_file)), - item)).replace('\\', '/') - - -def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True): - prepend_index = 0 - - for item in fro: - singleton = False - if isinstance(item, str) or isinstance(item, int): - # The cheap and easy case. - if is_paths: - to_item = MakePathRelative(to_file, fro_file, item) - else: - to_item = item - - if not isinstance(item, str) or not item.startswith('-'): - # Any string that doesn't begin with a "-" is a singleton - it can - # only appear once in a list, to be enforced by the list merge append - # or prepend. - singleton = True - elif isinstance(item, dict): - # Make a copy of the dictionary, continuing to look for paths to fix. - # The other intelligent aspects of merge processing won't apply because - # item is being merged into an empty dict. - to_item = {} - MergeDicts(to_item, item, to_file, fro_file) - elif isinstance(item, list): - # Recurse, making a copy of the list. If the list contains any - # descendant dicts, path fixing will occur. Note that here, custom - # values for is_paths and append are dropped; those are only to be - # applied to |to| and |fro|, not sublists of |fro|. append shouldn't - # matter anyway because the new |to_item| list is empty. - to_item = [] - MergeLists(to_item, item, to_file, fro_file) - else: - raise TypeError, \ - 'Attempt to merge list item of unsupported type ' + \ - item.__class__.__name__ - - if append: - # If appending a singleton that's already in the list, don't append. - # This ensures that the earliest occurrence of the item will stay put. - if not singleton or not to_item in to: - to.append(to_item) - else: - # If prepending a singleton that's already in the list, remove the - # existing instance and proceed with the prepend. 
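The effect of the MakePathRelative rebasing described earlier can be reproduced with the standard library alone; os.path.relpath stands in for gyp.common.RelativePath here, and the file names are invented.

import os

to_file = 'base/base.gyp'        # build file the value is merged into
fro_file = 'gui/tool/tool.gyp'   # build file the value came from
item = 'icons/app.png'           # path relative to fro_file's directory
rebased = os.path.normpath(os.path.join(
    os.path.relpath(os.path.dirname(fro_file), os.path.dirname(to_file)),
    item)).replace('\\', '/')
print(rebased)  # ../gui/tool/icons/app.png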
This ensures that the - # item appears at the earliest possible position in the list. - while singleton and to_item in to: - to.remove(to_item) - - # Don't just insert everything at index 0. That would prepend the new - # items to the list in reverse order, which would be an unwelcome - # surprise. - to.insert(prepend_index, to_item) - prepend_index = prepend_index + 1 - - -def MergeDicts(to, fro, to_file, fro_file): - # I wanted to name the parameter "from" but it's a Python keyword... - for k, v in fro.iteritems(): - # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give - # copy semantics. Something else may want to merge from the |fro| dict - # later, and having the same dict ref pointed to twice in the tree isn't - # what anyone wants considering that the dicts may subsequently be - # modified. - if k in to: - bad_merge = False - if isinstance(v, str) or isinstance(v, int): - if not (isinstance(to[k], str) or isinstance(to[k], int)): - bad_merge = True - elif v.__class__ != to[k].__class__: - bad_merge = True - - if bad_merge: - raise TypeError, \ - 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ - ' into incompatible type ' + to[k].__class__.__name__ + \ - ' for key ' + k - if isinstance(v, str) or isinstance(v, int): - # Overwrite the existing value, if any. Cheap and easy. - is_path = IsPathSection(k) - if is_path: - to[k] = MakePathRelative(to_file, fro_file, v) - else: - to[k] = v - elif isinstance(v, dict): - # Recurse, guaranteeing copies will be made of objects that require it. - if not k in to: - to[k] = {} - MergeDicts(to[k], v, to_file, fro_file) - elif isinstance(v, list): - # Lists in dicts can be merged with different policies, depending on - # how the key in the "from" dict (k, the from-key) is written. - # - # If the from-key has ...the to-list will have this action - # this character appended:... applied when receiving the from-list: - # = replace - # + prepend - # ? set, only if to-list does not yet exist - # (none) append - # - # This logic is list-specific, but since it relies on the associated - # dict key, it's checked in this dict-oriented function. - ext = k[-1] - append = True - if ext == '=': - list_base = k[:-1] - lists_incompatible = [list_base, list_base + '?'] - to[list_base] = [] - elif ext == '+': - list_base = k[:-1] - lists_incompatible = [list_base + '=', list_base + '?'] - append = False - elif ext == '?': - list_base = k[:-1] - lists_incompatible = [list_base, list_base + '=', list_base + '+'] - else: - list_base = k - lists_incompatible = [list_base + '=', list_base + '?'] - - # Some combinations of merge policies appearing together are meaningless. - # It's stupid to replace and append simultaneously, for example. Append - # and prepend are the only policies that can coexist. - for list_incompatible in lists_incompatible: - if list_incompatible in fro: - raise KeyError, 'Incompatible list policies ' + k + ' and ' + \ - list_incompatible - - if list_base in to: - if ext == '?': - # If the key ends in "?", the list will only be merged if it doesn't - # already exist. - continue - if not isinstance(to[list_base], list): - # This may not have been checked above if merging in a list with an - # extension character. - raise TypeError, \ - 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ - ' into incompatible type ' + to[list_base].__class__.__name__ + \ - ' for key ' + list_base + '(' + k + ')' - else: - to[list_base] = [] - - # Call MergeLists, which will make copies of objects that require it. 
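A minimal illustration of the key-suffix policies in the table above, assuming plain string lists and none of the path fixing, singleton handling, or incompatibility checks that MergeDicts and MergeLists perform; the helper name is invented.

def merge_list_key(to, key, fro_value):
  ext = key[-1] if key[-1] in '=+?' else ''
  base = key[:-1] if ext else key
  if ext == '=':                     # replace
    to[base] = list(fro_value)
  elif ext == '+':                   # prepend
    to[base] = list(fro_value) + to.get(base, [])
  elif ext == '?':                   # set, only if not already present
    to.setdefault(base, list(fro_value))
  else:                              # append
    to.setdefault(base, []).extend(fro_value)

to = {'defines': ['A']}
merge_list_key(to, 'defines+', ['B'])
print(to['defines'])  # ['B', 'A']
merge_list_key(to, 'defines', ['C'])
print(to['defines'])  # ['B', 'A', 'C']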
- # MergeLists can recurse back into MergeDicts, although this will be - # to make copies of dicts (with paths fixed), there will be no - # subsequent dict "merging" once entering a list because lists are - # always replaced, appended to, or prepended to. - is_paths = IsPathSection(list_base) - MergeLists(to[list_base], v, to_file, fro_file, is_paths, append) - else: - raise TypeError, \ - 'Attempt to merge dict value of unsupported type ' + \ - v.__class__.__name__ + ' for key ' + k - - -def MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, configuration, visited): - # Skip if previously visted. - if configuration in visited: - return - - # Look at this configuration. - configuration_dict = target_dict['configurations'][configuration] - - # Merge in parents. - for parent in configuration_dict.get('inherit_from', []): - MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, parent, visited + [configuration]) - - # Merge it into the new config. - MergeDicts(new_configuration_dict, configuration_dict, - build_file, build_file) - - # Drop abstract. - if 'abstract' in new_configuration_dict: - del new_configuration_dict['abstract'] - - -def SetUpConfigurations(target, target_dict): - global non_configuration_keys - # key_suffixes is a list of key suffixes that might appear on key names. - # These suffixes are handled in conditional evaluations (for =, +, and ?) - # and rules/exclude processing (for ! and /). Keys with these suffixes - # should be treated the same as keys without. - key_suffixes = ['=', '+', '?', '!', '/'] - - build_file = gyp.common.BuildFile(target) - - # Provide a single configuration by default if none exists. - # TODO(mark): Signal an error if default_configurations exists but - # configurations does not. - if not 'configurations' in target_dict: - target_dict['configurations'] = {'Default': {}} - if not 'default_configuration' in target_dict: - concrete = [i for i in target_dict['configurations'].keys() - if not target_dict['configurations'][i].get('abstract')] - target_dict['default_configuration'] = sorted(concrete)[0] - - for configuration in target_dict['configurations'].keys(): - old_configuration_dict = target_dict['configurations'][configuration] - # Skip abstract configurations (saves work only). - if old_configuration_dict.get('abstract'): - continue - # Configurations inherit (most) settings from the enclosing target scope. - # Get the inheritance relationship right by making a copy of the target - # dict. - new_configuration_dict = copy.deepcopy(target_dict) - - # Take out the bits that don't belong in a "configurations" section. - # Since configuration setup is done before conditional, exclude, and rules - # processing, be careful with handling of the suffix characters used in - # those phases. - delete_keys = [] - for key in new_configuration_dict: - key_ext = key[-1:] - if key_ext in key_suffixes: - key_base = key[:-1] - else: - key_base = key - if key_base in non_configuration_keys: - delete_keys.append(key) - - for key in delete_keys: - del new_configuration_dict[key] - - # Merge in configuration (with all its parents first). - MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, configuration, []) - - # Put the new result back into the target dict as a configuration. - target_dict['configurations'][configuration] = new_configuration_dict - - # Now drop all the abstract ones. 
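A sketch of the inherit_from/abstract behaviour just described, reduced to configurations whose values are plain lists; the real implementation deep-copies the enclosing target dict and merges with MergeDicts, and the configuration names here are invented.

def flatten_config(configs, name):
  result = {}
  # Merge parents first, then the configuration's own (non-meta) keys.
  for parent in configs[name].get('inherit_from', []):
    for key, value in flatten_config(configs, parent).items():
      result.setdefault(key, []).extend(value)
  for key, value in configs[name].items():
    if key not in ('inherit_from', 'abstract'):
      result.setdefault(key, []).extend(value)
  return result

configs = {'Base': {'abstract': 1, 'defines': ['COMMON']},
           'Debug': {'inherit_from': ['Base'], 'defines': ['DEBUG']}}
print(flatten_config(configs, 'Debug'))  # {'defines': ['COMMON', 'DEBUG']}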
- for configuration in target_dict['configurations'].keys(): - old_configuration_dict = target_dict['configurations'][configuration] - if old_configuration_dict.get('abstract'): - del target_dict['configurations'][configuration] - - # Now that all of the target's configurations have been built, go through - # the target dict's keys and remove everything that's been moved into a - # "configurations" section. - delete_keys = [] - for key in target_dict: - key_ext = key[-1:] - if key_ext in key_suffixes: - key_base = key[:-1] - else: - key_base = key - if not key_base in non_configuration_keys: - delete_keys.append(key) - for key in delete_keys: - del target_dict[key] - - -def ProcessListFiltersInDict(name, the_dict): - """Process regular expression and exclusion-based filters on lists. - - An exclusion list is in a dict key named with a trailing "!", like - "sources!". Every item in such a list is removed from the associated - main list, which in this example, would be "sources". Removed items are - placed into a "sources_excluded" list in the dict. - - Regular expression (regex) filters are contained in dict keys named with a - trailing "/", such as "sources/" to operate on the "sources" list. Regex - filters in a dict take the form: - 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'] ], - ['include', '_mac\\.cc$'] ], - The first filter says to exclude all files ending in _linux.cc, _mac.cc, and - _win.cc. The second filter then includes all files ending in _mac.cc that - are now or were once in the "sources" list. Items matching an "exclude" - filter are subject to the same processing as would occur if they were listed - by name in an exclusion list (ending in "!"). Items matching an "include" - filter are brought back into the main list if previously excluded by an - exclusion list or exclusion regex filter. Subsequent matching "exclude" - patterns can still cause items to be excluded after matching an "include". - """ - - # Look through the dictionary for any lists whose keys end in "!" or "/". - # These are lists that will be treated as exclude lists and regular - # expression-based exclude/include lists. Collect the lists that are - # needed first, looking for the lists that they operate on, and assemble - # then into |lists|. This is done in a separate loop up front, because - # the _included and _excluded keys need to be added to the_dict, and that - # can't be done while iterating through it. - - lists = [] - del_lists = [] - for key, value in the_dict.iteritems(): - operation = key[-1] - if operation != '!' and operation != '/': - continue - - if not isinstance(value, list): - raise ValueError, name + ' key ' + key + ' must be list, not ' + \ - value.__class__.__name__ - - list_key = key[:-1] - if list_key not in the_dict: - # This happens when there's a list like "sources!" but no corresponding - # "sources" list. Since there's nothing for it to operate on, queue up - # the "sources!" list for deletion now. - del_lists.append(key) - continue - - if not isinstance(the_dict[list_key], list): - raise ValueError, name + ' key ' + list_key + \ - ' must be list, not ' + \ - value.__class__.__name__ + ' when applying ' + \ - {'!': 'exclusion', '/': 'regex'}[operation] - - if not list_key in lists: - lists.append(list_key) - - # Delete the lists that are known to be unneeded at this point. - for del_list in del_lists: - del the_dict[del_list] - - for list_key in lists: - the_list = the_dict[list_key] - - # Initialize the list_actions list, which is parallel to the_list. 
Each - # item in list_actions identifies whether the corresponding item in - # the_list should be excluded, unconditionally preserved (included), or - # whether no exclusion or inclusion has been applied. Items for which - # no exclusion or inclusion has been applied (yet) have value -1, items - # excluded have value 0, and items included have value 1. Includes and - # excludes override previous actions. All items in list_actions are - # initialized to -1 because no excludes or includes have been processed - # yet. - list_actions = list((-1,) * len(the_list)) - - exclude_key = list_key + '!' - if exclude_key in the_dict: - for exclude_item in the_dict[exclude_key]: - for index in xrange(0, len(the_list)): - if exclude_item == the_list[index]: - # This item matches the exclude_item, so set its action to 0 - # (exclude). - list_actions[index] = 0 - - # The "whatever!" list is no longer needed, dump it. - del the_dict[exclude_key] - - regex_key = list_key + '/' - if regex_key in the_dict: - for regex_item in the_dict[regex_key]: - [action, pattern] = regex_item - pattern_re = re.compile(pattern) - - for index in xrange(0, len(the_list)): - list_item = the_list[index] - if pattern_re.search(list_item): - # Regular expression match. - - if action == 'exclude': - # This item matches an exclude regex, so set its value to 0 - # (exclude). - list_actions[index] = 0 - elif action == 'include': - # This item matches an include regex, so set its value to 1 - # (include). - list_actions[index] = 1 - else: - # This is an action that doesn't make any sense. - raise ValueError, 'Unrecognized action ' + action + ' in ' + \ - name + ' key ' + key - - # The "whatever/" list is no longer needed, dump it. - del the_dict[regex_key] - - # Add excluded items to the excluded list. - # - # Note that exclude_key ("sources!") is different from excluded_key - # ("sources_excluded"). The exclude_key list is input and it was already - # processed and deleted; the excluded_key list is output and it's about - # to be created. - excluded_key = list_key + '_excluded' - if excluded_key in the_dict: - raise KeyError, \ - name + ' key ' + excluded_key + ' must not be present prior ' + \ - ' to applying exclusion/regex filters for ' + list_key - - excluded_list = [] - - # Go backwards through the list_actions list so that as items are deleted, - # the indices of items that haven't been seen yet don't shift. That means - # that things need to be prepended to excluded_list to maintain them in the - # same order that they existed in the_list. - for index in xrange(len(list_actions) - 1, -1, -1): - if list_actions[index] == 0: - # Dump anything with action 0 (exclude). Keep anything with action 1 - # (include) or -1 (no include or exclude seen for the item). - excluded_list.insert(0, the_list[index]) - del the_list[index] - - # If anything was excluded, put the excluded list into the_dict at - # excluded_key. - if len(excluded_list) > 0: - the_dict[excluded_key] = excluded_list - - # Now recurse into subdicts and lists that may contain dicts. 
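For illustration, a self-contained rendering of the action-vector scheme above (-1 untouched, 0 excluded, 1 included, with later filters overriding earlier ones); apply_filters and the file names are invented.

import re

def apply_filters(items, exclude_list=(), regex_filters=()):
  actions = [-1] * len(items)
  for index, item in enumerate(items):
    if item in exclude_list:
      actions[index] = 0
  for action, pattern in regex_filters:
    compiled = re.compile(pattern)
    for index, item in enumerate(items):
      if compiled.search(item):
        actions[index] = 0 if action == 'exclude' else 1
  kept = [item for item, a in zip(items, actions) if a != 0]
  excluded = [item for item, a in zip(items, actions) if a == 0]
  return kept, excluded

print(apply_filters(['a_mac.cc', 'a_win.cc', 'a.cc'],
                    regex_filters=[('exclude', r'_(win|mac)\.cc$'),
                                   ('include', r'_mac\.cc$')]))
# (['a_mac.cc', 'a.cc'], ['a_win.cc'])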
- for key, value in the_dict.iteritems(): - if isinstance(value, dict): - ProcessListFiltersInDict(key, value) - elif isinstance(value, list): - ProcessListFiltersInList(key, value) - - -def ProcessListFiltersInList(name, the_list): - for item in the_list: - if isinstance(item, dict): - ProcessListFiltersInDict(name, item) - elif isinstance(item, list): - ProcessListFiltersInList(name, item) - - -def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): - """Ensures that the rules sections in target_dict are valid and consistent, - and determines which sources they apply to. - - Arguments: - target: string, name of target. - target_dict: dict, target spec containing "rules" and "sources" lists. - extra_sources_for_rules: a list of keys to scan for rule matches in - addition to 'sources'. - """ - - # Dicts to map between values found in rules' 'rule_name' and 'extension' - # keys and the rule dicts themselves. - rule_names = {} - rule_extensions = {} - - rules = target_dict.get('rules', []) - for rule in rules: - # Make sure that there's no conflict among rule names and extensions. - rule_name = rule['rule_name'] - if rule_name in rule_names: - raise KeyError, 'rule %s exists in duplicate, target %s' % \ - (rule_name, target) - rule_names[rule_name] = rule - - rule_extension = rule['extension'] - if rule_extension in rule_extensions: - raise KeyError, ('extension %s associated with multiple rules, ' + - 'target %s rules %s and %s') % \ - (rule_extension, target, - rule_extensions[rule_extension]['rule_name'], - rule_name) - rule_extensions[rule_extension] = rule - - # Make sure rule_sources isn't already there. It's going to be - # created below if needed. - if 'rule_sources' in rule: - raise KeyError, \ - 'rule_sources must not exist in input, target %s rule %s' % \ - (target, rule_name) - extension = rule['extension'] - - rule_sources = [] - source_keys = ['sources'] - source_keys.extend(extra_sources_for_rules) - for source_key in source_keys: - for source in target_dict.get(source_key, []): - (source_root, source_extension) = os.path.splitext(source) - if source_extension.startswith('.'): - source_extension = source_extension[1:] - if source_extension == extension: - rule_sources.append(source) - - if len(rule_sources) > 0: - rule['rule_sources'] = rule_sources - - -def ValidateActionsInTarget(target, target_dict, build_file): - '''Validates the inputs to the actions in a target.''' - target_name = target_dict.get('target_name') - actions = target_dict.get('actions', []) - for action in actions: - action_name = action.get('action_name') - if not action_name: - raise Exception("Anonymous action in target %s. " - "An action must have an 'action_name' field." % - target_name) - inputs = action.get('inputs', []) - - -def ValidateRunAsInTarget(target, target_dict, build_file): - target_name = target_dict.get('target_name') - run_as = target_dict.get('run_as') - if not run_as: - return - if not isinstance(run_as, dict): - raise Exception("The 'run_as' in target %s from file %s should be a " - "dictionary." % - (target_name, build_file)) - action = run_as.get('action') - if not action: - raise Exception("The 'run_as' in target %s from file %s must have an " - "'action' section." % - (target_name, build_file)) - if not isinstance(action, list): - raise Exception("The 'action' for 'run_as' in target %s from file %s " - "must be a list." 
% - (target_name, build_file)) - working_directory = run_as.get('working_directory') - if working_directory and not isinstance(working_directory, str): - raise Exception("The 'working_directory' for 'run_as' in target %s " - "in file %s should be a string." % - (target_name, build_file)) - environment = run_as.get('environment') - if environment and not isinstance(environment, dict): - raise Exception("The 'environment' for 'run_as' in target %s " - "in file %s should be a dictionary." % - (target_name, build_file)) - - -def TurnIntIntoStrInDict(the_dict): - """Given dict the_dict, recursively converts all integers into strings. - """ - # Use items instead of iteritems because there's no need to try to look at - # reinserted keys and their associated values. - for k, v in the_dict.items(): - if isinstance(v, int): - v = str(v) - the_dict[k] = v - elif isinstance(v, dict): - TurnIntIntoStrInDict(v) - elif isinstance(v, list): - TurnIntIntoStrInList(v) - - if isinstance(k, int): - the_dict[str(k)] = v - del the_dict[k] - - -def TurnIntIntoStrInList(the_list): - """Given list the_list, recursively converts all integers into strings. - """ - for index in xrange(0, len(the_list)): - item = the_list[index] - if isinstance(item, int): - the_list[index] = str(item) - elif isinstance(item, dict): - TurnIntIntoStrInDict(item) - elif isinstance(item, list): - TurnIntIntoStrInList(item) - - -def Load(build_files, variables, includes, depth, generator_input_info, check, - circular_check): - # Set up path_sections and non_configuration_keys with the default data plus - # the generator-specifc data. - global path_sections - path_sections = base_path_sections[:] - path_sections.extend(generator_input_info['path_sections']) - - global non_configuration_keys - non_configuration_keys = base_non_configuration_keys[:] - non_configuration_keys.extend(generator_input_info['non_configuration_keys']) - - # TODO(mark) handle variants if the generator doesn't want them directly. - generator_handles_variants = \ - generator_input_info['generator_handles_variants'] - - global absolute_build_file_paths - absolute_build_file_paths = \ - generator_input_info['generator_wants_absolute_build_file_paths'] - - global multiple_toolsets - multiple_toolsets = generator_input_info[ - 'generator_supports_multiple_toolsets'] - - # A generator can have other lists (in addition to sources) be processed - # for rules. - extra_sources_for_rules = generator_input_info['extra_sources_for_rules'] - - # Load build files. This loads every target-containing build file into - # the |data| dictionary such that the keys to |data| are build file names, - # and the values are the entire build file contents after "early" or "pre" - # processing has been done and includes have been resolved. - # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as - # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps - # track of the keys corresponding to "target" files. - data = {'target_build_files': set()} - aux_data = {} - for build_file in build_files: - # Normalize paths everywhere. This is important because paths will be - # used as keys to the data dict and for references between input files. - build_file = os.path.normpath(build_file) - try: - LoadTargetBuildFile(build_file, data, aux_data, variables, includes, - depth, check) - except Exception, e: - gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) - raise - - # Build a dict to access each target's subdict by qualified name. 
- targets = BuildTargetsDict(data) - - # Fully qualify all dependency links. - QualifyDependencies(targets) - - # Expand dependencies specified as build_file:*. - ExpandWildcardDependencies(targets, data) - - if circular_check: - # Make sure that any targets in a.gyp don't contain dependencies in other - # .gyp files that further depend on a.gyp. - VerifyNoGYPFileCircularDependencies(targets) - - [dependency_nodes, flat_list] = BuildDependencyList(targets) - - # Handle dependent settings of various types. - for settings_type in ['all_dependent_settings', - 'direct_dependent_settings', - 'link_settings']: - DoDependentSettings(settings_type, flat_list, targets, dependency_nodes) - - # Take out the dependent settings now that they've been published to all - # of the targets that require them. - for target in flat_list: - if settings_type in targets[target]: - del targets[target][settings_type] - - # Make sure static libraries don't declare dependencies on other static - # libraries, but that linkables depend on all unlinked static libraries - # that they need so that their link steps will be correct. - AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes) - - # Apply "post"/"late"/"target" variable expansions and condition evaluations. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ProcessVariablesAndConditionsInDict(target_dict, True, variables, - build_file) - - # Move everything that can go into a "configurations" section into one. - for target in flat_list: - target_dict = targets[target] - SetUpConfigurations(target, target_dict) - - # Apply exclude (!) and regex (/) list filters. - for target in flat_list: - target_dict = targets[target] - ProcessListFiltersInDict(target, target_dict) - - # Make sure that the rules make sense, and build up rule_sources lists as - # needed. Not all generators will need to use the rule_sources lists, but - # some may, and it seems best to build the list in a common spot. - # Also validate actions and run_as elements in targets. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ValidateRulesInTarget(target, target_dict, extra_sources_for_rules) - ValidateRunAsInTarget(target, target_dict, build_file) - ValidateActionsInTarget(target, target_dict, build_file) - - # Generators might not expect ints. Turn them into strs. - TurnIntIntoStrInDict(data) - - # TODO(mark): Return |data| for now because the generator needs a list of - # build files that came in. In the future, maybe it should just accept - # a list, and not the whole data dict. - return [flat_list, targets, data] diff --git a/mozc_build_tools/gyp/pylib/gyp/xcodeproj_file.py b/mozc_build_tools/gyp/pylib/gyp/xcodeproj_file.py deleted file mode 100644 index ebae02e..0000000 --- a/mozc_build_tools/gyp/pylib/gyp/xcodeproj_file.py +++ /dev/null @@ -1,2736 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Xcode project file generator. - -This module is both an Xcode project file generator and a documentation of the -Xcode project file format. Knowledge of the project file format was gained -based on extensive experience with Xcode, and by making changes to projects in -Xcode.app and observing the resultant changes in the associated project files. 
- -XCODE PROJECT FILES - -The generator targets the file format as written by Xcode 3.1 (specifically, -3.1.2), but past experience has taught that the format has not changed -significantly in the past several years, and future versions of Xcode are able -to read older project files. - -Xcode project files are "bundled": the project "file" from an end-user's -perspective is actually a directory with an ".xcodeproj" extension. The -project file from this module's perspective is actually a file inside this -directory, always named "project.pbxproj". This file contains a complete -description of the project and is all that is needed to use the xcodeproj. -Other files contained in the xcodeproj directory are simply used to store -per-user settings, such as the state of various UI elements in the Xcode -application. - -The project.pbxproj file is a property list, stored in a format almost -identical to the NeXTstep property list format. The file is able to carry -Unicode data, and is encoded in UTF-8. The root element in the property list -is a dictionary that contains several properties of minimal interest, and two -properties of immense interest. The most important property is a dictionary -named "objects". The entire structure of the project is represented by the -children of this property. The objects dictionary is keyed by unique 96-bit -values represented by 24 uppercase hexadecimal characters. Each value in the -objects dictionary is itself a dictionary, describing an individual object. - -Each object in the dictionary is a member of a class, which is identified by -the "isa" property of each object. A variety of classes are represented in a -project file. Objects can refer to other objects by ID, using the 24-character -hexadecimal object key. A project's objects form a tree, with a root object -of class PBXProject at the root. As an example, the PBXProject object serves -as parent to an XCConfigurationList object defining the build configurations -used in the project, a PBXGroup object serving as a container for all files -referenced in the project, and a list of target objects, each of which defines -a target in the project. There are several different types of target object, -such as PBXNativeTarget and PBXAggregateTarget. In this module, this -relationship is expressed by having each target type derive from an abstract -base named XCTarget. - -The project.pbxproj file's root dictionary also contains a property, sibling to -the "objects" dictionary, named "rootObject". The value of rootObject is a -24-character object key referring to the root PBXProject object in the -objects dictionary. - -In Xcode, every file used as input to a target or produced as a final product -of a target must appear somewhere in the hierarchy rooted at the PBXGroup -object referenced by the PBXProject's mainGroup property. A PBXGroup is -generally represented as a folder in the Xcode application. PBXGroups can -contain other PBXGroups as well as PBXFileReferences, which are pointers to -actual files. - -Each XCTarget contains a list of build phases, represented in this module by -the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations -are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the -"Compile Sources" and "Link Binary With Libraries" phases displayed in the -Xcode application. 
Files used as input to these phases (for example, source -files in the former case and libraries and frameworks in the latter) are -represented by PBXBuildFile objects, referenced by elements of "files" lists -in XCTarget objects. Each PBXBuildFile object refers to a PBXBuildFile -object as a "weak" reference: it does not "own" the PBXBuildFile, which is -owned by the root object's mainGroup or a descendant group. In most cases, the -layer of indirection between an XCBuildPhase and a PBXFileReference via a -PBXBuildFile appears extraneous, but there's actually one reason for this: -file-specific compiler flags are added to the PBXBuildFile object so as to -allow a single file to be a member of multiple targets while having distinct -compiler flags for each. These flags can be modified in the Xcode applciation -in the "Build" tab of a File Info window. - -When a project is open in the Xcode application, Xcode will rewrite it. As -such, this module is careful to adhere to the formatting used by Xcode, to -avoid insignificant changes appearing in the file when it is used in the -Xcode application. This will keep version control repositories happy, and -makes it possible to compare a project file used in Xcode to one generated by -this module to determine if any significant changes were made in the -application. - -Xcode has its own way of assigning 24-character identifiers to each object, -which is not duplicated here. Because the identifier only is only generated -once, when an object is created, and is then left unchanged, there is no need -to attempt to duplicate Xcode's behavior in this area. The generator is free -to select any identifier, even at random, to refer to the objects it creates, -and Xcode will retain those identifiers and use them when subsequently -rewriting the project file. However, the generator would choose new random -identifiers each time the project files are generated, leading to difficulties -comparing "used" project files to "pristine" ones produced by this module, -and causing the appearance of changes as every object identifier is changed -when updated projects are checked in to a version control repository. To -mitigate this problem, this module chooses identifiers in a more deterministic -way, by hashing a description of each object as well as its parent and ancestor -objects. This strategy should result in minimal "shift" in IDs as successive -generations of project files are produced. - -THIS MODULE - -This module introduces several classes, all derived from the XCObject class. -Nearly all of the "brains" are built into the XCObject class, which understands -how to create and modify objects, maintain the proper tree structure, compute -identifiers, and print objects. For the most part, classes derived from -XCObject need only provide a _schema class object, a dictionary that -expresses what properties objects of the class may contain. - -Given this structure, it's possible to build a minimal project file by creating -objects of the appropriate types and making the proper connections: - - config_list = XCConfigurationList() - group = PBXGroup() - project = PBXProject({'buildConfigurationList': config_list, - 'mainGroup': group}) - -With the project object set up, it can be added to an XCProjectFile object. -XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject -subclass that does not actually correspond to a class type found in a project -file. Rather, it is used to represent the project file's root dictionary. 
-Printing an XCProjectFile will print the entire project file, including the -full "objects" dictionary. - - project_file = XCProjectFile({'rootObject': project}) - project_file.ComputeIDs() - project_file.Print() - -Xcode project files are always encoded in UTF-8. This module will accept -strings of either the str class or the unicode class. Strings of class str -are assumed to already be encoded in UTF-8. Obviously, if you're just using -ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset. -Strings of class unicode are handled properly and encoded in UTF-8 when -a project file is output. -""" - -import gyp.common -import posixpath -import re -import struct -import sys - -# hashlib is supplied as of Python 2.5 as the replacement interface for sha -# and other secure hashes. In 2.6, sha is deprecated. Import hashlib if -# available, avoiding a deprecation warning under 2.6. Import sha otherwise, -# preserving 2.4 compatibility. -try: - import hashlib - _new_sha1 = hashlib.sha1 -except ImportError: - import sha - _new_sha1 = sha.new - - -# See XCObject._EncodeString. This pattern is used to determine when a string -# can be printed unquoted. Strings that match this pattern may be printed -# unquoted. Strings that do not match must be quoted and may be further -# transformed to be properly encoded. Note that this expression matches the -# characters listed with "+", for 1 or more occurrences: if a string is empty, -# it must not match this pattern, because it needs to be encoded as "". -_unquoted = re.compile('^[A-Za-z0-9$./_]+$') - -# Strings that match this pattern are quoted regardless of what _unquoted says. -# Oddly, Xcode will quote any string with a run of three or more underscores. -_quoted = re.compile('___') - -# This pattern should match any character that needs to be escaped by -# XCObject._EncodeString. See that function. -_escaped = re.compile('[\\\\"]|[^ -~]') - - -# Used by SourceTreeAndPathFromPath -_path_leading_variable = re.compile('^\$\((.*?)\)(/(.*))?$') - -def SourceTreeAndPathFromPath(input_path): - """Given input_path, returns a tuple with sourceTree and path values. - - Examples: - input_path (source_tree, output_path) - '$(VAR)/path' ('VAR', 'path') - '$(VAR)' ('VAR', None) - 'path' (None, 'path') - """ - - source_group_match = _path_leading_variable.match(input_path) - if source_group_match: - source_tree = source_group_match.group(1) - output_path = source_group_match.group(3) # This may be None. - else: - source_tree = None - output_path = input_path - - return (source_tree, output_path) - -def ConvertVariablesToShellSyntax(input_string): - return re.sub('\$\((.*?)\)', '${\\1}', input_string) - -class XCObject(object): - """The abstract base of all class types used in Xcode project files. - - Class variables: - _schema: A dictionary defining the properties of this class. The keys to - _schema are string property keys as used in project files. Values - are a list of four or five elements: - [ is_list, property_type, is_strong, is_required, default ] - is_list: True if the property described is a list, as opposed - to a single element. - property_type: The type to use as the value of the property, - or if is_list is True, the type to use for each - element of the value's list. property_type must - be an XCObject subclass, or one of the built-in - types str, int, or dict. 
-             is_strong: If property_type is an XCObject subclass, is_strong
-                        is True to assert that this class "owns," or serves
-                        as parent, to the property value (or, if is_list is
-                        True, values). is_strong must be False if
-                        property_type is not an XCObject subclass.
-             is_required: True if the property is required for the class.
-                          Note that is_required being True does not preclude
-                          an empty string ("", in the case of property_type
-                          str) or list ([], in the case of is_list True) from
-                          being set for the property.
-             default: Optional. If is_required is True, default may be set
-                      to provide a default value for objects that do not supply
-                      their own value. If is_required is True and default
-                      is not provided, users of the class must supply their own
-                      value for the property.
-             Note that although the values of the array are expressed in
-             boolean terms, subclasses provide values as integers to conserve
-             horizontal space.
-    _should_print_single_line: False in XCObject. Subclasses whose objects
-                               should be written to the project file in the
-                               alternate single-line format, such as
-                               PBXFileReference and PBXBuildFile, should
-                               set this to True.
-    _encode_transforms: Used by _EncodeString to encode unprintable characters.
-                        The index into this list is the ordinal of the
-                        character to transform; each value is a string
-                        used to represent the character in the output. XCObject
-                        provides an _encode_transforms list suitable for most
-                        XCObject subclasses.
-    _alternate_encode_transforms: Provided for subclasses that wish to use
-                                  the alternate encoding rules. Xcode seems
-                                  to use these rules when printing objects in
-                                  single-line format. Subclasses that desire
-                                  this behavior should set _encode_transforms
-                                  to _alternate_encode_transforms.
-    _hashables: A list of additional hashable values used by ComputeIDs
-                to construct this object's ID. Most classes that need custom
-                hashing behavior should do it by overriding Hashables,
-                but in some cases an object's parent may wish to push a
-                hashable value into its child, and it can do so by appending
-                to _hashables.
-  Attributes:
-    id: The object's identifier, a 24-character uppercase hexadecimal string.
-        Usually, objects being created should not set id until the entire
-        project file structure is built. At that point, ComputeIDs() should
-        be called on the root object to assign deterministic values for id to
-        each object in the tree.
-    parent: The object's parent. This is set by a parent XCObject when a child
-            object is added to it.
-    _properties: The object's property dictionary. An object's properties are
-                 described by its class' _schema variable.
-  """
-
-  _schema = {}
-  _should_print_single_line = False
-
-  # See _EncodeString.
- _encode_transforms = [] - i = 0 - while i < ord(' '): - _encode_transforms.append('\\U%04x' % i) - i = i + 1 - _encode_transforms[7] = '\\a' - _encode_transforms[8] = '\\b' - _encode_transforms[9] = '\\t' - _encode_transforms[10] = '\\n' - _encode_transforms[11] = '\\v' - _encode_transforms[12] = '\\f' - _encode_transforms[13] = '\\n' - - _alternate_encode_transforms = list(_encode_transforms) - _alternate_encode_transforms[9] = chr(9) - _alternate_encode_transforms[10] = chr(10) - _alternate_encode_transforms[11] = chr(11) - - def __init__(self, properties=None, id=None, parent=None): - self.id = id - self.parent = parent - self._properties = {} - self._hashables = [] - self._SetDefaultsFromSchema() - self.UpdateProperties(properties) - - def __repr__(self): - try: - name = self.Name() - except NotImplementedError: - return '<%s at 0x%x>' % (self.__class__.__name__, id(self)) - return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self)) - - def Copy(self): - """Make a copy of this object. - - The new object will have its own copy of lists and dicts. Any XCObject - objects owned by this object (marked "strong") will be copied in the - new object, even those found in lists. If this object has any weak - references to other XCObjects, the same references are added to the new - object without making a copy. - """ - - that = self.__class__(id=self.id, parent=self.parent) - for key, value in self._properties.iteritems(): - is_strong = self._schema[key][2] - - if isinstance(value, XCObject): - if is_strong: - new_value = value.Copy() - new_value.parent = that - that._properties[key] = new_value - else: - that._properties[key] = value - elif isinstance(value, str) or isinstance(value, unicode) or \ - isinstance(value, int): - that._properties[key] = value - elif isinstance(value, list): - if is_strong: - # If is_strong is True, each element is an XCObject, so it's safe to - # call Copy. - that._properties[key] = [] - for item in value: - new_item = item.Copy() - new_item.parent = that - that._properties[key].append(new_item) - else: - that._properties[key] = value[:] - elif isinstance(value, dict): - # dicts are never strong. - if is_strong: - raise TypeError, 'Strong dict for key ' + key + ' in ' + \ - self.__class__.__name__ - else: - that._properties[key] = value.copy() - else: - raise TypeError, 'Unexpected type ' + value.__class__.__name__ + \ - ' for key ' + key + ' in ' + self.__class__.__name__ - - return that - - def Name(self): - """Return the name corresponding to an object. - - Not all objects necessarily need to be nameable, and not all that do have - a "name" property. Override as needed. - """ - - # If the schema indicates that "name" is required, try to access the - # property even if it doesn't exist. This will result in a KeyError - # being raised for the property that should be present, which seems more - # appropriate than NotImplementedError in this case. - if 'name' in self._properties or \ - ('name' in self._schema and self._schema['name'][3]): - return self._properties['name'] - - raise NotImplementedError, \ - self.__class__.__name__ + ' must implement Name' - - def Comment(self): - """Return a comment string for the object. - - Most objects just use their name as the comment, but PBXProject uses - different values. - - The returned comment is not escaped and does not have any comment marker - strings applied to it. 
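-
-    As a rough illustration, an object whose Name() is "main.cc" returns the
-    comment "main.cc"; _EncodeComment later renders it as "/* main.cc */" in
-    the project file output.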
-    """
-
-    return self.Name()
-
-  def Hashables(self):
-    hashables = [self.__class__.__name__]
-
-    name = self.Name()
-    if name != None:
-      hashables.append(name)
-
-    hashables.extend(self._hashables)
-
-    return hashables
-
-  def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
-    """Set "id" properties deterministically.
-
-    An object's "id" property is set based on a hash of its class type and
-    name, as well as the class type and name of all ancestor objects. As
-    such, it is only advisable to call ComputeIDs once an entire project file
-    tree is built.
-
-    If recursive is True, recurse into all descendant objects and update their
-    hashes.
-
-    If overwrite is True, any existing value set in the "id" property will be
-    replaced.
-    """
-
-    def _HashUpdate(hash, data):
-      """Update hash with data's length and contents.
-
-      If the hash were updated only with the value of data, it would be
-      possible for clowns to induce collisions by manipulating the names of
-      their objects. By adding the length, it's exceedingly less likely that
-      ID collisions will be encountered, intentionally or not.
-      """
-
-      hash.update(struct.pack('>i', len(data)))
-      hash.update(data)
-
-    if hash == None:
-      hash = _new_sha1()
-
-    hashables = self.Hashables()
-    assert len(hashables) > 0
-    for hashable in hashables:
-      _HashUpdate(hash, hashable)
-
-    if recursive:
-      for child in self.Children():
-        child.ComputeIDs(recursive, overwrite, hash.copy())
-
-    if overwrite or self.id == None:
-      # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is
-      # 160 bits. Instead of throwing out 64 bits of the digest, xor them
-      # into the portion that gets used.
-      assert hash.digest_size % 4 == 0
-      digest_int_count = hash.digest_size / 4
-      digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest())
-      id_ints = [0, 0, 0]
-      for index in xrange(0, digest_int_count):
-        id_ints[index % 3] ^= digest_ints[index]
-      self.id = '%08X%08X%08X' % tuple(id_ints)
-
-  def EnsureNoIDCollisions(self):
-    """Verifies that no two objects have the same ID. Checks all descendants.
-    """
-
-    ids = {}
-    descendants = self.Descendants()
-    for descendant in descendants:
-      if descendant.id in ids:
-        other = ids[descendant.id]
-        raise KeyError, \
-              'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \
-              (descendant.id, str(descendant._properties),
-               str(other._properties), self._properties['rootObject'].Name())
-      ids[descendant.id] = descendant
-
-  def Children(self):
-    """Returns a list of all of this object's owned (strong) children."""
-
-    children = []
-    for property, attributes in self._schema.iteritems():
-      (is_list, property_type, is_strong) = attributes[0:3]
-      if is_strong and property in self._properties:
-        if not is_list:
-          children.append(self._properties[property])
-        else:
-          children.extend(self._properties[property])
-    return children
-
-  def Descendants(self):
-    """Returns a list of all of this object's descendants, including this
-    object.
-    """
-
-    children = self.Children()
-    descendants = [self]
-    for child in children:
-      descendants.extend(child.Descendants())
-    return descendants
-
-  def PBXProjectAncestor(self):
-    # The base case for recursion is defined at PBXProject.PBXProjectAncestor.
-    if self.parent:
-      return self.parent.PBXProjectAncestor()
-    return None
-
-  def _EncodeComment(self, comment):
-    """Encodes a comment to be placed in the project file output, mimicking
-    Xcode behavior.
-    """
-
-    # This mimics Xcode behavior by wrapping the comment in "/*" and "*/".
If
-    # the string already contains a "*/", it is turned into "(*)/". This keeps
-    # the file writer from outputting something that would be treated as the
-    # end of a comment in the middle of something intended to be entirely a
-    # comment.
-
-    return '/* ' + comment.replace('*/', '(*)/') + ' */'
-
-  def _EncodeTransform(self, match):
-    # This function works closely with _EncodeString. It will only be called
-    # by re.sub with match.group(0) containing a character matched by the
-    # _escaped expression.
-    char = match.group(0)
-
-    # Backslashes (\) and quotation marks (") are always replaced with a
-    # backslash-escaped version of the same. Everything else gets its
-    # replacement from the class' _encode_transforms array.
-    if char == '\\':
-      return '\\\\'
-    if char == '"':
-      return '\\"'
-    return self._encode_transforms[ord(char)]
-
-  def _EncodeString(self, value):
-    """Encodes a string to be placed in the project file output, mimicking
-    Xcode behavior.
-    """
-
-    # Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
-    # $ (dollar sign), . (period), and _ (underscore) is present. Also use
-    # quotation marks to represent empty strings.
-    #
-    # Escape " (double-quote) and \ (backslash) by preceding them with a
-    # backslash.
-    #
-    # Some characters below the printable ASCII range are encoded specially:
-    #     7 ^G BEL is encoded as "\a"
-    #     8 ^H BS  is encoded as "\b"
-    #    11 ^K VT  is encoded as "\v"
-    #    12 ^L NP  is encoded as "\f"
-    #   127 ^? DEL is passed through as-is without escaping
-    #   - In PBXFileReference and PBXBuildFile objects:
-    #     9 ^I HT  is passed through as-is without escaping
-    #    10 ^J NL  is passed through as-is without escaping
-    #    13 ^M CR  is passed through as-is without escaping
-    #   - In other objects:
-    #     9 ^I HT  is encoded as "\t"
-    #    10 ^J NL  is encoded as "\n"
-    #    13 ^M CR  is encoded as "\n" rendering it indistinguishable from
-    #             10 ^J NL
-    # All other nonprintable characters within the ASCII range (0 through 127
-    # inclusive) are encoded as "\U001f" referring to the Unicode code point in
-    # hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e".
-    # Characters above the ASCII range are passed through to the output encoded
-    # as UTF-8 without any escaping. These mappings are contained in the
-    # class' _encode_transforms list.
-
-    if _unquoted.search(value) and not _quoted.search(value):
-      return value
-
-    return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
-
-  def _XCPrint(self, file, tabs, line):
-    file.write('\t' * tabs + line)
-
-  def _XCPrintableValue(self, tabs, value, flatten_list=False):
-    """Returns a representation of value that may be printed in a project file,
-    mimicking Xcode's behavior.
-
-    _XCPrintableValue can handle str and int values, XCObjects (which are
-    made printable by returning their id property), and list and dict objects
-    composed of any of the above types. When printing a list or dict, and
-    _should_print_single_line is False, the tabs parameter is used to determine
-    how much to indent the lines corresponding to the items in the list or
-    dict.
-
-    If flatten_list is True, single-element lists will be transformed into
-    strings.
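-
-    A rough sketch of the text that ends up in the output (an XCObject value
-    prints as its id followed by a comment):
-
-      _XCPrintableValue(0, 'Debug')       => Debug
-      _XCPrintableValue(0, 'Debug Copy')  => "Debug Copy"
-      _XCPrintableValue(0, ['a'], True)   => a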
-    """
-
-    printable = ''
-    comment = None
-
-    if self._should_print_single_line:
-      sep = ' '
-      element_tabs = ''
-      end_tabs = ''
-    else:
-      sep = '\n'
-      element_tabs = '\t' * (tabs + 1)
-      end_tabs = '\t' * tabs
-
-    if isinstance(value, XCObject):
-      printable += value.id
-      comment = value.Comment()
-    elif isinstance(value, str):
-      printable += self._EncodeString(value)
-    elif isinstance(value, unicode):
-      printable += self._EncodeString(value.encode('utf-8'))
-    elif isinstance(value, int):
-      printable += str(value)
-    elif isinstance(value, list):
-      if flatten_list and len(value) <= 1:
-        if len(value) == 0:
-          printable += self._EncodeString('')
-        else:
-          printable += self._EncodeString(value[0])
-      else:
-        printable = '(' + sep
-        for item in value:
-          printable += element_tabs + \
-                       self._XCPrintableValue(tabs + 1, item, flatten_list) + \
-                       ',' + sep
-        printable += end_tabs + ')'
-    elif isinstance(value, dict):
-      printable = '{' + sep
-      for item_key, item_value in sorted(value.iteritems()):
-        printable += element_tabs + \
-            self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \
-            self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \
-            sep
-      printable += end_tabs + '}'
-    else:
-      raise TypeError, "Can't make " + value.__class__.__name__ + ' printable'
-
-    if comment != None:
-      printable += ' ' + self._EncodeComment(comment)
-
-    return printable
-
-  def _XCKVPrint(self, file, tabs, key, value):
-    """Prints a key and value, members of an XCObject's _properties dictionary,
-    to file.
-
-    tabs is an int identifying the indentation level. If the class'
-    _should_print_single_line variable is True, tabs is ignored and the
-    key-value pair will be followed by a space instead of a newline.
-    """
-
-    if self._should_print_single_line:
-      printable = ''
-      after_kv = ' '
-    else:
-      printable = '\t' * tabs
-      after_kv = '\n'
-
-    # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy
-    # objects without comments. Sometimes it prints them with comments, but
-    # the majority of the time, it doesn't. To avoid unnecessary changes to
-    # the project file after Xcode opens it, don't write comments for
-    # remoteGlobalIDString. This is a sucky hack and it would certainly be
-    # cleaner to extend the schema to indicate whether or not a comment should
-    # be printed, but since this is the only case where the problem occurs and
-    # Xcode itself can't seem to make up its mind, the hack will suffice.
-    #
-    # Also see PBXContainerItemProxy._schema['remoteGlobalIDString'].
-    if key == 'remoteGlobalIDString' and isinstance(self,
-                                                    PBXContainerItemProxy):
-      value_to_print = value.id
-    else:
-      value_to_print = value
-
-    # In another one-off, let's set flatten_list on buildSettings properties
-    # of XCBuildConfiguration objects, because that's how Xcode treats them.
-    if key == 'buildSettings' and isinstance(self, XCBuildConfiguration):
-      flatten_list = True
-    else:
-      flatten_list = False
-
-    try:
-      printable += self._XCPrintableValue(tabs, key, flatten_list) + ' = ' + \
-                   self._XCPrintableValue(tabs, value_to_print, flatten_list) + \
-                   ';' + after_kv
-    except TypeError, e:
-      gyp.common.ExceptionAppend(e,
-                                 'while printing key "%s"' % key)
-      raise
-
-    self._XCPrint(file, 0, printable)
-
-  def Print(self, file=sys.stdout):
-    """Prints a representation of this object to file, adhering to Xcode output
-    formatting.
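-
-    In the multi-line format, the output for an object roughly resembles the
-    following (indentation is tabs; the 24-character id and the comment vary
-    by object):
-
-      XXXXXXXXXXXXXXXXXXXXXXXX /* comment */ = {
-        isa = ClassName;
-        ...remaining properties, sorted by key...
-      };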
- """ - - self.VerifyHasRequiredProperties() - - if self._should_print_single_line: - # When printing an object in a single line, Xcode doesn't put any space - # between the beginning of a dictionary (or presumably a list) and the - # first contained item, so you wind up with snippets like - # ...CDEF = {isa = PBXFileReference; fileRef = 0123... - # If it were me, I would have put a space in there after the opening - # curly, but I guess this is just another one of those inconsistencies - # between how Xcode prints PBXFileReference and PBXBuildFile objects as - # compared to other objects. Mimic Xcode's behavior here by using an - # empty string for sep. - sep = '' - end_tabs = 0 - else: - sep = '\n' - end_tabs = 2 - - # Start the object. For example, '\t\tPBXProject = {\n'. - self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep) - - # "isa" isn't in the _properties dictionary, it's an intrinsic property - # of the class which the object belongs to. Xcode always outputs "isa" - # as the first element of an object dictionary. - self._XCKVPrint(file, 3, 'isa', self.__class__.__name__) - - # The remaining elements of an object dictionary are sorted alphabetically. - for property, value in sorted(self._properties.iteritems()): - self._XCKVPrint(file, 3, property, value) - - # End the object. - self._XCPrint(file, end_tabs, '};\n') - - def UpdateProperties(self, properties, do_copy=False): - """Merge the supplied properties into the _properties dictionary. - - The input properties must adhere to the class schema or a KeyError or - TypeError exception will be raised. If adding an object of an XCObject - subclass and the schema indicates a strong relationship, the object's - parent will be set to this object. - - If do_copy is True, then lists, dicts, strong-owned XCObjects, and - strong-owned XCObjects in lists will be copied instead of having their - references added. - """ - - if properties == None: - return - - for property, value in properties.iteritems(): - # Make sure the property is in the schema. - if not property in self._schema: - raise KeyError, property + ' not in ' + self.__class__.__name__ - - # Make sure the property conforms to the schema. - (is_list, property_type, is_strong) = self._schema[property][0:3] - if is_list: - if value.__class__ != list: - raise TypeError, \ - property + ' of ' + self.__class__.__name__ + \ - ' must be list, not ' + value.__class__.__name__ - for item in value: - if not isinstance(item, property_type) and \ - not (item.__class__ == unicode and property_type == str): - # Accept unicode where str is specified. str is treated as - # UTF-8-encoded. - raise TypeError, \ - 'item of ' + property + ' of ' + self.__class__.__name__ + \ - ' must be ' + property_type.__name__ + ', not ' + \ - item.__class__.__name__ - elif not isinstance(value, property_type) and \ - not (value.__class__ == unicode and property_type == str): - # Accept unicode where str is specified. str is treated as - # UTF-8-encoded. - raise TypeError, \ - property + ' of ' + self.__class__.__name__ + ' must be ' + \ - property_type.__name__ + ', not ' + value.__class__.__name__ - - # Checks passed, perform the assignment. 
- if do_copy: - if isinstance(value, XCObject): - if is_strong: - self._properties[property] = value.Copy() - else: - self._properties[property] = value - elif isinstance(value, str) or isinstance(value, unicode) or \ - isinstance(value, int): - self._properties[property] = value - elif isinstance(value, list): - if is_strong: - # If is_strong is True, each element is an XCObject, so it's safe - # to call Copy. - self._properties[property] = [] - for item in value: - self._properties[property].append(item.Copy()) - else: - self._properties[property] = value[:] - elif isinstance(value, dict): - self._properties[property] = value.copy() - else: - raise TypeError, "Don't know how to copy a " + \ - value.__class__.__name__ + ' object for ' + \ - property + ' in ' + self.__class__.__name__ - else: - self._properties[property] = value - - # Set up the child's back-reference to this object. Don't use |value| - # any more because it may not be right if do_copy is true. - if is_strong: - if not is_list: - self._properties[property].parent = self - else: - for item in self._properties[property]: - item.parent = self - - def HasProperty(self, key): - return key in self._properties - - def GetProperty(self, key): - return self._properties[key] - - def SetProperty(self, key, value): - self.UpdateProperties({key: value}) - - def DelProperty(self, key): - if key in self._properties: - del self._properties[key] - - def AppendProperty(self, key, value): - # TODO(mark): Support ExtendProperty too (and make this call that)? - - # Schema validation. - if not key in self._schema: - raise KeyError, key + ' not in ' + self.__class__.__name__ - - (is_list, property_type, is_strong) = self._schema[key][0:3] - if not is_list: - raise TypeError, key + ' of ' + self.__class__.__name__ + ' must be list' - if not isinstance(value, property_type): - raise TypeError, 'item of ' + key + ' of ' + self.__class__.__name__ + \ - ' must be ' + property_type.__name__ + ', not ' + \ - value.__class__.__name__ - - # If the property doesn't exist yet, create a new empty list to receive the - # item. - if not key in self._properties: - self._properties[key] = [] - - # Set up the ownership link. - if is_strong: - value.parent = self - - # Store the item. - self._properties[key].append(value) - - def VerifyHasRequiredProperties(self): - """Ensure that all properties identified as required by the schema are - set. - """ - - # TODO(mark): A stronger verification mechanism is needed. Some - # subclasses need to perform validation beyond what the schema can enforce. - for property, attributes in self._schema.iteritems(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] - if is_required and not property in self._properties: - raise KeyError, self.__class__.__name__ + ' requires ' + property - - def _SetDefaultsFromSchema(self): - """Assign object default values according to the schema. This will not - overwrite properties that have already been set.""" - - defaults = {} - for property, attributes in self._schema.iteritems(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] - if is_required and len(attributes) >= 5 and \ - not property in self._properties: - default = attributes[4] - - defaults[property] = default - - if len(defaults) > 0: - # Use do_copy=True so that each new object gets its own copy of strong - # objects, lists, and dicts. - self.UpdateProperties(defaults, do_copy=True) - - -class XCHierarchicalElement(XCObject): - """Abstract base for PBXGroup and PBXFileReference. 
Not represented in a - project file.""" - - # TODO(mark): Do name and path belong here? Probably so. - # If path is set and name is not, name may have a default value. Name will - # be set to the basename of path, if the basename of path is different from - # the full value of path. If path is already just a leaf name, name will - # not be set. - _schema = XCObject._schema.copy() - _schema.update({ - 'comments': [0, str, 0, 0], - 'fileEncoding': [0, str, 0, 0], - 'includeInIndex': [0, int, 0, 0], - 'indentWidth': [0, int, 0, 0], - 'lineEnding': [0, int, 0, 0], - 'sourceTree': [0, str, 0, 1, ''], - 'tabWidth': [0, int, 0, 0], - 'usesTabs': [0, int, 0, 0], - 'wrapsLines': [0, int, 0, 0], - }) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCObject.__init__(self, properties, id, parent) - if 'path' in self._properties and not 'name' in self._properties: - path = self._properties['path'] - name = posixpath.basename(path) - if name != '' and path != name: - self.SetProperty('name', name) - - if 'path' in self._properties and \ - (not 'sourceTree' in self._properties or \ - self._properties['sourceTree'] == ''): - # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take - # the variable out and make the path be relative to that variable by - # assigning the variable name as the sourceTree. - (source_tree, path) = SourceTreeAndPathFromPath(self._properties['path']) - if source_tree != None: - self._properties['sourceTree'] = source_tree - if path != None: - self._properties['path'] = path - if source_tree != None and path == None and \ - not 'name' in self._properties: - # The path was of the form "$(SDKROOT)" with no path following it. - # This object is now relative to that variable, so it has no path - # attribute of its own. It does, however, keep a name. - del self._properties['path'] - self._properties['name'] = source_tree - - def Name(self): - if 'name' in self._properties: - return self._properties['name'] - elif 'path' in self._properties: - return self._properties['path'] - else: - # This happens in the case of the root PBXGroup. - return None - - def Hashables(self): - """Custom hashables for XCHierarchicalElements. - - XCHierarchicalElements are special. Generally, their hashes shouldn't - change if the paths don't change. The normal XCObject implementation of - Hashables adds a hashable for each object, which means that if - the hierarchical structure changes (possibly due to changes caused when - TakeOverOnlyChild runs and encounters slight changes in the hierarchy), - the hashes will change. For example, if a project file initially contains - a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent - a/b. If someone later adds a/f2 to the project file, a/b can no longer be - collapsed, and f1 winds up with parent b and grandparent a. That would - be sufficient to change f1's hash. - - To counteract this problem, hashables for all XCHierarchicalElements except - for the main group (which has neither a name nor a path) are taken to be - just the set of path components. Because hashables are inherited from - parents, this provides assurance that a/b/f1 has the same set of hashables - whether its parent is b or a/b. - - The main group is a special case. As it is permitted to have no name or - path, it is permitted to use the standard XCObject hash mechanism. This - is not considered a problem because there can be only one main group. 
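-
-    As a rough illustration, a PBXGroup whose path is "a/b" returns
-    ['PBXGroup.path', 'a', 'PBXGroup.path', 'b'], and a PBXFileReference child
-    whose path is "f1" returns ['PBXFileReference.path', 'f1']. ComputeIDs
-    feeds these to the hash in the same order whether f1's parent is b or the
-    collapsed group a/b, so f1's ID is stable across that change.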
- """ - - if self == self.PBXProjectAncestor()._properties['mainGroup']: - # super - return XCObject.Hashables(self) - - hashables = [] - - # Put the name in first, ensuring that if TakeOverOnlyChild collapses - # children into a top-level group like "Source", the name always goes - # into the list of hashables without interfering with path components. - if 'name' in self._properties: - # Make it less likely for people to manipulate hashes by following the - # pattern of always pushing an object type value onto the list first. - hashables.append(self.__class__.__name__ + '.name') - hashables.append(self._properties['name']) - - # NOTE: This still has the problem that if an absolute path is encountered, - # including paths with a sourceTree, they'll still inherit their parents' - # hashables, even though the paths aren't relative to their parents. This - # is not expected to be much of a problem in practice. - path = self.PathFromSourceTreeAndPath() - if path != None: - components = path.split(posixpath.sep) - for component in components: - hashables.append(self.__class__.__name__ + '.path') - hashables.append(component) - - hashables.extend(self._hashables) - - return hashables - - def Compare(self, other): - # Allow comparison of these types. PBXGroup has the highest sort rank; - # PBXVariantGroup is treated as equal to PBXFileReference. - valid_class_types = { - PBXFileReference: 'file', - PBXGroup: 'group', - PBXVariantGroup: 'file', - } - self_type = valid_class_types[self.__class__] - other_type = valid_class_types[other.__class__] - - if self_type == other_type: - # If the two objects are of the same sort rank, compare their names. - return cmp(self.Name(), other.Name()) - - # Otherwise, sort groups before everything else. - if self_type == 'group': - return -1 - return 1 - - def CompareRootGroup(self, other): - # This function should be used only to compare direct children of the - # containing PBXProject's mainGroup. These groups should appear in the - # listed order. - # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the - # generator should have a way of influencing this list rather than having - # to hardcode for the generator here. - order = ['Source', 'Intermediates', 'Projects', 'Frameworks', 'Products', - 'Build'] - - # If the groups aren't in the listed order, do a name comparison. - # Otherwise, groups in the listed order should come before those that - # aren't. - self_name = self.Name() - other_name = other.Name() - self_in = isinstance(self, PBXGroup) and self_name in order - other_in = isinstance(self, PBXGroup) and other_name in order - if not self_in and not other_in: - return self.Compare(other) - if self_name in order and not other_name in order: - return -1 - if other_name in order and not self_name in order: - return 1 - - # If both groups are in the listed order, go by the defined order. - self_index = order.index(self_name) - other_index = order.index(other_name) - if self_index < other_index: - return -1 - if self_index > other_index: - return 1 - return 0 - - def PathFromSourceTreeAndPath(self): - # Turn the object's sourceTree and path properties into a single flat - # string of a form comparable to the path parameter. If there's a - # sourceTree property other than "", wrap it in $(...) for the - # comparison. 
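-    # For example (illustrative): sourceTree "SDKROOT" with path "usr/lib"
-    # yields "$(SDKROOT)/usr/lib", sourceTree "" with path "foo" yields just
-    # "foo", and sourceTree "SDKROOT" with no path yields "$(SDKROOT)".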
- components = [] - if self._properties['sourceTree'] != '': - components.append('$(' + self._properties['sourceTree'] + ')') - if 'path' in self._properties: - components.append(self._properties['path']) - - if len(components) > 0: - return posixpath.join(*components) - - return None - - def FullPath(self): - # Returns a full path to self relative to the project file, or relative - # to some other source tree. Start with self, and walk up the chain of - # parents prepending their paths, if any, until no more parents are - # available (project-relative path) or until a path relative to some - # source tree is found. - xche = self - path = None - while isinstance(xche, XCHierarchicalElement) and \ - (path == None or \ - (not path.startswith('/') and not path.startswith('$'))): - this_path = xche.PathFromSourceTreeAndPath() - if this_path != None and path != None: - path = posixpath.join(this_path, path) - elif this_path != None: - path = this_path - xche = xche.parent - - return path - - -class PBXGroup(XCHierarchicalElement): - """ - Attributes: - _children_by_path: Maps pathnames of children of this PBXGroup to the - actual child XCHierarchicalElement objects. - _variant_children_by_name_and_path: Maps (name, path) tuples of - PBXVariantGroup children to the actual child PBXVariantGroup objects. - """ - - _schema = XCHierarchicalElement._schema.copy() - _schema.update({ - 'children': [1, XCHierarchicalElement, 1, 1, []], - 'name': [0, str, 0, 0], - 'path': [0, str, 0, 0], - }) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCHierarchicalElement.__init__(self, properties, id, parent) - self._children_by_path = {} - self._variant_children_by_name_and_path = {} - for child in self._properties.get('children', []): - self._AddChildToDicts(child) - - def _AddChildToDicts(self, child): - # Sets up this PBXGroup object's dicts to reference the child properly. - child_path = child.PathFromSourceTreeAndPath() - if child_path: - if child_path in self._children_by_path: - raise ValueError, 'Found multiple children with path ' + child_path - self._children_by_path[child_path] = child - - if isinstance(child, PBXVariantGroup): - child_name = child._properties.get('name', None) - key = (child_name, child_path) - if key in self._variant_children_by_name_and_path: - raise ValueError, 'Found multiple PBXVariantGroup children with ' + \ - 'name ' + str(child_name) + ' and path ' + \ - str(child_path) - self._variant_children_by_name_and_path[key] = child - - def AppendChild(self, child): - # Callers should use this instead of calling - # AppendProperty('children', child) directly because this function - # maintains the group's dicts. - self.AppendProperty('children', child) - self._AddChildToDicts(child) - - def GetChildByName(self, name): - # This is not currently optimized with a dict as GetChildByPath is because - # it has few callers. Most callers probably want GetChildByPath. This - # function is only useful to get children that have names but no paths, - # which is rare. The children of the main group ("Source", "Products", - # etc.) is pretty much the only case where this likely to come up. - # - # TODO(mark): Maybe this should raise an error if more than one child is - # present with the same name. 
- if not 'children' in self._properties: - return None - - for child in self._properties['children']: - if child.Name() == name: - return child - - return None - - def GetChildByPath(self, path): - if not path: - return None - - if path in self._children_by_path: - return self._children_by_path[path] - - return None - - def GetChildByRemoteObject(self, remote_object): - # This method is a little bit esoteric. Given a remote_object, which - # should be a PBXFileReference in another project file, this method will - # return this group's PBXReferenceProxy object serving as a local proxy - # for the remote PBXFileReference. - # - # This function might benefit from a dict optimization as GetChildByPath - # for some workloads, but profiling shows that it's not currently a - # problem. - if not 'children' in self._properties: - return None - - for child in self._properties['children']: - if not isinstance(child, PBXReferenceProxy): - continue - - container_proxy = child._properties['remoteRef'] - if container_proxy._properties['remoteGlobalIDString'] == remote_object: - return child - - return None - - def AddOrGetFileByPath(self, path, hierarchical): - """Returns an existing or new file reference corresponding to path. - - If hierarchical is True, this method will create or use the necessary - hierarchical group structure corresponding to path. Otherwise, it will - look in and create an item in the current group only. - - If an existing matching reference is found, it is returned, otherwise, a - new one will be created, added to the correct group, and returned. - - If path identifies a directory by virtue of carrying a trailing slash, - this method returns a PBXFileReference of "folder" type. If path - identifies a variant, by virtue of it identifying a file inside a directory - with an ".lproj" extension, this method returns a PBXVariantGroup - containing the variant named by path, and possibly other variants. For - all other paths, a "normal" PBXFileReference will be returned. - """ - - # Adding or getting a directory? Directories end with a trailing slash. - is_dir = False - if path.endswith('/'): - is_dir = True - normpath = posixpath.normpath(path) - if is_dir: - normpath = path + '/' - else: - normpath = path - - # Adding or getting a variant? Variants are files inside directories - # with an ".lproj" extension. Xcode uses variants for localization. For - # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named - # MainMenu.nib inside path/to, and give it a variant named Language. In - # this example, grandparent would be set to path/to and parent_root would - # be set to Language. - variant_name = None - parent = posixpath.dirname(path) - grandparent = posixpath.dirname(parent) - parent_basename = posixpath.basename(parent) - (parent_root, parent_ext) = posixpath.splitext(parent_basename) - if parent_ext == '.lproj': - variant_name = parent_root - if grandparent == '': - grandparent = None - - # Putting a directory inside a variant group is not currently supported. - assert not is_dir or variant_name == None - - path_split = path.split(posixpath.sep) - if len(path_split) == 1 or \ - ((is_dir or variant_name != None) and len(path_split) == 2) or \ - not hierarchical: - # The PBXFileReference or PBXVariantGroup will be added to or gotten from - # this PBXGroup, no recursion necessary. - if variant_name == None: - # Add or get a PBXFileReference. 
- file_ref = self.GetChildByPath(normpath) - if file_ref != None: - assert file_ref.__class__ == PBXFileReference - else: - file_ref = PBXFileReference({'path': path}) - self.AppendChild(file_ref) - else: - # Add or get a PBXVariantGroup. The variant group name is the same - # as the basename (MainMenu.nib in the example above). grandparent - # specifies the path to the variant group itself, and path_split[-2:] - # is the path of the specific variant relative to its group. - variant_group_name = posixpath.basename(path) - variant_group_ref = self.AddOrGetVariantGroupByNameAndPath( - variant_group_name, grandparent) - variant_path = posixpath.sep.join(path_split[-2:]) - variant_ref = variant_group_ref.GetChildByPath(variant_path) - if variant_ref != None: - assert variant_ref.__class__ == PBXFileReference - else: - variant_ref = PBXFileReference({'name': variant_name, - 'path': variant_path}) - variant_group_ref.AppendChild(variant_ref) - # The caller is interested in the variant group, not the specific - # variant file. - file_ref = variant_group_ref - return file_ref - else: - # Hierarchical recursion. Add or get a PBXGroup corresponding to the - # outermost path component, and then recurse into it, chopping off that - # path component. - next_dir = path_split[0] - group_ref = self.GetChildByPath(next_dir) - if group_ref != None: - assert group_ref.__class__ == PBXGroup - else: - group_ref = PBXGroup({'path': next_dir}) - self.AppendChild(group_ref) - return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]), - hierarchical) - - def AddOrGetVariantGroupByNameAndPath(self, name, path): - """Returns an existing or new PBXVariantGroup for name and path. - - If a PBXVariantGroup identified by the name and path arguments is already - present as a child of this object, it is returned. Otherwise, a new - PBXVariantGroup with the correct properties is created, added as a child, - and returned. - - This method will generally be called by AddOrGetFileByPath, which knows - when to create a variant group based on the structure of the pathnames - passed to it. - """ - - key = (name, path) - if key in self._variant_children_by_name_and_path: - variant_group_ref = self._variant_children_by_name_and_path[key] - assert variant_group_ref.__class__ == PBXVariantGroup - return variant_group_ref - - variant_group_properties = {'name': name} - if path != None: - variant_group_properties['path'] = path - variant_group_ref = PBXVariantGroup(variant_group_properties) - self.AppendChild(variant_group_ref) - - return variant_group_ref - - def TakeOverOnlyChild(self, recurse=False): - """If this PBXGroup has only one child and it's also a PBXGroup, take - it over by making all of its children this object's children. - - This function will continue to take over only children when those children - are groups. If there are three PBXGroups representing a, b, and c, with - c inside b and b inside a, and a and b have no other children, this will - result in a taking over both b and c, forming a PBXGroup for a/b/c. - - If recurse is True, this function will recurse into children and ask them - to collapse themselves by taking over only children as well. Assuming - an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f - (d1, d2, and f are files, the rest are groups), recursion will result in - a group for a/b/c containing a group for d3/e. - """ - - # At this stage, check that child class types are PBXGroup exactly, - # instead of using isinstance. 
The only subclass of PBXGroup, - # PBXVariantGroup, should not participate in reparenting in the same way: - # reparenting by merging different object types would be wrong. - while len(self._properties['children']) == 1 and \ - self._properties['children'][0].__class__ == PBXGroup: - # Loop to take over the innermost only-child group possible. - - child = self._properties['children'][0] - - # Assume the child's properties, including its children. Save a copy - # of this object's old properties, because they'll still be needed. - # This object retains its existing id and parent attributes. - old_properties = self._properties - self._properties = child._properties - self._children_by_path = child._children_by_path - - if not 'sourceTree' in self._properties or \ - self._properties['sourceTree'] == '': - # The child was relative to its parent. Fix up the path. Note that - # children with a sourceTree other than "" are not relative to - # their parents, so no path fix-up is needed in that case. - if 'path' in old_properties: - if 'path' in self._properties: - # Both the original parent and child have paths set. - self._properties['path'] = posixpath.join(old_properties['path'], - self._properties['path']) - else: - # Only the original parent has a path, use it. - self._properties['path'] = old_properties['path'] - if 'sourceTree' in old_properties: - # The original parent had a sourceTree set, use it. - self._properties['sourceTree'] = old_properties['sourceTree'] - - # If the original parent had a name set, keep using it. If the original - # parent didn't have a name but the child did, let the child's name - # live on. If the name attribute seems unnecessary now, get rid of it. - if 'name' in old_properties and old_properties['name'] != None and \ - old_properties['name'] != self.Name(): - self._properties['name'] = old_properties['name'] - if 'name' in self._properties and 'path' in self._properties and \ - self._properties['name'] == self._properties['path']: - del self._properties['name'] - - # Notify all children of their new parent. - for child in self._properties['children']: - child.parent = self - - # If asked to recurse, recurse. - if recurse: - for child in self._properties['children']: - if child.__class__ == PBXGroup: - child.TakeOverOnlyChild(recurse) - - def SortGroup(self): - self._properties['children'] = \ - sorted(self._properties['children'], cmp=lambda x,y: x.Compare(y)) - - # Recurse. - for child in self._properties['children']: - if isinstance(child, PBXGroup): - child.SortGroup() - - -class XCFileLikeElement(XCHierarchicalElement): - # Abstract base for objects that can be used as the fileRef property of - # PBXBuildFile. - - def PathHashables(self): - # A PBXBuildFile that refers to this object will call this method to - # obtain additional hashables specific to this XCFileLikeElement. Don't - # just use this object's hashables, they're not specific and unique enough - # on their own (without access to the parent hashables.) Instead, provide - # hashables that identify this object by path by getting its hashables as - # well as the hashables of ancestor XCHierarchicalElement objects. 
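-    # As a rough illustration, for a file reference at a/b/f1 the result is
-    # the hashables of its ancestor groups (outermost first) followed by its
-    # own, so the set identifies the file by its full path even though the
-    # reference's own path property may only be "f1".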
- - hashables = [] - xche = self - while xche != None and isinstance(xche, XCHierarchicalElement): - xche_hashables = xche.Hashables() - for index in xrange(0, len(xche_hashables)): - hashables.insert(index, xche_hashables[index]) - xche = xche.parent - return hashables - - -class XCContainerPortal(XCObject): - # Abstract base for objects that can be used as the containerPortal property - # of PBXContainerItemProxy. - pass - - -class XCRemoteObject(XCObject): - # Abstract base for objects that can be used as the remoteGlobalIDString - # property of PBXContainerItemProxy. - pass - - -class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject): - _schema = XCFileLikeElement._schema.copy() - _schema.update({ - 'explicitFileType': [0, str, 0, 0], - 'lastKnownFileType': [0, str, 0, 0], - 'name': [0, str, 0, 0], - 'path': [0, str, 0, 1], - }) - - # Weird output rules for PBXFileReference. - _should_print_single_line = True - # super - _encode_transforms = XCFileLikeElement._alternate_encode_transforms - - def __init__(self, properties=None, id=None, parent=None): - # super - XCFileLikeElement.__init__(self, properties, id, parent) - if 'path' in self._properties and self._properties['path'].endswith('/'): - self._properties['path'] = self._properties['path'][:-1] - is_dir = True - else: - is_dir = False - - if 'path' in self._properties and \ - not 'lastKnownFileType' in self._properties and \ - not 'explicitFileType' in self._properties: - # TODO(mark): This is the replacement for a replacement for a quick hack. - # It is no longer incredibly sucky, but this list needs to be extended. - extension_map = { - 'a': 'archive.ar', - 'app': 'wrapper.application', - 'bdic': 'file', - 'bundle': 'wrapper.cfbundle', - 'c': 'sourcecode.c.c', - 'cc': 'sourcecode.cpp.cpp', - 'cpp': 'sourcecode.cpp.cpp', - 'css': 'text.css', - 'cxx': 'sourcecode.cpp.cpp', - 'dylib': 'compiled.mach-o.dylib', - 'framework': 'wrapper.framework', - 'h': 'sourcecode.c.h', - 'hxx': 'sourcecode.cpp.h', - 'icns': 'image.icns', - 'java': 'sourcecode.java', - 'js': 'sourcecode.javascript', - 'm': 'sourcecode.c.objc', - 'mm': 'sourcecode.cpp.objcpp', - 'nib': 'wrapper.nib', - 'pdf': 'image.pdf', - 'pl': 'text.script.perl', - 'plist': 'text.plist.xml', - 'pm': 'text.script.perl', - 'png': 'image.png', - 'py': 'text.script.python', - 'r': 'sourcecode.rez', - 'rez': 'sourcecode.rez', - 's': 'sourcecode.asm', - 'strings': 'text.plist.strings', - 'ttf': 'file', - 'xcconfig': 'text.xcconfig', - 'xib': 'file.xib', - 'y': 'sourcecode.yacc', - } - - if is_dir: - file_type = 'folder' - else: - basename = posixpath.basename(self._properties['path']) - (root, ext) = posixpath.splitext(basename) - # Check the map using a lowercase extension. - # TODO(mark): Maybe it should try with the original case first and fall - # back to lowercase, in case there are any instances where case - # matters. There currently aren't. - if ext != '': - ext = ext[1:].lower() - - # TODO(mark): "text" is the default value, but "file" is appropriate - # for unrecognized files not containing text. Xcode seems to choose - # based on content. - file_type = extension_map.get(ext, 'text') - - self._properties['lastKnownFileType'] = file_type - - -class PBXVariantGroup(PBXGroup, XCFileLikeElement): - """PBXVariantGroup is used by Xcode to represent localizations.""" - # No additions to the schema relative to PBXGroup. - pass - - -# PBXReferenceProxy is also an XCFileLikeElement subclass. 
It is defined below -# because it uses PBXContainerItemProxy, defined below. - - -class XCBuildConfiguration(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'baseConfigurationReference': [0, PBXFileReference, 0, 0], - 'buildSettings': [0, dict, 0, 1, {}], - 'name': [0, str, 0, 1], - }) - - def HasBuildSetting(self, key): - return key in self._properties['buildSettings'] - - def GetBuildSetting(self, key): - return self._properties['buildSettings'][key] - - def SetBuildSetting(self, key, value): - # TODO(mark): If a list, copy? - self._properties['buildSettings'][key] = value - - def AppendBuildSetting(self, key, value): - if not key in self._properties['buildSettings']: - self._properties['buildSettings'][key] = [] - self._properties['buildSettings'][key].append(value) - - def DelBuildSetting(self, key): - if key in self._properties['buildSettings']: - del self._properties['buildSettings'][key] - - -class XCConfigurationList(XCObject): - # _configs is the default list of configurations. - _configs = [ XCBuildConfiguration({'name': 'Debug'}), - XCBuildConfiguration({'name': 'Release'}) ] - - _schema = XCObject._schema.copy() - _schema.update({ - 'buildConfigurations': [1, XCBuildConfiguration, 1, 1, _configs], - 'defaultConfigurationIsVisible': [0, int, 0, 1, 1], - 'defaultConfigurationName': [0, str, 0, 1, 'Release'], - }) - - def Name(self): - return 'Build configuration list for ' + \ - self.parent.__class__.__name__ + ' "' + self.parent.Name() + '"' - - def ConfigurationNamed(self, name): - """Convenience accessor to obtain an XCBuildConfiguration by name.""" - for configuration in self._properties['buildConfigurations']: - if configuration._properties['name'] == name: - return configuration - - raise KeyError, name - - def DefaultConfiguration(self): - """Convenience accessor to obtain the default XCBuildConfiguration.""" - return self.ConfigurationNamed(self._properties['defaultConfigurationName']) - - def HasBuildSetting(self, key): - """Determines the state of a build setting in all XCBuildConfiguration - child objects. - - If all child objects have key in their build settings, and the value is the - same in all child objects, returns 1. - - If no child objects have the key in their build settings, returns 0. - - If some, but not all, child objects have the key in their build settings, - or if any children have different values for the key, returns -1. - """ - - has = None - value = None - for configuration in self._properties['buildConfigurations']: - configuration_has = configuration.HasBuildSetting(key) - if has == None: - has = configuration_has - elif has != configuration_has: - return -1 - - if configuration_has: - configuration_value = configuration.GetBuildSetting(key) - if value == None: - value = configuration_value - elif value != configuration_value: - return -1 - - if not has: - return 0 - - return 1 - - def GetBuildSetting(self, key): - """Gets the build setting for key. - - All child XCConfiguration objects must have the same value set for the - setting, or a ValueError will be raised. - """ - - # TODO(mark): This is wrong for build settings that are lists. The list - # contents should be compared (and a list copy returned?) 
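-    # For example (illustrative): if the Debug and Release configurations both
-    # set PRODUCT_NAME to "app", GetBuildSetting('PRODUCT_NAME') returns
-    # "app"; if they disagree, ValueError is raised.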
- - value = None - for configuration in self._properties['buildConfigurations']: - configuration_value = configuration.GetBuildSetting(key) - if value == None: - value = configuration_value - else: - if value != configuration_value: - raise ValueError, 'Variant values for ' + key - - return value - - def SetBuildSetting(self, key, value): - """Sets the build setting for key to value in all child - XCBuildConfiguration objects. - """ - - for configuration in self._properties['buildConfigurations']: - configuration.SetBuildSetting(key, value) - - def AppendBuildSetting(self, key, value): - """Appends value to the build setting for key, which is treated as a list, - in all child XCBuildConfiguration objects. - """ - - for configuration in self._properties['buildConfigurations']: - configuration.AppendBuildSetting(key, value) - - def DelBuildSetting(self, key): - """Deletes the build setting key from all child XCBuildConfiguration - objects. - """ - - for configuration in self._properties['buildConfigurations']: - configuration.DelBuildSetting(key) - - -class PBXBuildFile(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'fileRef': [0, XCFileLikeElement, 0, 1], - }) - - # Weird output rules for PBXBuildFile. - _should_print_single_line = True - _encode_transforms = XCObject._alternate_encode_transforms - - def Name(self): - # Example: "main.cc in Sources" - return self._properties['fileRef'].Name() + ' in ' + self.parent.Name() - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # It is not sufficient to just rely on Name() to get the - # XCFileLikeElement's name, because that is not a complete pathname. - # PathHashables returns hashables unique enough that no two - # PBXBuildFiles should wind up with the same set of hashables, unless - # someone adds the same file multiple times to the same target. That - # would be considered invalid anyway. - hashables.extend(self._properties['fileRef'].PathHashables()) - - return hashables - - -class XCBuildPhase(XCObject): - """Abstract base for build phase classes. Not represented in a project - file. - - Attributes: - _files_by_path: A dict mapping each path of a child in the files list by - path (keys) to the corresponding PBXBuildFile children (values). - _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys) - to the corresponding PBXBuildFile children (values). - """ - - # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't - # actually have a "files" list. XCBuildPhase should not have "files" but - # another abstract subclass of it should provide this, and concrete build - # phase types that do have "files" lists should be derived from that new - # abstract subclass. XCBuildPhase should only provide buildActionMask and - # runOnlyForDeploymentPostprocessing, and not files or the various - # file-related methods and attributes. - - _schema = XCObject._schema.copy() - _schema.update({ - 'buildActionMask': [0, int, 0, 1, 0x7fffffff], - 'files': [1, PBXBuildFile, 1, 1, []], - 'runOnlyForDeploymentPostprocessing': [0, int, 0, 1, 0], - }) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCObject.__init__(self, properties, id, parent) - - self._files_by_path = {} - self._files_by_xcfilelikeelement = {} - for pbxbuildfile in self._properties.get('files', []): - self._AddBuildFileToDicts(pbxbuildfile) - - def FileGroup(self, path): - # Subclasses must override this by returning a two-element tuple. 
The - # first item in the tuple should be the PBXGroup to which "path" should be - # added, either as a child or deeper descendant. The second item should - # be a boolean indicating whether files should be added into hierarchical - # groups or one single flat group. - raise NotImplementedError, \ - self.__class__.__name__ + ' must implement FileGroup' - - def _AddPathToDict(self, pbxbuildfile, path): - """Adds path to the dict tracking paths belonging to this build phase. - - If the path is already a member of this build phase, raises an exception. - """ - - if path in self._files_by_path: - raise ValueError, 'Found multiple build files with path ' + path - self._files_by_path[path] = pbxbuildfile - - def _AddBuildFileToDicts(self, pbxbuildfile, path=None): - """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts. - - If path is specified, then it is the path that is being added to the - phase, and pbxbuildfile must contain either a PBXFileReference directly - referencing that path, or it must contain a PBXVariantGroup that itself - contains a PBXFileReference referencing the path. - - If path is not specified, either the PBXFileReference's path or the paths - of all children of the PBXVariantGroup are taken as being added to the - phase. - - If the path is already present in the phase, raises an exception. - - If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile - are already present in the phase, referenced by a different PBXBuildFile - object, raises an exception. This does not raise an exception when - a PBXFileReference or PBXVariantGroup reappear and are referenced by the - same PBXBuildFile that has already introduced them, because in the case - of PBXVariantGroup objects, they may correspond to multiple paths that are - not all added simultaneously. When this situation occurs, the path needs - to be added to _files_by_path, but nothing needs to change in - _files_by_xcfilelikeelement, and the caller should have avoided adding - the PBXBuildFile if it is already present in the list of children. - """ - - xcfilelikeelement = pbxbuildfile._properties['fileRef'] - - paths = [] - if path != None: - # It's best when the caller provides the path. - if isinstance(xcfilelikeelement, PBXVariantGroup): - paths.append(path) - else: - # If the caller didn't provide a path, there can be either multiple - # paths (PBXVariantGroup) or one. - if isinstance(xcfilelikeelement, PBXVariantGroup): - for variant in xcfilelikeelement._properties['children']: - paths.append(variant.FullPath()) - else: - paths.append(xcfilelikeelement.FullPath()) - - # Add the paths first, because if something's going to raise, the - # messages provided by _AddPathToDict are more useful owing to its - # having access to a real pathname and not just an object's Name(). - for a_path in paths: - self._AddPathToDict(pbxbuildfile, a_path) - - # If another PBXBuildFile references this XCFileLikeElement, there's a - # problem. - if xcfilelikeelement in self._files_by_xcfilelikeelement and \ - self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile: - raise ValueError, 'Found multiple build files for ' + \ - xcfilelikeelement.Name() - self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile - - def AppendBuildFile(self, pbxbuildfile, path=None): - # Callers should use this instead of calling - # AppendProperty('files', pbxbuildfile) directly because this function - # maintains the object's dicts. 
Better yet, callers can just call AddFile - # with a pathname and not worry about building their own PBXBuildFile - # objects. - self.AppendProperty('files', pbxbuildfile) - self._AddBuildFileToDicts(pbxbuildfile, path) - - def AddFile(self, path): - (file_group, hierarchical) = self.FileGroup(path) - file_ref = file_group.AddOrGetFileByPath(path, hierarchical) - - if file_ref in self._files_by_xcfilelikeelement and \ - isinstance(file_ref, PBXVariantGroup): - # There's already a PBXBuildFile in this phase corresponding to the - # PBXVariantGroup. path just provides a new variant that belongs to - # the group. Add the path to the dict. - pbxbuildfile = self._files_by_xcfilelikeelement[file_ref] - self._AddBuildFileToDicts(pbxbuildfile, path) - else: - # Add a new PBXBuildFile to get file_ref into the phase. - pbxbuildfile = PBXBuildFile({'fileRef': file_ref}) - self.AppendBuildFile(pbxbuildfile, path) - - -class PBXHeadersBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return 'Headers' - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXResourcesBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return 'Resources' - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXSourcesBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return 'Sources' - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXFrameworksBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return 'Frameworks' - - def FileGroup(self, path): - return (self.PBXProjectAncestor().FrameworksGroup(), False) - - -class PBXShellScriptBuildPhase(XCBuildPhase): - _schema = XCBuildPhase._schema.copy() - _schema.update({ - 'inputPaths': [1, str, 0, 1, []], - 'name': [0, str, 0, 0], - 'outputPaths': [1, str, 0, 1, []], - 'shellPath': [0, str, 0, 1, '/bin/sh'], - 'shellScript': [0, str, 0, 1], - 'showEnvVarsInLog': [0, int, 0, 0], - }) - - def Name(self): - if 'name' in self._properties: - return self._properties['name'] - - return 'ShellScript' - - -class PBXCopyFilesBuildPhase(XCBuildPhase): - _schema = XCBuildPhase._schema.copy() - _schema.update({ - 'dstPath': [0, str, 0, 1], - 'dstSubfolderSpec': [0, int, 0, 1], - 'name': [0, str, 0, 0], - }) - - # path_tree_re matches "$(DIR)/path" or just "$(DIR)". Match group 1 is - # "DIR", match group 3 is "path" or None. - path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$') - - # path_tree_to_subfolder maps names of Xcode variables to the associated - # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object. - path_tree_to_subfolder = { - 'BUILT_PRODUCTS_DIR': 16, # Products Directory - # Other types that can be chosen via the Xcode UI. - # TODO(mark): Map Xcode variable names to these. - # : 1, # Wrapper - # : 6, # Executables: 6 - # : 7, # Resources - # : 15, # Java Resources - # : 10, # Frameworks - # : 11, # Shared Frameworks - # : 12, # Shared Support - # : 13, # PlugIns - } - - def Name(self): - if 'name' in self._properties: - return self._properties['name'] - - return 'CopyFiles' - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - def SetDestination(self, path): - """Set the dstSubfolderSpec and dstPath properties from path. 
- - path may be specified in the same notation used for XCHierarchicalElements, - specifically, "$(DIR)/path". - """ - - path_tree_match = self.path_tree_re.search(path) - if path_tree_match: - # Everything else needs to be relative to an Xcode variable. - path_tree = path_tree_match.group(1) - relative_path = path_tree_match.group(3) - - if path_tree in self.path_tree_to_subfolder: - subfolder = self.path_tree_to_subfolder[path_tree] - if relative_path == None: - relative_path = '' - else: - # The path starts with an unrecognized Xcode variable - # name like $(SRCROOT). Xcode will still handle this - # as an "absolute path" that starts with the variable. - subfolder = 0 - relative_path = path - elif path.startswith('/'): - # Special case. Absolute paths are in dstSubfolderSpec 0. - subfolder = 0 - relative_path = path[1:] - else: - raise ValueError, 'Can\'t use path %s in a %s' % \ - (path, self.__class__.__name__) - - self._properties['dstPath'] = relative_path - self._properties['dstSubfolderSpec'] = subfolder - - -class PBXBuildRule(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'compilerSpec': [0, str, 0, 1], - 'filePatterns': [0, str, 0, 0], - 'fileType': [0, str, 0, 1], - 'isEditable': [0, int, 0, 1, 1], - 'outputFiles': [1, str, 0, 1, []], - 'script': [0, str, 0, 0], - }) - - def Name(self): - # Not very inspired, but it's what Xcode uses. - return self.__class__.__name__ - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # Use the hashables of the weak objects that this object refers to. - hashables.append(self._properties['fileType']) - if 'filePatterns' in self._properties: - hashables.append(self._properties['filePatterns']) - return hashables - - -class PBXContainerItemProxy(XCObject): - # When referencing an item in this project file, containerPortal is the - # PBXProject root object of this project file. When referencing an item in - # another project file, containerPortal is a PBXFileReference identifying - # the other project file. - # - # When serving as a proxy to an XCTarget (in this project file or another), - # proxyType is 1. When serving as a proxy to a PBXFileReference (in another - # project file), proxyType is 2. Type 2 is used for references to the - # producs of the other project file's targets. - # - # Xcode is weird about remoteGlobalIDString. Usually, it's printed without - # a comment, indicating that it's tracked internally simply as a string, but - # sometimes it's printed with a comment (usually when the object is initially - # created), indicating that it's tracked as a project file object at least - # sometimes. This module always tracks it as an object, but contains a hack - # to prevent it from printing the comment in the project file output. See - # _XCKVPrint. - _schema = XCObject._schema.copy() - _schema.update({ - 'containerPortal': [0, XCContainerPortal, 0, 1], - 'proxyType': [0, int, 0, 1], - 'remoteGlobalIDString': [0, XCRemoteObject, 0, 1], - 'remoteInfo': [0, str, 0, 1], - }) - - def __repr__(self): - props = self._properties - name = '%s.gyp:%s' % (props['containerPortal'].Name(), props['remoteInfo']) - return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self)) - - def Name(self): - # Admittedly not the best name, but it's what Xcode uses. - return self.__class__.__name__ - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # Use the hashables of the weak objects that this object refers to. 
- hashables.extend(self._properties['containerPortal'].Hashables()) - hashables.extend(self._properties['remoteGlobalIDString'].Hashables()) - return hashables - - -class PBXTargetDependency(XCObject): - # The "target" property accepts an XCTarget object, and obviously not - # NoneType. But XCTarget is defined below, so it can't be put into the - # schema yet. The definition of PBXTargetDependency can't be moved below - # XCTarget because XCTarget's own schema references PBXTargetDependency. - # Python doesn't deal well with this circular relationship, and doesn't have - # a real way to do forward declarations. To work around, the type of - # the "target" property is reset below, after XCTarget is defined. - # - # At least one of "name" and "target" is required. - _schema = XCObject._schema.copy() - _schema.update({ - 'name': [0, str, 0, 0], - 'target': [0, None.__class__, 0, 0], - 'targetProxy': [0, PBXContainerItemProxy, 1, 1], - }) - - def __repr__(self): - name = self._properties.get('name') or self._properties['target'].Name() - return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self)) - - def Name(self): - # Admittedly not the best name, but it's what Xcode uses. - return self.__class__.__name__ - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # Use the hashables of the weak objects that this object refers to. - hashables.extend(self._properties['targetProxy'].Hashables()) - return hashables - - -class PBXReferenceProxy(XCFileLikeElement): - _schema = XCFileLikeElement._schema.copy() - _schema.update({ - 'fileType': [0, str, 0, 1], - 'path': [0, str, 0, 1], - 'remoteRef': [0, PBXContainerItemProxy, 1, 1], - }) - - -class XCTarget(XCRemoteObject): - # An XCTarget is really just an XCObject, the XCRemoteObject thing is just - # to allow PBXProject to be used in the remoteGlobalIDString property of - # PBXContainerItemProxy. - # - # Setting a "name" property at instantiation may also affect "productName", - # which may in turn affect the "PRODUCT_NAME" build setting in children of - # "buildConfigurationList". See __init__ below. - _schema = XCRemoteObject._schema.copy() - _schema.update({ - 'buildConfigurationList': [0, XCConfigurationList, 1, 1, - XCConfigurationList()], - 'buildPhases': [1, XCBuildPhase, 1, 1, []], - 'dependencies': [1, PBXTargetDependency, 1, 1, []], - 'name': [0, str, 0, 1], - 'productName': [0, str, 0, 1], - }) - - def __init__(self, properties=None, id=None, parent=None, - force_outdir=None, force_prefix=None, force_extension=None): - # super - XCRemoteObject.__init__(self, properties, id, parent) - - # Set up additional defaults not expressed in the schema. If a "name" - # property was supplied, set "productName" if it is not present. Also set - # the "PRODUCT_NAME" build setting in each configuration, but only if - # the setting is not present in any build configuration. - if 'name' in self._properties: - if not 'productName' in self._properties: - self.SetProperty('productName', self._properties['name']) - - if 'productName' in self._properties: - if 'buildConfigurationList' in self._properties: - configs = self._properties['buildConfigurationList'] - if configs.HasBuildSetting('PRODUCT_NAME') == 0: - configs.SetBuildSetting('PRODUCT_NAME', - self._properties['productName']) - - def AddDependency(self, other): - pbxproject = self.PBXProjectAncestor() - other_pbxproject = other.PBXProjectAncestor() - if pbxproject == other_pbxproject: - # The easy case. Add a dependency to another target in the same - # project file. 
- container = PBXContainerItemProxy({'containerPortal': pbxproject, - 'proxyType': 1, - 'remoteGlobalIDString': other, - 'remoteInfo': other.Name()}) - dependency = PBXTargetDependency({'target': other, - 'targetProxy': container}) - self.AppendProperty('dependencies', dependency) - else: - # The hard case. Add a dependency to a target in a different project - # file. Actually, this case isn't really so hard. - other_project_ref = \ - pbxproject.AddOrGetProjectReference(other_pbxproject)[1] - container = PBXContainerItemProxy({ - 'containerPortal': other_project_ref, - 'proxyType': 1, - 'remoteGlobalIDString': other, - 'remoteInfo': other.Name(), - }) - dependency = PBXTargetDependency({'name': other.Name(), - 'targetProxy': container}) - self.AppendProperty('dependencies', dependency) - - # Proxy all of these through to the build configuration list. - - def ConfigurationNamed(self, name): - return self._properties['buildConfigurationList'].ConfigurationNamed(name) - - def DefaultConfiguration(self): - return self._properties['buildConfigurationList'].DefaultConfiguration() - - def HasBuildSetting(self, key): - return self._properties['buildConfigurationList'].HasBuildSetting(key) - - def GetBuildSetting(self, key): - return self._properties['buildConfigurationList'].GetBuildSetting(key) - - def SetBuildSetting(self, key, value): - return self._properties['buildConfigurationList'].SetBuildSetting(key, \ - value) - - def AppendBuildSetting(self, key, value): - return self._properties['buildConfigurationList'].AppendBuildSetting(key, \ - value) - - def DelBuildSetting(self, key): - return self._properties['buildConfigurationList'].DelBuildSetting(key) - - -# Redefine the type of the "target" property. See PBXTargetDependency._schema -# above. -PBXTargetDependency._schema['target'][1] = XCTarget - - -class PBXNativeTarget(XCTarget): - # buildPhases is overridden in the schema to be able to set defaults. - # - # NOTE: Contrary to most objects, it is advisable to set parent when - # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject - # object. A parent reference is required for a PBXNativeTarget during - # construction to be able to set up the target defaults for productReference, - # because a PBXBuildFile object must be created for the target and it must - # be added to the PBXProject's mainGroup hierarchy. - _schema = XCTarget._schema.copy() - _schema.update({ - 'buildPhases': [1, XCBuildPhase, 1, 1, - [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()]], - 'buildRules': [1, PBXBuildRule, 1, 1, []], - 'productReference': [0, PBXFileReference, 0, 1], - 'productType': [0, str, 0, 1], - }) - - # Mapping from Xcode product-types to settings. The settings are: - # filetype : used for explicitFileType in the project file - # prefix : the prefix for the file name - # suffix : the suffix for the filen ame - # set_xc_exe_prefix : bool to say if EXECUTABLE_PREFIX should be set to the - # prefix value. 
- _product_filetypes = { - 'com.apple.product-type.application': ['wrapper.application', - '', '.app', False], - 'com.apple.product-type.bundle': ['wrapper.cfbundle', - '', '.bundle', False], - 'com.apple.product-type.framework': ['wrapper.framework', - '', '.framework', False], - 'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib', - 'lib', '.dylib', True], - 'com.apple.product-type.library.static': ['archive.ar', - 'lib', '.a', False], - 'com.apple.product-type.tool': ['compiled.mach-o.executable', - '', '', False], - } - - def __init__(self, properties=None, id=None, parent=None, - force_outdir=None, force_prefix=None, force_extension=None): - # super - XCTarget.__init__(self, properties, id, parent) - - if 'productName' in self._properties and \ - 'productType' in self._properties and \ - not 'productReference' in self._properties and \ - self._properties['productType'] in self._product_filetypes: - products_group = None - pbxproject = self.PBXProjectAncestor() - if pbxproject != None: - products_group = pbxproject.ProductsGroup() - - if products_group != None: - (filetype, prefix, suffix, set_xc_exe_prefix) = \ - self._product_filetypes[self._properties['productType']] - - if force_extension is not None: - # If it's a wrapper (bundle), set WRAPPER_EXTENSION. - if filetype.startswith('wrapper.'): - self.SetBuildSetting('WRAPPER_EXTENSION', force_extension) - else: - # Extension override. - suffix = '.' + force_extension - self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension) - - if filetype.startswith('compiled.mach-o.executable'): - product_name = self._properties['productName'] - product_name += suffix - suffix = '' - self.SetProperty('productName', product_name) - self.SetBuildSetting('PRODUCT_NAME', product_name) - - # Xcode handles most prefixes based on the target type, however there - # are exceptions. If a "BSD Dynamic Library" target is added in the - # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that - # behavior. - if force_prefix is not None: - prefix = force_prefix - if filetype.startswith('wrapper.'): - self.SetBuildSetting('WRAPPER_PREFIX', prefix) - else: - self.SetBuildSetting('EXECUTABLE_PREFIX', prefix) - - if force_outdir is not None: - self.SetBuildSetting('TARGET_BUILD_DIR', force_outdir) - - # TODO(tvl): Remove the below hack. - # http://code.google.com/p/gyp/issues/detail?id=122 - - # Some targets include the prefix in the target_name. These targets - # really should just add a product_name setting that doesn't include - # the prefix. For example: - # target_name = 'libevent', product_name = 'event' - # This check cleans up for them. 
- product_name = self._properties['productName'] - prefix_len = len(prefix) - if prefix_len and (product_name[:prefix_len] == prefix): - product_name = product_name[prefix_len:] - self.SetProperty('productName', product_name) - self.SetBuildSetting('PRODUCT_NAME', product_name) - - ref_props = { - 'explicitFileType': filetype, - 'includeInIndex': 0, - 'path': prefix + product_name + suffix, - 'sourceTree': 'BUILT_PRODUCTS_DIR', - } - file_ref = PBXFileReference(ref_props) - products_group.AppendChild(file_ref) - self.SetProperty('productReference', file_ref) - - def GetBuildPhaseByType(self, type): - if not 'buildPhases' in self._properties: - return None - - the_phase = None - for phase in self._properties['buildPhases']: - if isinstance(phase, type): - # Some phases may be present in multiples in a well-formed project file, - # but phases like PBXSourcesBuildPhase may only be present singly, and - # this function is intended as an aid to GetBuildPhaseByType. Loop - # over the entire list of phases and assert if more than one of the - # desired type is found. - assert the_phase == None - the_phase = phase - - return the_phase - - def ResourcesPhase(self): - resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase) - if resources_phase == None: - resources_phase = PBXResourcesBuildPhase() - - # The resources phase should come before the sources and frameworks - # phases, if any. - insert_at = len(self._properties['buildPhases']) - for index in xrange(0, len(self._properties['buildPhases'])): - phase = self._properties['buildPhases'][index] - if isinstance(phase, PBXSourcesBuildPhase) or \ - isinstance(phase, PBXFrameworksBuildPhase): - insert_at = index - break - - self._properties['buildPhases'].insert(insert_at, resources_phase) - resources_phase.parent = self - - return resources_phase - - def SourcesPhase(self): - sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase) - if sources_phase == None: - sources_phase = PBXSourcesBuildPhase() - self.AppendProperty('buildPhases', sources_phase) - - return sources_phase - - def FrameworksPhase(self): - frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase) - if frameworks_phase == None: - frameworks_phase = PBXFrameworksBuildPhase() - self.AppendProperty('buildPhases', frameworks_phase) - - return frameworks_phase - - def AddDependency(self, other): - # super - XCTarget.AddDependency(self, other) - - static_library_type = 'com.apple.product-type.library.static' - shared_library_type = 'com.apple.product-type.library.dynamic' - framework_type = 'com.apple.product-type.framework' - if isinstance(other, PBXNativeTarget) and \ - 'productType' in self._properties and \ - self._properties['productType'] != static_library_type and \ - 'productType' in other._properties and \ - (other._properties['productType'] == static_library_type or \ - ((other._properties['productType'] == shared_library_type or \ - other._properties['productType'] == framework_type) and \ - ((not other.HasBuildSetting('MACH_O_TYPE')) or - other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))): - - file_ref = other.GetProperty('productReference') - - pbxproject = self.PBXProjectAncestor() - other_pbxproject = other.PBXProjectAncestor() - if pbxproject != other_pbxproject: - other_project_product_group = \ - pbxproject.AddOrGetProjectReference(other_pbxproject)[0] - file_ref = other_project_product_group.GetChildByRemoteObject(file_ref) - - self.FrameworksPhase().AppendProperty('files', - PBXBuildFile({'fileRef': file_ref})) - - -class 
PBXAggregateTarget(XCTarget): - pass - - -class PBXProject(XCContainerPortal): - # A PBXProject is really just an XCObject, the XCContainerPortal thing is - # just to allow PBXProject to be used in the containerPortal property of - # PBXContainerItemProxy. - """ - - Attributes: - path: "sample.xcodeproj". TODO(mark) Document me! - _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each - value is a reference to the dict in the - projectReferences list associated with the keyed - PBXProject. - """ - - _schema = XCContainerPortal._schema.copy() - _schema.update({ - 'attributes': [0, dict, 0, 0], - 'buildConfigurationList': [0, XCConfigurationList, 1, 1, - XCConfigurationList()], - 'compatibilityVersion': [0, str, 0, 1, 'Xcode 3.1'], - 'hasScannedForEncodings': [0, int, 0, 1, 1], - 'mainGroup': [0, PBXGroup, 1, 1, PBXGroup()], - 'projectDirPath': [0, str, 0, 1, ''], - 'projectReferences': [1, dict, 0, 0], - 'projectRoot': [0, str, 0, 1, ''], - 'targets': [1, XCTarget, 1, 1, []], - }) - - def __init__(self, properties=None, id=None, parent=None, path=None): - self.path = path - self._other_pbxprojects = {} - # super - return XCContainerPortal.__init__(self, properties, id, parent) - - def Name(self): - name = self.path - if name[-10:] == '.xcodeproj': - name = name[:-10] - return posixpath.basename(name) - - def Path(self): - return self.path - - def Comment(self): - return 'Project object' - - def Children(self): - # super - children = XCContainerPortal.Children(self) - - # Add children that the schema doesn't know about. Maybe there's a more - # elegant way around this, but this is the only case where we need to own - # objects in a dictionary (that is itself in a list), and three lines for - # a one-off isn't that big a deal. - if 'projectReferences' in self._properties: - for reference in self._properties['projectReferences']: - children.append(reference['ProductGroup']) - - return children - - def PBXProjectAncestor(self): - return self - - def _GroupByName(self, name): - if not 'mainGroup' in self._properties: - self.SetProperty('mainGroup', PBXGroup()) - - main_group = self._properties['mainGroup'] - group = main_group.GetChildByName(name) - if group == None: - group = PBXGroup({'name': name}) - main_group.AppendChild(group) - - return group - - # SourceGroup and ProductsGroup are created by default in Xcode's own - # templates. - def SourceGroup(self): - return self._GroupByName('Source') - - def ProductsGroup(self): - return self._GroupByName('Products') - - # IntermediatesGroup is used to collect source-like files that are generated - # by rules or script phases and are placed in intermediate directories such - # as DerivedSources. - def IntermediatesGroup(self): - return self._GroupByName('Intermediates') - - # FrameworksGroup and ProjectsGroup are top-level groups used to collect - # frameworks and projects. - def FrameworksGroup(self): - return self._GroupByName('Frameworks') - - def ProjectsGroup(self): - return self._GroupByName('Projects') - - def RootGroupForPath(self, path): - """Returns a PBXGroup child of this object to which path should be added. - - This method is intended to choose between SourceGroup and - IntermediatesGroup on the basis of whether path is present in a source - directory or an intermediates directory. For the purposes of this - determination, any path located within a derived file directory such as - PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates - directory. - - The returned value is a two-element tuple. 
The first element is the - PBXGroup, and the second element specifies whether that group should be - organized hierarchically (True) or as a single flat list (False). - """ - - # TODO(mark): make this a class variable and bind to self on call? - # Also, this list is nowhere near exhaustive. - # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by - # gyp.generator.xcode. There should probably be some way for that module - # to push the names in, rather than having to hard-code them here. - source_tree_groups = { - 'DERIVED_FILE_DIR': (self.IntermediatesGroup, True), - 'INTERMEDIATE_DIR': (self.IntermediatesGroup, True), - 'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True), - 'SHARED_INTERMEDIATE_DIR': (self.IntermediatesGroup, True), - } - - (source_tree, path) = SourceTreeAndPathFromPath(path) - if source_tree != None and source_tree in source_tree_groups: - (group_func, hierarchical) = source_tree_groups[source_tree] - group = group_func() - return (group, hierarchical) - - # TODO(mark): make additional choices based on file extension. - - return (self.SourceGroup(), True) - - def AddOrGetFileInRootGroup(self, path): - """Returns a PBXFileReference corresponding to path in the correct group - according to RootGroupForPath's heuristics. - - If an existing PBXFileReference for path exists, it will be returned. - Otherwise, one will be created and returned. - """ - - (group, hierarchical) = self.RootGroupForPath(path) - return group.AddOrGetFileByPath(path, hierarchical) - - def RootGroupsTakeOverOnlyChildren(self, recurse=False): - """Calls TakeOverOnlyChild for all groups in the main group.""" - - for group in self._properties['mainGroup']._properties['children']: - if isinstance(group, PBXGroup): - group.TakeOverOnlyChild(recurse) - - def SortGroups(self): - # Sort the children of the mainGroup (like "Source" and "Products") - # according to their defined order. - self._properties['mainGroup']._properties['children'] = \ - sorted(self._properties['mainGroup']._properties['children'], - cmp=lambda x,y: x.CompareRootGroup(y)) - - # Sort everything else by putting group before files, and going - # alphabetically by name within sections of groups and files. SortGroup - # is recursive. - for group in self._properties['mainGroup']._properties['children']: - if not isinstance(group, PBXGroup): - continue - - if group.Name() == 'Products': - # The Products group is a special case. Instead of sorting - # alphabetically, sort things in the order of the targets that - # produce the products. To do this, just build up a new list of - # products based on the targets. - products = [] - for target in self._properties['targets']: - if not isinstance(target, PBXNativeTarget): - continue - product = target._properties['productReference'] - # Make sure that the product is already in the products group. - assert product in group._properties['children'] - products.append(product) - - # Make sure that this process doesn't miss anything that was already - # in the products group. - assert len(products) == len(group._properties['children']) - group._properties['children'] = products - else: - group.SortGroup() - - def AddOrGetProjectReference(self, other_pbxproject): - """Add a reference to another project file (via PBXProject object) to this - one. - - Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in - this project file that contains a PBXReferenceProxy object for each - product of each PBXNativeTarget in the other project file. 
ProjectRef is - a PBXFileReference to the other project file. - - If this project file already references the other project file, the - existing ProductGroup and ProjectRef are returned. The ProductGroup will - still be updated if necessary. - """ - - if not 'projectReferences' in self._properties: - self._properties['projectReferences'] = [] - - product_group = None - project_ref = None - - if not other_pbxproject in self._other_pbxprojects: - # This project file isn't yet linked to the other one. Establish the - # link. - product_group = PBXGroup({'name': 'Products'}) - - # ProductGroup is strong. - product_group.parent = self - - # There's nothing unique about this PBXGroup, and if left alone, it will - # wind up with the same set of hashables as all other PBXGroup objects - # owned by the projectReferences list. Add the hashables of the - # remote PBXProject that it's related to. - product_group._hashables.extend(other_pbxproject.Hashables()) - - # The other project reports its path as relative to the same directory - # that this project's path is relative to. The other project's path - # is not necessarily already relative to this project. Figure out the - # pathname that this project needs to use to refer to the other one. - this_path = posixpath.dirname(self.Path()) - projectDirPath = self.GetProperty('projectDirPath') - if projectDirPath: - if posixpath.isabs(projectDirPath[0]): - this_path = projectDirPath - else: - this_path = posixpath.join(this_path, projectDirPath) - other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path) - - # ProjectRef is weak (it's owned by the mainGroup hierarchy). - project_ref = PBXFileReference({ - 'lastKnownFileType': 'wrapper.pb-project', - 'path': other_path, - 'sourceTree': 'SOURCE_ROOT', - }) - self.ProjectsGroup().AppendChild(project_ref) - - ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref} - self._other_pbxprojects[other_pbxproject] = ref_dict - self.AppendProperty('projectReferences', ref_dict) - - # Xcode seems to sort this list case-insensitively - self._properties['projectReferences'] = \ - sorted(self._properties['projectReferences'], cmp=lambda x,y: - cmp(x['ProjectRef'].Name().lower(), - y['ProjectRef'].Name().lower())) - else: - # The link already exists. Pull out the relevnt data. - project_ref_dict = self._other_pbxprojects[other_pbxproject] - product_group = project_ref_dict['ProductGroup'] - project_ref = project_ref_dict['ProjectRef'] - - self._SetUpProductReferences(other_pbxproject, product_group, project_ref) - - return [product_group, project_ref] - - def _SetUpProductReferences(self, other_pbxproject, product_group, - project_ref): - # TODO(mark): This only adds references to products in other_pbxproject - # when they don't exist in this pbxproject. Perhaps it should also - # remove references from this pbxproject that are no longer present in - # other_pbxproject. Perhaps it should update various properties if they - # change. - for target in other_pbxproject._properties['targets']: - if not isinstance(target, PBXNativeTarget): - continue - - other_fileref = target._properties['productReference'] - if product_group.GetChildByRemoteObject(other_fileref) == None: - # Xcode sets remoteInfo to the name of the target and not the name - # of its product, despite this proxy being a reference to the product. 
- container_item = PBXContainerItemProxy({ - 'containerPortal': project_ref, - 'proxyType': 2, - 'remoteGlobalIDString': other_fileref, - 'remoteInfo': target.Name() - }) - # TODO(mark): Does sourceTree get copied straight over from the other - # project? Can the other project ever have lastKnownFileType here - # instead of explicitFileType? (Use it if so?) Can path ever be - # unset? (I don't think so.) Can other_fileref have name set, and - # does it impact the PBXReferenceProxy if so? These are the questions - # that perhaps will be answered one day. - reference_proxy = PBXReferenceProxy({ - 'fileType': other_fileref._properties['explicitFileType'], - 'path': other_fileref._properties['path'], - 'sourceTree': other_fileref._properties['sourceTree'], - 'remoteRef': container_item, - }) - - product_group.AppendChild(reference_proxy) - - def SortRemoteProductReferences(self): - # For each remote project file, sort the associated ProductGroup in the - # same order that the targets are sorted in the remote project file. This - # is the sort order used by Xcode. - - def CompareProducts(x, y, remote_products): - # x and y are PBXReferenceProxy objects. Go through their associated - # PBXContainerItem to get the remote PBXFileReference, which will be - # present in the remote_products list. - x_remote = x._properties['remoteRef']._properties['remoteGlobalIDString'] - y_remote = y._properties['remoteRef']._properties['remoteGlobalIDString'] - x_index = remote_products.index(x_remote) - y_index = remote_products.index(y_remote) - - # Use the order of each remote PBXFileReference in remote_products to - # determine the sort order. - return cmp(x_index, y_index) - - for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems(): - # Build up a list of products in the remote project file, ordered the - # same as the targets that produce them. - remote_products = [] - for target in other_pbxproject._properties['targets']: - if not isinstance(target, PBXNativeTarget): - continue - remote_products.append(target._properties['productReference']) - - # Sort the PBXReferenceProxy children according to the list of remote - # products. - product_group = ref_dict['ProductGroup'] - product_group._properties['children'] = sorted( - product_group._properties['children'], - cmp=lambda x, y: CompareProducts(x, y, remote_products)) - - -class XCProjectFile(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'archiveVersion': [0, int, 0, 1, 1], - 'classes': [0, dict, 0, 1, {}], - 'objectVersion': [0, int, 0, 1, 45], - 'rootObject': [0, PBXProject, 1, 1], - }) - - def ComputeIDs(self, recursive=True, overwrite=True, hash=None): - # Although XCProjectFile is implemented here as an XCObject, it's not a - # proper object in the Xcode sense, and it certainly doesn't have its own - # ID. Pass through an attempt to update IDs to the real root object. - if recursive: - self._properties['rootObject'].ComputeIDs(recursive, overwrite, hash) - - def Print(self, file=sys.stdout): - self.VerifyHasRequiredProperties() - - # Add the special "objects" property, which will be caught and handled - # separately during printing. This structure allows a fairly standard - # loop do the normal printing. 
- self._properties['objects'] = {} - self._XCPrint(file, 0, '// !$*UTF8*$!\n') - if self._should_print_single_line: - self._XCPrint(file, 0, '{ ') - else: - self._XCPrint(file, 0, '{\n') - for property, value in sorted(self._properties.iteritems(), - cmp=lambda x, y: cmp(x, y)): - if property == 'objects': - self._PrintObjects(file) - else: - self._XCKVPrint(file, 1, property, value) - self._XCPrint(file, 0, '}\n') - del self._properties['objects'] - - def _PrintObjects(self, file): - if self._should_print_single_line: - self._XCPrint(file, 0, 'objects = {') - else: - self._XCPrint(file, 1, 'objects = {\n') - - objects_by_class = {} - for object in self.Descendants(): - if object == self: - continue - class_name = object.__class__.__name__ - if not class_name in objects_by_class: - objects_by_class[class_name] = [] - objects_by_class[class_name].append(object) - - for class_name in sorted(objects_by_class): - self._XCPrint(file, 0, '\n') - self._XCPrint(file, 0, '/* Begin ' + class_name + ' section */\n') - for object in sorted(objects_by_class[class_name], - cmp=lambda x, y: cmp(x.id, y.id)): - object.Print(file) - self._XCPrint(file, 0, '/* End ' + class_name + ' section */\n') - - if self._should_print_single_line: - self._XCPrint(file, 0, '}; ') - else: - self._XCPrint(file, 1, '};\n') diff --git a/mozc_build_tools/gyp/samples/samples b/mozc_build_tools/gyp/samples/samples deleted file mode 100755 index 804b618..0000000 --- a/mozc_build_tools/gyp/samples/samples +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import os.path -import shutil -import sys - - -gyps = [ - 'app/app.gyp', - 'base/base.gyp', - 'build/temp_gyp/googleurl.gyp', - 'build/all.gyp', - 'build/common.gypi', - 'build/external_code.gypi', - 'chrome/test/security_tests/security_tests.gyp', - 'chrome/third_party/hunspell/hunspell.gyp', - 'chrome/chrome.gyp', - 'media/media.gyp', - 'net/net.gyp', - 'printing/printing.gyp', - 'sdch/sdch.gyp', - 'skia/skia.gyp', - 'testing/gmock.gyp', - 'testing/gtest.gyp', - 'third_party/bzip2/bzip2.gyp', - 'third_party/icu38/icu38.gyp', - 'third_party/libevent/libevent.gyp', - 'third_party/libjpeg/libjpeg.gyp', - 'third_party/libpng/libpng.gyp', - 'third_party/libxml/libxml.gyp', - 'third_party/libxslt/libxslt.gyp', - 'third_party/lzma_sdk/lzma_sdk.gyp', - 'third_party/modp_b64/modp_b64.gyp', - 'third_party/npapi/npapi.gyp', - 'third_party/sqlite/sqlite.gyp', - 'third_party/zlib/zlib.gyp', - 'v8/tools/gyp/v8.gyp', - 'webkit/activex_shim/activex_shim.gyp', - 'webkit/activex_shim_dll/activex_shim_dll.gyp', - 'webkit/build/action_csspropertynames.py', - 'webkit/build/action_cssvaluekeywords.py', - 'webkit/build/action_jsconfig.py', - 'webkit/build/action_makenames.py', - 'webkit/build/action_maketokenizer.py', - 'webkit/build/action_useragentstylesheets.py', - 'webkit/build/rule_binding.py', - 'webkit/build/rule_bison.py', - 'webkit/build/rule_gperf.py', - 'webkit/tools/test_shell/test_shell.gyp', - 'webkit/webkit.gyp', -] - - -def Main(argv): - if len(argv) != 3 or argv[1] not in ['push', 'pull']: - print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0] - return 1 - - path_to_chrome = argv[2] - - for g in gyps: - chrome_file = os.path.join(path_to_chrome, g) - local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1]) - if argv[1] == 'push': - print 'Copying %s to %s' % (local_file, chrome_file) - 
shutil.copyfile(local_file, chrome_file) - elif argv[1] == 'pull': - print 'Copying %s to %s' % (chrome_file, local_file) - shutil.copyfile(chrome_file, local_file) - else: - assert False - - return 0 - - -if __name__ == '__main__': - sys.exit(Main(sys.argv)) diff --git a/mozc_build_tools/gyp/samples/samples.bat b/mozc_build_tools/gyp/samples/samples.bat deleted file mode 100644 index 778d9c9..0000000 --- a/mozc_build_tools/gyp/samples/samples.bat +++ /dev/null @@ -1,5 +0,0 @@ -@rem Copyright (c) 2009 Google Inc. All rights reserved. -@rem Use of this source code is governed by a BSD-style license that can be -@rem found in the LICENSE file. - -@python %~dp0/samples %* diff --git a/mozc_build_tools/gyp/setup.py b/mozc_build_tools/gyp/setup.py deleted file mode 100755 index ed2b41a..0000000 --- a/mozc_build_tools/gyp/setup.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from distutils.core import setup -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.command.install_scripts import install_scripts - -setup( - name='gyp', - version='0.1', - description='Generate Your Projects', - author='Chromium Authors', - author_email='chromium-dev@googlegroups.com', - url='http://code.google.com/p/gyp', - package_dir = {'': 'pylib'}, - packages=['gyp', 'gyp.generator'], - - scripts = ['gyp'], - cmdclass = {'install': install, - 'install_lib': install_lib, - 'install_scripts': install_scripts}, -) diff --git a/mozc_build_tools/gyp/test/actions/gyptest-all.py b/mozc_build_tools/gyp/test/actions/gyptest-all.py deleted file mode 100644 index 8db38d5..0000000 --- a/mozc_build_tools/gyp/test/actions/gyptest-all.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple actions when using an explicit build target of 'all'. -""" - -import glob -import os -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('actions.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -# Test that an "always run" action increases a counter on multiple invocations, -# and that a dependent action updates in step. -test.build('actions.gyp', test.ALL, chdir='relocate/src') -test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1') -test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1') -test.build('actions.gyp', test.ALL, chdir='relocate/src') -test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2') -test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') - -# The "always run" action only counts to 2, but the dependent target will count -# forever if it's allowed to run. This verifies that the dependent target only -# runs when the "always run" action generates new output, not just because the -# "always run" ran. 
-test.build('actions.gyp', test.ALL, chdir='relocate/src') -test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2') -test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') - -expect = """\ -Hello from program.c -Hello from make-prog1.py -Hello from make-prog2.py -""" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir1' -else: - chdir = 'relocate/src' -test.run_built_executable('program', chdir=chdir, stdout=expect) - - -test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n") - - -expect = "Hello from generate_main.py\n" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir3' -else: - chdir = 'relocate/src' -test.run_built_executable('null_input', chdir=chdir, stdout=expect) - - -# Clean out files which may have been created if test.ALL was run. -def clean_dep_files(): - for file in (glob.glob('relocate/src/dep_*.txt') + - glob.glob('relocate/src/deps_all_done_*.txt')): - if os.path.exists(file): - os.remove(file) - -# Confirm our clean. -clean_dep_files() -test.must_not_exist('relocate/src/dep_1.txt') -test.must_not_exist('relocate/src/deps_all_done_first_123.txt') - -# Make sure all deps finish before an action is run on a 'None' target. -# If using the Make builder, add -j to make things more difficult. -arguments = [] -if test.format == 'make': - arguments = ['-j'] -test.build('actions.gyp', 'action_with_dependencies_123', chdir='relocate/src', - arguments=arguments) -test.must_exist('relocate/src/deps_all_done_first_123.txt') - -# Try again with a target that has deps in reverse. Output files from -# previous tests deleted. Confirm this execution did NOT run the ALL -# target which would mess up our dep tests. -clean_dep_files() -test.build('actions.gyp', 'action_with_dependencies_321', chdir='relocate/src', - arguments=arguments) -test.must_exist('relocate/src/deps_all_done_first_321.txt') -test.must_not_exist('relocate/src/deps_all_done_first_123.txt') - - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/actions/gyptest-default.py b/mozc_build_tools/gyp/test/actions/gyptest-default.py deleted file mode 100644 index c877867..0000000 --- a/mozc_build_tools/gyp/test/actions/gyptest-default.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple actions when using the default build target. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('actions.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -# Test that an "always run" action increases a counter on multiple invocations, -# and that a dependent action updates in step. -test.build('actions.gyp', chdir='relocate/src') -test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1') -test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1') -test.build('actions.gyp', chdir='relocate/src') -test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2') -test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') - -# The "always run" action only counts to 2, but the dependent target will count -# forever if it's allowed to run. This verifies that the dependent target only -# runs when the "always run" action generates new output, not just because the -# "always run" ran. 
-test.build('actions.gyp', test.ALL, chdir='relocate/src') -test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2') -test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') - -expect = """\ -Hello from program.c -Hello from make-prog1.py -Hello from make-prog2.py -""" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir1' -else: - chdir = 'relocate/src' -test.run_built_executable('program', chdir=chdir, stdout=expect) - - -test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n") - - -expect = "Hello from generate_main.py\n" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir3' -else: - chdir = 'relocate/src' -test.run_built_executable('null_input', chdir=chdir, stdout=expect) - - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/actions/gyptest-errors.py b/mozc_build_tools/gyp/test/actions/gyptest-errors.py deleted file mode 100644 index ca41487..0000000 --- a/mozc_build_tools/gyp/test/actions/gyptest-errors.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies behavior for different action configuration errors: -exit status of 1, and the expected error message must be in stderr. -""" - -import TestGyp - -test = TestGyp.TestGyp() - - -test.run_gyp('action_missing_name.gyp', chdir='src', status=1, stderr=None) -expect = [ - "Anonymous action in target broken_actions2. An action must have an 'action_name' field.", -] -test.must_contain_all_lines(test.stderr(), expect) - - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/actions/src/action_missing_name.gyp b/mozc_build_tools/gyp/test/actions/src/action_missing_name.gyp deleted file mode 100644 index 00424c3..0000000 --- a/mozc_build_tools/gyp/test/actions/src/action_missing_name.gyp +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'broken_actions2', - 'type': 'none', - 'actions': [ - { - 'inputs': [ - 'no_name.input', - ], - 'action': [ - 'python', - '-c', - 'print \'missing name\'', - ], - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/actions/src/actions.gyp b/mozc_build_tools/gyp/test/actions/src/actions.gyp deleted file mode 100644 index 5d2db19..0000000 --- a/mozc_build_tools/gyp/test/actions/src/actions.gyp +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'pull_in_all_actions', - 'type': 'none', - 'dependencies': [ - 'subdir1/executable.gyp:*', - 'subdir2/none.gyp:*', - 'subdir3/null_input.gyp:*', - ], - }, - { - 'target_name': 'depend_on_always_run_action', - 'type': 'none', - 'dependencies': [ 'subdir1/executable.gyp:counter' ], - 'actions': [ - { - 'action_name': 'use_always_run_output', - 'inputs': [ - 'subdir1/actions-out/action-counter.txt', - 'subdir1/counter.py', - ], - 'outputs': [ - 'subdir1/actions-out/action-counter_2.txt', - ], - 'action': [ - 'python', 'subdir1/counter.py', '<(_outputs)', - ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }, - ], - }, - - # Three deps which don't finish immediately. 
- # Each one has a small delay then creates a file. - # Delays are 1.0, 1.1, and 2.0 seconds. - { - 'target_name': 'dep_1', - 'type': 'none', - 'actions': [{ - 'inputs': [ 'actions.gyp' ], - 'outputs': [ 'dep_1.txt' ], - 'action_name': 'dep_1', - 'action': [ 'python', '-c', - 'import time; time.sleep(1); open(\'dep_1.txt\', \'w\')' ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }], - }, - { - 'target_name': 'dep_2', - 'type': 'none', - 'actions': [{ - 'inputs': [ 'actions.gyp' ], - 'outputs': [ 'dep_2.txt' ], - 'action_name': 'dep_2', - 'action': [ 'python', '-c', - 'import time; time.sleep(1.1); open(\'dep_2.txt\', \'w\')' ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }], - }, - { - 'target_name': 'dep_3', - 'type': 'none', - 'actions': [{ - 'inputs': [ 'actions.gyp' ], - 'outputs': [ 'dep_3.txt' ], - 'action_name': 'dep_3', - 'action': [ 'python', '-c', - 'import time; time.sleep(2.0); open(\'dep_3.txt\', \'w\')' ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }], - }, - - # An action which assumes the deps have completed. - # Does NOT list the output files of it's deps as inputs. - # On success create the file deps_all_done_first.txt. - { - 'target_name': 'action_with_dependencies_123', - 'type': 'none', - 'dependencies': [ 'dep_1', 'dep_2', 'dep_3' ], - 'actions': [{ - 'inputs': [ 'actions.gyp' ], - 'outputs': [ 'deps_all_done_first_123.txt' ], - 'action_name': 'action_with_dependencies_123', - 'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }], - }, - # Same as above but with deps in reverse. - { - 'target_name': 'action_with_dependencies_321', - 'type': 'none', - 'dependencies': [ 'dep_3', 'dep_2', 'dep_1' ], - 'actions': [{ - 'inputs': [ 'actions.gyp' ], - 'outputs': [ 'deps_all_done_first_321.txt' ], - 'action_name': 'action_with_dependencies_321', - 'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }], - }, - - ], -} diff --git a/mozc_build_tools/gyp/test/actions/src/confirm-dep-files.py b/mozc_build_tools/gyp/test/actions/src/confirm-dep-files.py deleted file mode 100644 index 34efe28..0000000 --- a/mozc_build_tools/gyp/test/actions/src/confirm-dep-files.py +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/python - -# Confirm presence of files generated by our targets we depend on. -# If they exist, create a new file. -# -# Note target's input files are explicitly NOT defined in the gyp file -# so they can't easily be passed to this script as args. - -import os -import sys - -outfile = sys.argv[1] # Example value we expect: deps_all_done_first_123.txt -if (os.path.exists("dep_1.txt") and - os.path.exists("dep_2.txt") and - os.path.exists("dep_3.txt")): - open(outfile, "w") diff --git a/mozc_build_tools/gyp/test/actions/src/subdir1/counter.py b/mozc_build_tools/gyp/test/actions/src/subdir1/counter.py deleted file mode 100644 index 3612d7d..0000000 --- a/mozc_build_tools/gyp/test/actions/src/subdir1/counter.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import sys -import time - -output = sys.argv[1] -persistoutput = "%s.persist" % sys.argv[1] - -count = 0 -try: - count = open(persistoutput, 'r').read() -except: - pass -count = int(count) + 1 - -if len(sys.argv) > 2: - max_count = int(sys.argv[2]) - if count > max_count: - count = max_count - -oldcount = 0 -try: - oldcount = open(output, 'r').read() -except: - pass - -# Save the count in a file that is undeclared, and thus hidden, to gyp. We need -# to do this because, prior to running commands, scons deletes any declared -# outputs, so we would lose our count if we just wrote to the given output file. -# (The other option is to use Precious() in the scons generator, but that seems -# too heavy-handed just to support this somewhat unrealistic test case, and -# might lead to unintended side-effects). -open(persistoutput, 'w').write('%d' % (count)) - -# Only write the given output file if the count has changed. -if int(oldcount) != count: - open(output, 'w').write('%d' % (count)) - # Sleep so the next run changes the file time sufficiently to make the build - # detect the file as changed. - time.sleep(1) - -sys.exit(0) diff --git a/mozc_build_tools/gyp/test/actions/src/subdir1/executable.gyp b/mozc_build_tools/gyp/test/actions/src/subdir1/executable.gyp deleted file mode 100644 index 6a1ce4f..0000000 --- a/mozc_build_tools/gyp/test/actions/src/subdir1/executable.gyp +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'program.c', - ], - 'actions': [ - { - 'action_name': 'make-prog1', - 'inputs': [ - 'make-prog1.py', - ], - 'outputs': [ - '<(INTERMEDIATE_DIR)/prog1.c', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - { - 'action_name': 'make-prog2', - 'inputs': [ - 'make-prog2.py', - ], - 'outputs': [ - 'actions-out/prog2.c', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }, - ], - }, - { - 'target_name': 'counter', - 'type': 'none', - 'actions': [ - { - # This action should always run, regardless of whether or not it's - # inputs or the command-line change. We do this by creating a dummy - # first output, which is always missing, thus causing the build to - # always try to recreate it. Actual output files should be listed - # after the dummy one, and dependent targets should list the real - # output(s) in their inputs - # (see '../actions.gyp:depend_on_always_run_action'). - 'action_name': 'action_counter', - 'inputs': [ - 'counter.py', - ], - 'outputs': [ - 'actions-out/action-counter.txt.always', - 'actions-out/action-counter.txt', - ], - 'action': [ - 'python', '<(_inputs)', 'actions-out/action-counter.txt', '2', - ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/actions/src/subdir1/make-prog1.py b/mozc_build_tools/gyp/test/actions/src/subdir1/make-prog1.py deleted file mode 100644 index 7ea1d8a..0000000 --- a/mozc_build_tools/gyp/test/actions/src/subdir1/make-prog1.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = r""" -#include - -void prog1(void) -{ - printf("Hello from make-prog1.py\n"); -} -""" - -open(sys.argv[1], 'w').write(contents) - -sys.exit(0) diff --git a/mozc_build_tools/gyp/test/actions/src/subdir1/make-prog2.py b/mozc_build_tools/gyp/test/actions/src/subdir1/make-prog2.py deleted file mode 100644 index 0bfe497..0000000 --- a/mozc_build_tools/gyp/test/actions/src/subdir1/make-prog2.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = r""" -#include - -void prog2(void) -{ - printf("Hello from make-prog2.py\n"); -} -""" - -open(sys.argv[1], 'w').write(contents) - -sys.exit(0) diff --git a/mozc_build_tools/gyp/test/actions/src/subdir1/program.c b/mozc_build_tools/gyp/test/actions/src/subdir1/program.c deleted file mode 100644 index d5f661d..0000000 --- a/mozc_build_tools/gyp/test/actions/src/subdir1/program.c +++ /dev/null @@ -1,12 +0,0 @@ -#include - -extern void prog1(void); -extern void prog2(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from program.c\n"); - prog1(); - prog2(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/actions/src/subdir2/make-file.py b/mozc_build_tools/gyp/test/actions/src/subdir2/make-file.py deleted file mode 100644 index fff0653..0000000 --- a/mozc_build_tools/gyp/test/actions/src/subdir2/make-file.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = "Hello from make-file.py\n" - -open(sys.argv[1], 'wb').write(contents) diff --git a/mozc_build_tools/gyp/test/actions/src/subdir2/none.gyp b/mozc_build_tools/gyp/test/actions/src/subdir2/none.gyp deleted file mode 100644 index 2caa97d..0000000 --- a/mozc_build_tools/gyp/test/actions/src/subdir2/none.gyp +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'file', - 'type': 'none', - 'msvs_cygwin_shell': 0, - 'actions': [ - { - 'action_name': 'make-file', - 'inputs': [ - 'make-file.py', - ], - 'outputs': [ - 'file.out', - # TODO: enhance testing infrastructure to test this - # without having to hard-code the intermediate dir paths. - #'<(INTERMEDIATE_DIR)/file.out', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - } - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/actions/src/subdir3/generate_main.py b/mozc_build_tools/gyp/test/actions/src/subdir3/generate_main.py deleted file mode 100644 index b90b3aa..0000000 --- a/mozc_build_tools/gyp/test/actions/src/subdir3/generate_main.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import sys - -contents = """ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from generate_main.py\\n"); - return 0; -} -""" - -open(sys.argv[1], 'w').write(contents) - -sys.exit(0) diff --git a/mozc_build_tools/gyp/test/actions/src/subdir3/null_input.gyp b/mozc_build_tools/gyp/test/actions/src/subdir3/null_input.gyp deleted file mode 100644 index 9b0bea5..0000000 --- a/mozc_build_tools/gyp/test/actions/src/subdir3/null_input.gyp +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'null_input', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'actions': [ - { - 'action_name': 'generate_main', - 'process_outputs_as_sources': 1, - 'inputs': [], - 'outputs': [ - '<(INTERMEDIATE_DIR)/main.c', - ], - 'action': [ - # TODO: we can't just use <(_outputs) here?! - 'python', 'generate_main.py', '<(INTERMEDIATE_DIR)/main.c', - ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/actions-bare/gyptest-bare.py b/mozc_build_tools/gyp/test/actions-bare/gyptest-bare.py deleted file mode 100644 index b0c1093..0000000 --- a/mozc_build_tools/gyp/test/actions-bare/gyptest-bare.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies actions which are not depended on by other targets get executed. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('bare.gyp', chdir='src') -test.relocate('src', 'relocate/src') -test.build('bare.gyp', chdir='relocate/src') - -file_content = 'Hello from bare.py\n' - -test.built_file_must_match('out.txt', file_content, chdir='relocate/src') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/actions-bare/src/bare.gyp b/mozc_build_tools/gyp/test/actions-bare/src/bare.gyp deleted file mode 100644 index 3d28f09..0000000 --- a/mozc_build_tools/gyp/test/actions-bare/src/bare.gyp +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'bare', - 'type': 'none', - 'actions': [ - { - 'action_name': 'action1', - 'inputs': [ - 'bare.py', - ], - 'outputs': [ - '<(PRODUCT_DIR)/out.txt', - ], - 'action': ['python', 'bare.py', '<(PRODUCT_DIR)/out.txt'], - 'msvs_cygwin_shell': 0, - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/actions-bare/src/bare.py b/mozc_build_tools/gyp/test/actions-bare/src/bare.py deleted file mode 100644 index 970450e..0000000 --- a/mozc_build_tools/gyp/test/actions-bare/src/bare.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import sys - -f = open(sys.argv[1], 'wb') -f.write('Hello from bare.py\n') -f.close() diff --git a/mozc_build_tools/gyp/test/actions-subdir/gyptest-action.py b/mozc_build_tools/gyp/test/actions-subdir/gyptest-action.py deleted file mode 100644 index 09cfef1..0000000 --- a/mozc_build_tools/gyp/test/actions-subdir/gyptest-action.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Test actions that output to PRODUCT_DIR. -""" - -import TestGyp - -# TODO fix this for xcode: http://code.google.com/p/gyp/issues/detail?id=88 -test = TestGyp.TestGyp(formats=['!xcode']) - -test.run_gyp('none.gyp', chdir='src') - -test.build('none.gyp', test.ALL, chdir='src') - -file_content = 'Hello from make-file.py\n' -subdir_file_content = 'Hello from make-subdir-file.py\n' - -test.built_file_must_match('file.out', file_content, chdir='src') -test.built_file_must_match('subdir_file.out', subdir_file_content, chdir='src') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/actions-subdir/src/make-file.py b/mozc_build_tools/gyp/test/actions-subdir/src/make-file.py deleted file mode 100644 index 74e5581..0000000 --- a/mozc_build_tools/gyp/test/actions-subdir/src/make-file.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = 'Hello from make-file.py\n' - -open(sys.argv[1], 'wb').write(contents) diff --git a/mozc_build_tools/gyp/test/actions-subdir/src/none.gyp b/mozc_build_tools/gyp/test/actions-subdir/src/none.gyp deleted file mode 100644 index 23f8d25..0000000 --- a/mozc_build_tools/gyp/test/actions-subdir/src/none.gyp +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'file', - 'type': 'none', - 'msvs_cygwin_shell': 0, - 'actions': [ - { - 'action_name': 'make-file', - 'inputs': [ - 'make-file.py', - ], - 'outputs': [ - '<(PRODUCT_DIR)/file.out', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - } - ], - 'dependencies': [ - 'subdir/subdir.gyp:subdir_file', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/actions-subdir/src/subdir/make-subdir-file.py b/mozc_build_tools/gyp/test/actions-subdir/src/subdir/make-subdir-file.py deleted file mode 100644 index 80ce19a..0000000 --- a/mozc_build_tools/gyp/test/actions-subdir/src/subdir/make-subdir-file.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = 'Hello from make-subdir-file.py\n' - -open(sys.argv[1], 'wb').write(contents) diff --git a/mozc_build_tools/gyp/test/actions-subdir/src/subdir/subdir.gyp b/mozc_build_tools/gyp/test/actions-subdir/src/subdir/subdir.gyp deleted file mode 100644 index 0315d4e..0000000 --- a/mozc_build_tools/gyp/test/actions-subdir/src/subdir/subdir.gyp +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'subdir_file', - 'type': 'none', - 'msvs_cygwin_shell': 0, - 'actions': [ - { - 'action_name': 'make-subdir-file', - 'inputs': [ - 'make-subdir-file.py', - ], - 'outputs': [ - '<(PRODUCT_DIR)/subdir_file.out', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - } - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/additional-targets/gyptest-additional.py b/mozc_build_tools/gyp/test/additional-targets/gyptest-additional.py deleted file mode 100644 index 02e7d7a..0000000 --- a/mozc_build_tools/gyp/test/additional-targets/gyptest-additional.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple actions when using an explicit build target of 'all'. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('all.gyp', chdir='src') -test.relocate('src', 'relocate/src') - -# Build all. -test.build('all.gyp', chdir='relocate/src') - -if test.format=='xcode': - chdir = 'relocate/src/dir1' -else: - chdir = 'relocate/src' - -# Output is as expected. -file_content = 'Hello from emit.py\n' -test.built_file_must_match('out2.txt', file_content, chdir=chdir) - -test.built_file_must_not_exist('out.txt', chdir='relocate/src') -test.built_file_must_not_exist('foolib1', - type=test.SHARED_LIB, - chdir=chdir) - -# TODO(mmoss) Make consistent with scons, with 'dir1' before 'out/Default'? -if test.format == 'make': - chdir='relocate/src' -else: - chdir='relocate/src/dir1' - -# Build the action explicitly. -test.build('actions.gyp', 'action1_target', chdir=chdir) - -# Check that things got run. -file_content = 'Hello from emit.py\n' -test.built_file_must_exist('out.txt', chdir=chdir) - -# Build the shared library explicitly. -test.build('actions.gyp', 'foolib1', chdir=chdir) - -test.built_file_must_exist('foolib1', - type=test.SHARED_LIB, - chdir=chdir) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/additional-targets/src/all.gyp b/mozc_build_tools/gyp/test/additional-targets/src/all.gyp deleted file mode 100644 index 21c8308..0000000 --- a/mozc_build_tools/gyp/test/additional-targets/src/all.gyp +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'all_targets', - 'type': 'none', - 'dependencies': ['dir1/actions.gyp:*'], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/additional-targets/src/dir1/actions.gyp b/mozc_build_tools/gyp/test/additional-targets/src/dir1/actions.gyp deleted file mode 100644 index 5089c80..0000000 --- a/mozc_build_tools/gyp/test/additional-targets/src/dir1/actions.gyp +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'action1_target', - 'type': 'none', - 'suppress_wildcard': 1, - 'actions': [ - { - 'action_name': 'action1', - 'inputs': [ - 'emit.py', - ], - 'outputs': [ - '<(PRODUCT_DIR)/out.txt', - ], - 'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out.txt'], - 'msvs_cygwin_shell': 0, - }, - ], - }, - { - 'target_name': 'action2_target', - 'type': 'none', - 'actions': [ - { - 'action_name': 'action2', - 'inputs': [ - 'emit.py', - ], - 'outputs': [ - '<(PRODUCT_DIR)/out2.txt', - ], - 'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out2.txt'], - 'msvs_cygwin_shell': 0, - }, - ], - }, - { - 'target_name': 'foolib1', - 'type': 'shared_library', - 'suppress_wildcard': 1, - 'sources': ['lib1.c'], - }, - ], - 'conditions': [ - ['OS=="linux"', { - 'target_defaults': { - 'cflags': ['-fPIC'], - }, - }], - ], -} diff --git a/mozc_build_tools/gyp/test/additional-targets/src/dir1/emit.py b/mozc_build_tools/gyp/test/additional-targets/src/dir1/emit.py deleted file mode 100644 index 5638c43..0000000 --- a/mozc_build_tools/gyp/test/additional-targets/src/dir1/emit.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -f = open(sys.argv[1], 'wb') -f.write('Hello from emit.py\n') -f.close() diff --git a/mozc_build_tools/gyp/test/additional-targets/src/dir1/lib1.c b/mozc_build_tools/gyp/test/additional-targets/src/dir1/lib1.c deleted file mode 100644 index df4cb10..0000000 --- a/mozc_build_tools/gyp/test/additional-targets/src/dir1/lib1.c +++ /dev/null @@ -1,6 +0,0 @@ -#ifdef _WIN32 -__declspec(dllexport) -#endif -int func1(void) { - return 42; -} diff --git a/mozc_build_tools/gyp/test/assembly/gyptest-assembly.py b/mozc_build_tools/gyp/test/assembly/gyptest-assembly.py deleted file mode 100644 index 40d0a06..0000000 --- a/mozc_build_tools/gyp/test/assembly/gyptest-assembly.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that .hpp files are ignored when included in the source list on all -platforms. -""" - -import sys -import TestGyp - -# TODO(bradnelson): get this working for windows. -test = TestGyp.TestGyp(formats=['make', 'scons', 'xcode']) - -test.run_gyp('assembly.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('assembly.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from program.c -Got 42. -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/assembly/src/as.bat b/mozc_build_tools/gyp/test/assembly/src/as.bat deleted file mode 100644 index 0a47382..0000000 --- a/mozc_build_tools/gyp/test/assembly/src/as.bat +++ /dev/null @@ -1,4 +0,0 @@ -@echo off -:: Mock windows assembler. -cl /c %1 /Fo"%2" - diff --git a/mozc_build_tools/gyp/test/assembly/src/assembly.gyp b/mozc_build_tools/gyp/test/assembly/src/assembly.gyp deleted file mode 100644 index 872dd5e..0000000 --- a/mozc_build_tools/gyp/test/assembly/src/assembly.gyp +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'target_defaults': { - 'conditions': [ - ['OS=="win"', { - 'defines': ['PLATFORM_WIN'], - }], - ['OS=="mac"', { - 'defines': ['PLATFORM_MAC'], - }], - ['OS=="linux"', { - 'defines': ['PLATFORM_LINUX'], - }], - ], - }, - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'dependencies': ['lib1'], - 'sources': [ - 'program.c', - ], - }, - { - 'target_name': 'lib1', - 'type': 'static_library', - 'sources': [ - 'lib1.S', - ], - }, - ], - 'conditions': [ - ['OS=="win"', { - 'target_defaults': { - 'rules': [ - { - 'rule_name': 'assembler', - 'msvs_cygwin_shell': 0, - 'extension': 'S', - 'inputs': [ - 'as.bat', - ], - 'outputs': [ - '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).obj', - ], - 'action': - ['as.bat', 'lib1.c', '<(_outputs)'], - 'message': 'Building assembly file <(RULE_INPUT_PATH)', - 'process_outputs_as_sources': 1, - }, - ], - }, - },], - ], -} diff --git a/mozc_build_tools/gyp/test/assembly/src/lib1.S b/mozc_build_tools/gyp/test/assembly/src/lib1.S deleted file mode 100644 index e7102bf..0000000 --- a/mozc_build_tools/gyp/test/assembly/src/lib1.S +++ /dev/null @@ -1,10 +0,0 @@ -#if PLATFORM_WINDOWS || PLATFORM_MAC -# define IDENTIFIER(n) _##n -#else /* Linux */ -# define IDENTIFIER(n) n -#endif - -.globl IDENTIFIER(lib1_function) -IDENTIFIER(lib1_function): - movl $42, %eax - ret diff --git a/mozc_build_tools/gyp/test/assembly/src/lib1.c b/mozc_build_tools/gyp/test/assembly/src/lib1.c deleted file mode 100644 index be21ecd..0000000 --- a/mozc_build_tools/gyp/test/assembly/src/lib1.c +++ /dev/null @@ -1,3 +0,0 @@ -int lib1_function(void) { - return 42; -} diff --git a/mozc_build_tools/gyp/test/assembly/src/program.c b/mozc_build_tools/gyp/test/assembly/src/program.c deleted file mode 100644 index ecce3b0..0000000 --- a/mozc_build_tools/gyp/test/assembly/src/program.c +++ /dev/null @@ -1,12 +0,0 @@ -#include - -extern int lib1_function(void); - -int main(int argc, char *argv[]) -{ - fprintf(stdout, "Hello from program.c\n"); - fflush(stdout); - fprintf(stdout, "Got %d.\n", lib1_function()); - fflush(stdout); - return 0; -} diff --git a/mozc_build_tools/gyp/test/builddir/gyptest-all.py b/mozc_build_tools/gyp/test/builddir/gyptest-all.py deleted file mode 100644 index 324d7fc..0000000 --- a/mozc_build_tools/gyp/test/builddir/gyptest-all.py +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verify the settings that cause a set of programs to be created in -a specific build directory, and that no intermediate built files -get created outside of that build directory hierarchy even when -referred to with deeply-nested ../../.. paths. -""" - -import TestGyp - -# TODO(mmoss): Make only supports (theoretically) a single, global build -# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than -# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other -# generators support, so this doesn't work yet for make. -# TODO(mmoss) Make also has the issue that the top-level Makefile is written to -# the "--depth" location, which is one level above 'src', but then this test -# moves 'src' somewhere else, leaving the Makefile behind, so make can't find -# its sources. I'm not sure if make is wrong for writing outside the current -# directory, or if the test is wrong for assuming everything generated is under -# the current directory. 
-test = TestGyp.TestGyp(formats=['!make']) - -test.run_gyp('prog1.gyp', '--depth=..', chdir='src') - -test.relocate('src', 'relocate/src') - -test.subdir('relocate/builddir') - -# Make sure that all the built ../../etc. files only get put under builddir, -# by making all of relocate read-only and then making only builddir writable. -test.writable('relocate', False) -test.writable('relocate/builddir', True) - -# Suppress the test infrastructure's setting SYMROOT on the command line. -test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src') - -expect1 = """\ -Hello from prog1.c -Hello from func1.c -""" - -expect2 = """\ -Hello from subdir2/prog2.c -Hello from func2.c -""" - -expect3 = """\ -Hello from subdir2/subdir3/prog3.c -Hello from func3.c -""" - -expect4 = """\ -Hello from subdir2/subdir3/subdir4/prog4.c -Hello from func4.c -""" - -expect5 = """\ -Hello from subdir2/subdir3/subdir4/subdir5/prog5.c -Hello from func5.c -""" - -def run_builddir(prog, expect): - dir = 'relocate/builddir/Default/' - test.run(program=test.workpath(dir + prog), stdout=expect) - -run_builddir('prog1', expect1) -run_builddir('prog2', expect2) -run_builddir('prog3', expect3) -run_builddir('prog4', expect4) -run_builddir('prog5', expect5) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/builddir/gyptest-default.py b/mozc_build_tools/gyp/test/builddir/gyptest-default.py deleted file mode 100644 index 6171d15..0000000 --- a/mozc_build_tools/gyp/test/builddir/gyptest-default.py +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verify the settings that cause a set of programs to be created in -a specific build directory, and that no intermediate built files -get created outside of that build directory hierarchy even when -referred to with deeply-nested ../../.. paths. -""" - -import TestGyp - -# TODO(mmoss): Make only supports (theoretically) a single, global build -# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than -# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other -# generators support, so this doesn't work yet for make. -# TODO(mmoss) Make also has the issue that the top-level Makefile is written to -# the "--depth" location, which is one level above 'src', but then this test -# moves 'src' somewhere else, leaving the Makefile behind, so make can't find -# its sources. I'm not sure if make is wrong for writing outside the current -# directory, or if the test is wrong for assuming everything generated is under -# the current directory. -test = TestGyp.TestGyp(formats=['!make']) - -test.run_gyp('prog1.gyp', '--depth=..', chdir='src') - -test.relocate('src', 'relocate/src') - -test.subdir('relocate/builddir') - -# Make sure that all the built ../../etc. files only get put under builddir, -# by making all of relocate read-only and then making only builddir writable. -test.writable('relocate', False) -test.writable('relocate/builddir', True) - -# Suppress the test infrastructure's setting SYMROOT on the command line. 
-test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src') - -expect1 = """\ -Hello from prog1.c -Hello from func1.c -""" - -expect2 = """\ -Hello from subdir2/prog2.c -Hello from func2.c -""" - -expect3 = """\ -Hello from subdir2/subdir3/prog3.c -Hello from func3.c -""" - -expect4 = """\ -Hello from subdir2/subdir3/subdir4/prog4.c -Hello from func4.c -""" - -expect5 = """\ -Hello from subdir2/subdir3/subdir4/subdir5/prog5.c -Hello from func5.c -""" - -def run_builddir(prog, expect): - dir = 'relocate/builddir/Default/' - test.run(program=test.workpath(dir + prog), stdout=expect) - -run_builddir('prog1', expect1) -run_builddir('prog2', expect2) -run_builddir('prog3', expect3) -run_builddir('prog4', expect4) -run_builddir('prog5', expect5) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/builddir/src/builddir.gypi b/mozc_build_tools/gyp/test/builddir/src/builddir.gypi deleted file mode 100644 index e3c6147..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/builddir.gypi +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'target_defaults': { - 'configurations': { - 'Default': { - 'msvs_configuration_attributes': { - 'OutputDirectory': '<(DEPTH)\\builddir\Default', - }, - }, - }, - }, - 'scons_settings': { - 'sconsbuild_dir': '<(DEPTH)/builddir', - }, - 'xcode_settings': { - 'SYMROOT': '<(DEPTH)/builddir', - }, -} diff --git a/mozc_build_tools/gyp/test/builddir/src/func1.c b/mozc_build_tools/gyp/test/builddir/src/func1.c deleted file mode 100644 index b8e6a06..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/func1.c +++ /dev/null @@ -1,6 +0,0 @@ -#include <stdio.h> - -void func1(void) -{ - printf("Hello from func1.c\n"); -} diff --git a/mozc_build_tools/gyp/test/builddir/src/func2.c b/mozc_build_tools/gyp/test/builddir/src/func2.c deleted file mode 100644 index 14aabac..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/func2.c +++ /dev/null @@ -1,6 +0,0 @@ -#include <stdio.h> - -void func2(void) -{ - printf("Hello from func2.c\n"); -} diff --git a/mozc_build_tools/gyp/test/builddir/src/func3.c b/mozc_build_tools/gyp/test/builddir/src/func3.c deleted file mode 100644 index 3b4edea..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/func3.c +++ /dev/null @@ -1,6 +0,0 @@ -#include <stdio.h> - -void func3(void) -{ - printf("Hello from func3.c\n"); -} diff --git a/mozc_build_tools/gyp/test/builddir/src/func4.c b/mozc_build_tools/gyp/test/builddir/src/func4.c deleted file mode 100644 index 732891b..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/func4.c +++ /dev/null @@ -1,6 +0,0 @@ -#include <stdio.h> - -void func4(void) -{ - printf("Hello from func4.c\n"); -} diff --git a/mozc_build_tools/gyp/test/builddir/src/func5.c b/mozc_build_tools/gyp/test/builddir/src/func5.c deleted file mode 100644 index 18fdfab..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/func5.c +++ /dev/null @@ -1,6 +0,0 @@ -#include <stdio.h> - -void func5(void) -{ - printf("Hello from func5.c\n"); -} diff --git a/mozc_build_tools/gyp/test/builddir/src/prog1.c b/mozc_build_tools/gyp/test/builddir/src/prog1.c deleted file mode 100644 index 674ca74..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/prog1.c +++ /dev/null @@ -1,10 +0,0 @@ -#include <stdio.h> - -extern void func1(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from prog1.c\n"); - func1(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/builddir/src/prog1.gyp b/mozc_build_tools/gyp/test/builddir/src/prog1.gyp deleted file mode 
100644 index 5b96f03..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/prog1.gyp +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - 'builddir.gypi', - ], - 'targets': [ - { - 'target_name': 'pull_in_all', - 'type': 'none', - 'dependencies': [ - 'prog1', - 'subdir2/prog2.gyp:prog2', - 'subdir2/subdir3/prog3.gyp:prog3', - 'subdir2/subdir3/subdir4/prog4.gyp:prog4', - 'subdir2/subdir3/subdir4/subdir5/prog5.gyp:prog5', - ], - }, - { - 'target_name': 'prog1', - 'type': 'executable', - 'sources': [ - 'prog1.c', - 'func1.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/builddir/src/subdir2/prog2.c b/mozc_build_tools/gyp/test/builddir/src/subdir2/prog2.c deleted file mode 100644 index bbdf4f0..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/subdir2/prog2.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void func2(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from subdir2/prog2.c\n"); - func2(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/builddir/src/subdir2/prog2.gyp b/mozc_build_tools/gyp/test/builddir/src/subdir2/prog2.gyp deleted file mode 100644 index 96299b6..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/subdir2/prog2.gyp +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../builddir.gypi', - ], - 'targets': [ - { - 'target_name': 'prog2', - 'type': 'executable', - 'sources': [ - 'prog2.c', - '../func2.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/prog3.c b/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/prog3.c deleted file mode 100644 index 10c530b..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/prog3.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void func3(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from subdir2/subdir3/prog3.c\n"); - func3(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp b/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp deleted file mode 100644 index d7df43c..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'includes': [ - '../../builddir.gypi', - ], - 'targets': [ - { - 'target_name': 'prog3', - 'type': 'executable', - 'sources': [ - 'prog3.c', - '../../func3.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c b/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c deleted file mode 100644 index dcba9a9..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void func4(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from subdir2/subdir3/subdir4/prog4.c\n"); - func4(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp b/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp deleted file mode 100644 index 862a8a1..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../../../builddir.gypi', - ], - 'targets': [ - { - 'target_name': 'prog4', - 'type': 'executable', - 'sources': [ - 'prog4.c', - '../../../func4.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c b/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c deleted file mode 100644 index 69132e5..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void func5(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from subdir2/subdir3/subdir4/subdir5/prog5.c\n"); - func5(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp b/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp deleted file mode 100644 index fe1c9cb..0000000 --- a/mozc_build_tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../../../../builddir.gypi', - ], - 'targets': [ - { - 'target_name': 'prog5', - 'type': 'executable', - 'sources': [ - 'prog5.c', - '../../../../func5.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/compilable/gyptest-headers.py b/mozc_build_tools/gyp/test/compilable/gyptest-headers.py deleted file mode 100644 index 9176021..0000000 --- a/mozc_build_tools/gyp/test/compilable/gyptest-headers.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that .hpp files are ignored when included in the source list on all -platforms. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('headers.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('headers.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from program.c -Hello from lib1.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/compilable/src/headers.gyp b/mozc_build_tools/gyp/test/compilable/src/headers.gyp deleted file mode 100644 index b6c2a88..0000000 --- a/mozc_build_tools/gyp/test/compilable/src/headers.gyp +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'dependencies': [ - 'lib1' - ], - 'sources': [ - 'program.cpp', - ], - }, - { - 'target_name': 'lib1', - 'type': 'static_library', - 'sources': [ - 'lib1.hpp', - 'lib1.cpp', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/compilable/src/lib1.cpp b/mozc_build_tools/gyp/test/compilable/src/lib1.cpp deleted file mode 100644 index 51bc31a..0000000 --- a/mozc_build_tools/gyp/test/compilable/src/lib1.cpp +++ /dev/null @@ -1,7 +0,0 @@ -#include -#include "lib1.hpp" - -void lib1_function(void) { - fprintf(stdout, "Hello from lib1.c\n"); - fflush(stdout); -} diff --git a/mozc_build_tools/gyp/test/compilable/src/lib1.hpp b/mozc_build_tools/gyp/test/compilable/src/lib1.hpp deleted file mode 100644 index 72e63e8..0000000 --- a/mozc_build_tools/gyp/test/compilable/src/lib1.hpp +++ /dev/null @@ -1,6 +0,0 @@ -#ifndef _lib1_hpp -#define _lib1_hpp - -extern void lib1_function(void); - -#endif diff --git a/mozc_build_tools/gyp/test/compilable/src/program.cpp b/mozc_build_tools/gyp/test/compilable/src/program.cpp deleted file mode 100644 index 81420ba..0000000 --- a/mozc_build_tools/gyp/test/compilable/src/program.cpp +++ /dev/null @@ -1,9 +0,0 @@ -#include -#include "lib1.hpp" - -int main(int argc, char *argv[]) { - fprintf(stdout, "Hello from program.c\n"); - fflush(stdout); - lib1_function(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/configurations/basics/configurations.c b/mozc_build_tools/gyp/test/configurations/basics/configurations.c deleted file mode 100644 index 6c1f900..0000000 --- a/mozc_build_tools/gyp/test/configurations/basics/configurations.c +++ /dev/null @@ -1,15 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ -#ifdef FOO - printf("Foo configuration\n"); -#endif -#ifdef DEBUG - printf("Debug configuration\n"); -#endif -#ifdef RELEASE - printf("Release configuration\n"); -#endif - return 0; -} diff --git a/mozc_build_tools/gyp/test/configurations/basics/configurations.gyp b/mozc_build_tools/gyp/test/configurations/basics/configurations.gyp deleted file mode 100644 index 93f1d8d..0000000 --- a/mozc_build_tools/gyp/test/configurations/basics/configurations.gyp +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'executable', - 'sources': [ - 'configurations.c', - ], - 'configurations': { - 'Debug': { - 'defines': [ - 'DEBUG', - ], - }, - 'Release': { - 'defines': [ - 'RELEASE', - ], - }, - 'Foo': { - 'defines': [ - 'FOO', - ], - }, - } - }, - ], -} diff --git a/mozc_build_tools/gyp/test/configurations/basics/gyptest-configurations.py b/mozc_build_tools/gyp/test/configurations/basics/gyptest-configurations.py deleted file mode 100644 index 27cd2e8..0000000 --- a/mozc_build_tools/gyp/test/configurations/basics/gyptest-configurations.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable in three different configurations. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('configurations.gyp') - -test.set_configuration('Release') -test.build('configurations.gyp') -test.run_built_executable('configurations', stdout="Release configuration\n") - -test.set_configuration('Debug') -test.build('configurations.gyp') -test.run_built_executable('configurations', stdout="Debug configuration\n") - -test.set_configuration('Foo') -test.build('configurations.gyp') -test.run_built_executable('configurations', stdout="Foo configuration\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/configurations/inheritance/configurations.c b/mozc_build_tools/gyp/test/configurations/inheritance/configurations.c deleted file mode 100644 index 2d5565e..0000000 --- a/mozc_build_tools/gyp/test/configurations/inheritance/configurations.c +++ /dev/null @@ -1,21 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ -#ifdef BASE - printf("Base configuration\n"); -#endif -#ifdef COMMON - printf("Common configuration\n"); -#endif -#ifdef COMMON2 - printf("Common2 configuration\n"); -#endif -#ifdef DEBUG - printf("Debug configuration\n"); -#endif -#ifdef RELEASE - printf("Release configuration\n"); -#endif - return 0; -} diff --git a/mozc_build_tools/gyp/test/configurations/inheritance/configurations.gyp b/mozc_build_tools/gyp/test/configurations/inheritance/configurations.gyp deleted file mode 100644 index 9441376..0000000 --- a/mozc_build_tools/gyp/test/configurations/inheritance/configurations.gyp +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'target_defaults': { - 'configurations': { - 'Base': { - 'abstract': 1, - 'defines': ['BASE'], - }, - 'Common': { - 'abstract': 1, - 'inherit_from': ['Base'], - 'defines': ['COMMON'], - }, - 'Common2': { - 'abstract': 1, - 'defines': ['COMMON2'], - }, - 'Debug': { - 'inherit_from': ['Common', 'Common2'], - 'defines': ['DEBUG'], - }, - 'Release': { - 'inherit_from': ['Common', 'Common2'], - 'defines': ['RELEASE'], - }, - }, - }, - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'executable', - 'sources': [ - 'configurations.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/configurations/inheritance/gyptest-inheritance.py b/mozc_build_tools/gyp/test/configurations/inheritance/gyptest-inheritance.py deleted file mode 100644 index 22c73a3..0000000 --- a/mozc_build_tools/gyp/test/configurations/inheritance/gyptest-inheritance.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. 
All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable in three different configurations. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('configurations.gyp') - -test.set_configuration('Release') -test.build('configurations.gyp') -test.run_built_executable('configurations', - stdout=('Base configuration\n' - 'Common configuration\n' - 'Common2 configuration\n' - 'Release configuration\n')) - -test.set_configuration('Debug') -test.build('configurations.gyp') -test.run_built_executable('configurations', - stdout=('Base configuration\n' - 'Common configuration\n' - 'Common2 configuration\n' - 'Debug configuration\n')) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/configurations/target_platform/configurations.gyp b/mozc_build_tools/gyp/test/configurations/target_platform/configurations.gyp deleted file mode 100644 index d15429f..0000000 --- a/mozc_build_tools/gyp/test/configurations/target_platform/configurations.gyp +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'target_defaults': { - 'configurations': { - 'Debug_Win32': { - 'msvs_configuration_platform': 'Win32', - }, - 'Debug_x64': { - 'msvs_configuration_platform': 'x64', - }, - }, - }, - 'targets': [ - { - 'target_name': 'left', - 'type': 'static_library', - 'sources': [ - 'left.c', - ], - 'configurations': { - 'Debug_Win32': { - 'msvs_target_platform': 'x64', - }, - }, - }, - { - 'target_name': 'right', - 'type': 'static_library', - 'sources': [ - 'right.c', - ], - }, - { - 'target_name': 'front_left', - 'type': 'executable', - 'dependencies': ['left'], - 'sources': [ - 'front.c', - ], - 'configurations': { - 'Debug_Win32': { - 'msvs_target_platform': 'x64', - }, - }, - }, - { - 'target_name': 'front_right', - 'type': 'executable', - 'dependencies': ['right'], - 'sources': [ - 'front.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/configurations/target_platform/front.c b/mozc_build_tools/gyp/test/configurations/target_platform/front.c deleted file mode 100644 index 12b1d0a..0000000 --- a/mozc_build_tools/gyp/test/configurations/target_platform/front.c +++ /dev/null @@ -1,8 +0,0 @@ -#include - -const char *message(void); - -int main(int argc, char *argv[]) { - printf("%s\n", message()); - return 0; -} diff --git a/mozc_build_tools/gyp/test/configurations/target_platform/gyptest-target_platform.py b/mozc_build_tools/gyp/test/configurations/target_platform/gyptest-target_platform.py deleted file mode 100644 index ae4e9e5..0000000 --- a/mozc_build_tools/gyp/test/configurations/target_platform/gyptest-target_platform.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Tests the msvs specific msvs_target_platform option. -""" - -import TestGyp -import TestCommon - - -def RunX64(exe, stdout): - try: - test.run_built_executable(exe, stdout=stdout) - except WindowsError, e: - # Assume the exe is 64-bit if it can't load on 32-bit systems. - # Both versions of the error are required because different versions - # of python seem to return different errors for invalid exe type. 
- if e.errno != 193 and '[Error 193]' not in str(e): - raise - - -test = TestGyp.TestGyp(formats=['msvs']) - -test.run_gyp('configurations.gyp') - -test.set_configuration('Debug|x64') -test.build('configurations.gyp', rebuild=True) -RunX64('front_left', stdout=('left\n')) -RunX64('front_right', stdout=('right\n')) - -test.set_configuration('Debug|Win32') -test.build('configurations.gyp', rebuild=True) -RunX64('front_left', stdout=('left\n')) -test.run_built_executable('front_right', stdout=('right\n')) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/configurations/target_platform/left.c b/mozc_build_tools/gyp/test/configurations/target_platform/left.c deleted file mode 100644 index 1ce2ea1..0000000 --- a/mozc_build_tools/gyp/test/configurations/target_platform/left.c +++ /dev/null @@ -1,3 +0,0 @@ -const char *message(void) { - return "left"; -} diff --git a/mozc_build_tools/gyp/test/configurations/target_platform/right.c b/mozc_build_tools/gyp/test/configurations/target_platform/right.c deleted file mode 100644 index b157849..0000000 --- a/mozc_build_tools/gyp/test/configurations/target_platform/right.c +++ /dev/null @@ -1,3 +0,0 @@ -const char *message(void) { - return "right"; -} diff --git a/mozc_build_tools/gyp/test/configurations/x64/configurations.c b/mozc_build_tools/gyp/test/configurations/x64/configurations.c deleted file mode 100644 index 72c97e3..0000000 --- a/mozc_build_tools/gyp/test/configurations/x64/configurations.c +++ /dev/null @@ -1,12 +0,0 @@ -#include - -int main(int argc, char *argv[]) { - if (sizeof(void*) == 4) { - printf("Running Win32\n"); - } else if (sizeof(void*) == 8) { - printf("Running x64\n"); - } else { - printf("Unexpected platform\n"); - } - return 0; -} diff --git a/mozc_build_tools/gyp/test/configurations/x64/configurations.gyp b/mozc_build_tools/gyp/test/configurations/x64/configurations.gyp deleted file mode 100644 index 06ffa37..0000000 --- a/mozc_build_tools/gyp/test/configurations/x64/configurations.gyp +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'target_defaults': { - 'configurations': { - 'Debug': { - 'msvs_configuration_platform': 'Win32', - }, - 'Debug_x64': { - 'inherit_from': ['Debug'], - 'msvs_configuration_platform': 'x64', - }, - }, - }, - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'executable', - 'sources': [ - 'configurations.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/configurations/x64/gyptest-x86.py b/mozc_build_tools/gyp/test/configurations/x64/gyptest-x86.py deleted file mode 100644 index 254ea6f..0000000 --- a/mozc_build_tools/gyp/test/configurations/x64/gyptest-x86.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable in three different configurations. -""" - -import TestGyp - -test = TestGyp.TestGyp(formats=['msvs']) - -test.run_gyp('configurations.gyp') - -for platform in ['Win32', 'x64']: - test.set_configuration('Debug|%s' % platform) - test.build('configurations.gyp', rebuild=True) - try: - test.run_built_executable('configurations', - stdout=('Running %s\n' % platform)) - except WindowsError, e: - # Assume the exe is 64-bit if it can't load on 32-bit systems. 
- if platform == 'x64' and (e.errno == 193 or '[Error 193]' in str(e)): - continue - raise - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/copies/gyptest-all.py b/mozc_build_tools/gyp/test/copies/gyptest-all.py deleted file mode 100644 index 8542ab7..0000000 --- a/mozc_build_tools/gyp/test/copies/gyptest-all.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies file copies using an explicit build target of 'all'. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('copies.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('copies.gyp', test.ALL, chdir='relocate/src') - -test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n') - -test.built_file_must_match('copies-out/file2', - 'file2 contents\n', - chdir='relocate/src') - -test.built_file_must_match('copies-out/directory/file3', - 'file3 contents\n', - chdir='relocate/src') -test.built_file_must_match('copies-out/directory/file4', - 'file4 contents\n', - chdir='relocate/src') -test.built_file_must_match('copies-out/directory/subdir/file5', - 'file5 contents\n', - chdir='relocate/src') -test.built_file_must_match('copies-out/subdir/file6', - 'file6 contents\n', - chdir='relocate/src') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/copies/gyptest-default.py b/mozc_build_tools/gyp/test/copies/gyptest-default.py deleted file mode 100644 index a5d1bf9..0000000 --- a/mozc_build_tools/gyp/test/copies/gyptest-default.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies file copies using the build tool default. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('copies.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('copies.gyp', chdir='relocate/src') - -test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n') - -test.built_file_must_match('copies-out/file2', - 'file2 contents\n', - chdir='relocate/src') - -test.built_file_must_match('copies-out/directory/file3', - 'file3 contents\n', - chdir='relocate/src') -test.built_file_must_match('copies-out/directory/file4', - 'file4 contents\n', - chdir='relocate/src') -test.built_file_must_match('copies-out/directory/subdir/file5', - 'file5 contents\n', - chdir='relocate/src') -test.built_file_must_match('copies-out/subdir/file6', - 'file6 contents\n', - chdir='relocate/src') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/copies/src/copies.gyp b/mozc_build_tools/gyp/test/copies/src/copies.gyp deleted file mode 100644 index ce2e0ca..0000000 --- a/mozc_build_tools/gyp/test/copies/src/copies.gyp +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'copies1', - 'type': 'none', - 'copies': [ - { - 'destination': 'copies-out', - 'files': [ - 'file1', - ], - }, - ], - }, - { - 'target_name': 'copies2', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-out', - 'files': [ - 'file2', - ], - }, - ], - }, - # Copy a directory tree. 
- { - 'target_name': 'copies_recursive', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-out', - 'files': [ - 'directory/', - ], - }, - ], - }, - # Copy a directory from deeper in the tree (this should not reproduce the - # entire directory path in the destination, only the final directory). - { - 'target_name': 'copies_recursive_depth', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-out', - 'files': [ - 'parentdir/subdir/', - ], - }, - ], - }, - # Verify that a null 'files' list doesn't gag the generators. - { - 'target_name': 'copies_null', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-null', - 'files': [], - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/copies/src/directory/file3 b/mozc_build_tools/gyp/test/copies/src/directory/file3 deleted file mode 100644 index 43f16f3..0000000 --- a/mozc_build_tools/gyp/test/copies/src/directory/file3 +++ /dev/null @@ -1 +0,0 @@ -file3 contents diff --git a/mozc_build_tools/gyp/test/copies/src/directory/file4 b/mozc_build_tools/gyp/test/copies/src/directory/file4 deleted file mode 100644 index 5f7270a..0000000 --- a/mozc_build_tools/gyp/test/copies/src/directory/file4 +++ /dev/null @@ -1 +0,0 @@ -file4 contents diff --git a/mozc_build_tools/gyp/test/copies/src/directory/subdir/file5 b/mozc_build_tools/gyp/test/copies/src/directory/subdir/file5 deleted file mode 100644 index 41f4718..0000000 --- a/mozc_build_tools/gyp/test/copies/src/directory/subdir/file5 +++ /dev/null @@ -1 +0,0 @@ -file5 contents diff --git a/mozc_build_tools/gyp/test/copies/src/file1 b/mozc_build_tools/gyp/test/copies/src/file1 deleted file mode 100644 index 84d55c5..0000000 --- a/mozc_build_tools/gyp/test/copies/src/file1 +++ /dev/null @@ -1 +0,0 @@ -file1 contents diff --git a/mozc_build_tools/gyp/test/copies/src/file2 b/mozc_build_tools/gyp/test/copies/src/file2 deleted file mode 100644 index af1b8ae..0000000 --- a/mozc_build_tools/gyp/test/copies/src/file2 +++ /dev/null @@ -1 +0,0 @@ -file2 contents diff --git a/mozc_build_tools/gyp/test/copies/src/parentdir/subdir/file6 b/mozc_build_tools/gyp/test/copies/src/parentdir/subdir/file6 deleted file mode 100644 index f5d5757..0000000 --- a/mozc_build_tools/gyp/test/copies/src/parentdir/subdir/file6 +++ /dev/null @@ -1 +0,0 @@ -file6 contents diff --git a/mozc_build_tools/gyp/test/copies-link/gyptest-copies-link.py b/mozc_build_tools/gyp/test/copies-link/gyptest-copies-link.py deleted file mode 100644 index fe7b602..0000000 --- a/mozc_build_tools/gyp/test/copies-link/gyptest-copies-link.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies file copies using the build tool default. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('copies-link.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('copies-link.gyp', chdir='relocate/src') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/copies-link/src/copies-link.gyp b/mozc_build_tools/gyp/test/copies-link/src/copies-link.gyp deleted file mode 100644 index 9d2530a..0000000 --- a/mozc_build_tools/gyp/test/copies-link/src/copies-link.gyp +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'func1', - 'type': 'static_library', - 'sources': ['func1.c'], - }, - { - 'target_name': 'clone_func1', - 'type': 'none', - 'dependencies': ['func1'], - 'actions': [ - { - 'action_name': 'cloning library', - 'inputs': [ - '<(LIB_DIR)/<(STATIC_LIB_PREFIX)func1<(STATIC_LIB_SUFFIX)' - ], - 'outputs': ['<(PRODUCT_DIR)/alternate/' - '<(STATIC_LIB_PREFIX)cloned<(STATIC_LIB_SUFFIX)'], - 'destination': '<(PRODUCT_DIR)', - 'action': ['python', 'copy.py', '<@(_inputs)', '<@(_outputs)'], - 'msvs_cygwin_shell': 0, - }, - ], - }, - { - 'target_name': 'copy_cloned', - 'type': 'none', - 'dependencies': ['clone_func1'], - 'copies': [ - { - 'destination': '<(LIB_DIR)', - 'files': [ - '<(PRODUCT_DIR)/alternate/' - '<(STATIC_LIB_PREFIX)cloned<(STATIC_LIB_SUFFIX)', - ], - }, - ], - }, - { - 'target_name': 'use_cloned', - 'type': 'executable', - 'sources': ['main.c'], - 'dependencies': ['copy_cloned'], - 'link_settings': { - 'conditions': [ - ['OS=="win"', { - 'libraries': ['-l"<(LIB_DIR)/cloned.lib"'], - }, { - 'libraries': ['-lcloned'], - 'ldflags': ['-L <(LIB_DIR)'], - }], - ], - }, - }, - ], -} diff --git a/mozc_build_tools/gyp/test/copies-link/src/copy.py b/mozc_build_tools/gyp/test/copies-link/src/copy.py deleted file mode 100644 index a1dd871..0000000 --- a/mozc_build_tools/gyp/test/copies-link/src/copy.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import shutil -import sys - - -def main(argv): - if len(argv) != 3: - print 'USAGE: copy.py ' - return 1 - - shutil.copy(argv[1], argv[2]) - return 0 - - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff --git a/mozc_build_tools/gyp/test/copies-link/src/func1.c b/mozc_build_tools/gyp/test/copies-link/src/func1.c deleted file mode 100644 index 56fd2f0..0000000 --- a/mozc_build_tools/gyp/test/copies-link/src/func1.c +++ /dev/null @@ -1,9 +0,0 @@ -#include - -extern void func1(void); - -int main(int argc, char *argv[]) { - printf("hello from link1\n"); - func1(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/copies-link/src/main.c b/mozc_build_tools/gyp/test/copies-link/src/main.c deleted file mode 100644 index cceccdd..0000000 --- a/mozc_build_tools/gyp/test/copies-link/src/main.c +++ /dev/null @@ -1,5 +0,0 @@ -#include - -void func1(void) { - printf("hello from func1\n"); -} diff --git a/mozc_build_tools/gyp/test/defines/defines-env.gyp b/mozc_build_tools/gyp/test/defines/defines-env.gyp deleted file mode 100644 index 1781546..0000000 --- a/mozc_build_tools/gyp/test/defines/defines-env.gyp +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'variables': { - 'value%': '5', - }, - 'targets': [ - { - 'target_name': 'defines', - 'type': 'executable', - 'sources': [ - 'defines.c', - ], - 'defines': [ - 'VALUE=<(value)', - ], - }, - ], -} - diff --git a/mozc_build_tools/gyp/test/defines/defines.c b/mozc_build_tools/gyp/test/defines/defines.c deleted file mode 100644 index 33657ac..0000000 --- a/mozc_build_tools/gyp/test/defines/defines.c +++ /dev/null @@ -1,14 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. 
*/ - -#include - -int main(int argc, char *argv[]) -{ -#ifdef FOO - printf("FOO is defined\n"); -#endif - printf("VALUE is %d\n", VALUE); - return 0; -} diff --git a/mozc_build_tools/gyp/test/defines/defines.gyp b/mozc_build_tools/gyp/test/defines/defines.gyp deleted file mode 100644 index 3db66e5..0000000 --- a/mozc_build_tools/gyp/test/defines/defines.gyp +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'defines', - 'type': 'executable', - 'sources': [ - 'defines.c', - ], - 'defines': [ - 'FOO', - 'VALUE=1', - ], - }, - ], - 'conditions': [ - ['OS=="fakeos"', { - 'targets': [ - { - 'target_name': 'fakeosprogram', - 'type': 'executable', - 'sources': [ - 'defines.c', - ], - 'defines': [ - 'FOO', - 'VALUE=1', - ], - }, - ], - }], - ], -} diff --git a/mozc_build_tools/gyp/test/defines/gyptest-define-override.py b/mozc_build_tools/gyp/test/defines/gyptest-define-override.py deleted file mode 100644 index 82e325a..0000000 --- a/mozc_build_tools/gyp/test/defines/gyptest-define-override.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that a default gyp define can be overridden. -""" - -import os -import TestGyp - -test = TestGyp.TestGyp() - -# Command-line define -test.run_gyp('defines.gyp', '-D', 'OS=fakeos') -test.build('defines.gyp') -test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE) -# Clean up the exe so subsequent tests don't find an old exe. -os.remove(test.built_file_path('fakeosprogram', type=test.EXECUTABLE)) - -# Without "OS" override, fokeosprogram shouldn't be built. -test.run_gyp('defines.gyp') -test.build('defines.gyp') -test.built_file_must_not_exist('fakeosprogram', type=test.EXECUTABLE) - -# Environment define -os.environ['GYP_DEFINES'] = 'OS=fakeos' -test.run_gyp('defines.gyp') -test.build('defines.gyp') -test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/defines/gyptest-defines-env-regyp.py b/mozc_build_tools/gyp/test/defines/gyptest-defines-env-regyp.py deleted file mode 100644 index 70c9ba7..0000000 --- a/mozc_build_tools/gyp/test/defines/gyptest-defines-env-regyp.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable with C++ define specified by a gyp define, and -the use of the environment during regeneration when the gyp file changes. -""" - -import os -import TestGyp - -# Regenerating build files when a gyp file changes is currently only supported -# by the make generator. -test = TestGyp.TestGyp(formats=['make']) - -try: - os.environ['GYP_DEFINES'] = 'value=50' - test.run_gyp('defines.gyp') -finally: - # We clear the environ after calling gyp. When the auto-regeneration happens, - # the same define should be reused anyway. Reset to empty string first in - # case the platform doesn't support unsetenv. 
- os.environ['GYP_DEFINES'] = '' - del os.environ['GYP_DEFINES'] - -test.build('defines.gyp') - -expect = """\ -FOO is defined -VALUE is 1 -""" -test.run_built_executable('defines', stdout=expect) - -# Sleep so that the changed gyp file will have a newer timestamp than the -# previously generated build files. -test.sleep() -test.write('defines.gyp', test.read('defines-env.gyp')) - -test.build('defines.gyp', test.ALL) - -expect = """\ -VALUE is 50 -""" -test.run_built_executable('defines', stdout=expect) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/defines/gyptest-defines-env.py b/mozc_build_tools/gyp/test/defines/gyptest-defines-env.py deleted file mode 100644 index 6b4e717..0000000 --- a/mozc_build_tools/gyp/test/defines/gyptest-defines-env.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable with C++ define specified by a gyp define. -""" - -import os -import TestGyp - -test = TestGyp.TestGyp() - -# With the value only given in environment, it should be used. -try: - os.environ['GYP_DEFINES'] = 'value=10' - test.run_gyp('defines-env.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.build('defines-env.gyp') - -expect = """\ -VALUE is 10 -""" -test.run_built_executable('defines', stdout=expect) - - -# With the value given in both command line and environment, -# command line should take precedence. -try: - os.environ['GYP_DEFINES'] = 'value=20' - test.run_gyp('defines-env.gyp', '-Dvalue=25') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines.c') -test.build('defines-env.gyp') - -expect = """\ -VALUE is 25 -""" -test.run_built_executable('defines', stdout=expect) - - -# With the value only given in environment, it should be ignored if -# --ignore-environment is specified. -try: - os.environ['GYP_DEFINES'] = 'value=30' - test.run_gyp('defines-env.gyp', '--ignore-environment') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines.c') -test.build('defines-env.gyp') - -expect = """\ -VALUE is 5 -""" -test.run_built_executable('defines', stdout=expect) - - -# With the value given in both command line and environment, and -# --ignore-environment also specified, command line should still be used. -try: - os.environ['GYP_DEFINES'] = 'value=40' - test.run_gyp('defines-env.gyp', '--ignore-environment', '-Dvalue=45') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines.c') -test.build('defines-env.gyp') - -expect = """\ -VALUE is 45 -""" -test.run_built_executable('defines', stdout=expect) - - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/defines/gyptest-defines.py b/mozc_build_tools/gyp/test/defines/gyptest-defines.py deleted file mode 100644 index a21a617..0000000 --- a/mozc_build_tools/gyp/test/defines/gyptest-defines.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable with C++ defines. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('defines.gyp') - -test.build('defines.gyp') - -expect = """\ -FOO is defined -VALUE is 1 -""" -test.run_built_executable('defines', stdout=expect) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/defines-escaping/defines-escaping.c b/mozc_build_tools/gyp/test/defines-escaping/defines-escaping.c deleted file mode 100644 index 4407572..0000000 --- a/mozc_build_tools/gyp/test/defines-escaping/defines-escaping.c +++ /dev/null @@ -1,11 +0,0 @@ -/* Copyright (c) 2010 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -#include - -int main(int argc, char *argv[]) -{ - printf(TEST_FORMAT, TEST_ARGS); - return 0; -} diff --git a/mozc_build_tools/gyp/test/defines-escaping/defines-escaping.gyp b/mozc_build_tools/gyp/test/defines-escaping/defines-escaping.gyp deleted file mode 100644 index 6f0f3fd..0000000 --- a/mozc_build_tools/gyp/test/defines-escaping/defines-escaping.gyp +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'defines_escaping', - 'type': 'executable', - 'sources': [ - 'defines-escaping.c', - ], - 'defines': [ - 'TEST_FORMAT="<(test_format)"', - 'TEST_ARGS=<(test_args)', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/defines-escaping/gyptest-defines-escaping.py b/mozc_build_tools/gyp/test/defines-escaping/gyptest-defines-escaping.py deleted file mode 100644 index db0e592..0000000 --- a/mozc_build_tools/gyp/test/defines-escaping/gyptest-defines-escaping.py +++ /dev/null @@ -1,163 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable with C++ define specified by a gyp define using -various special characters such as quotes, commas, etc. -""" - -import os -import TestGyp - -test = TestGyp.TestGyp() - -# Tests string literals, percents, and backslash escapes. -try: - os.environ['GYP_DEFINES'] = \ - """test_format='%s\\n' test_args='"Simple test of %s with a literal"'""" - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.build('defines-escaping.gyp') - -expect = """\ -Simple test of %s with a literal -""" -test.run_built_executable('defines_escaping', stdout=expect) - - -# Test multiple comma-and-space-separated string literals. -try: - os.environ['GYP_DEFINES'] = \ - """test_format='%s and %s\\n' test_args='"foo", "bar"'""" - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines-escaping.c') -test.build('defines-escaping.gyp') - -expect = """\ -foo and bar -""" -test.run_built_executable('defines_escaping', stdout=expect) - - -# Test string literals containing quotes. -try: - os.environ['GYP_DEFINES'] = \ - ("""test_format='%s %s %s %s %s\\n' """ + - """test_args='"\\"These,\\"",""" + - """ "\\"words,\\"",""" - """ "\\"are,\\"",""" + - """ "\\"in,\\"",""" + - """ "\\"quotes.\\""'""") - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines-escaping.c') -test.build('defines-escaping.gyp') - -expect = """\ -"These," "words," "are," "in," "quotes." 
-""" -test.run_built_executable('defines_escaping', stdout=expect) - - -# Test string literals containing single quotes. -try: - os.environ['GYP_DEFINES'] = \ - ("""test_format='%s %s %s %s %s\\n' """ + - """test_args="\\"'These,'\\",""" + - """ \\"'words,'\\",""" - """ \\"'are,'\\",""" + - """ \\"'in,'\\",""" + - """ \\"'quotes.'\\"" """) - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines-escaping.c') -test.build('defines-escaping.gyp') - -expect = """\ -'These,' 'words,' 'are,' 'in,' 'quotes.' -""" -test.run_built_executable('defines_escaping', stdout=expect) - - -# Test string literals containing different numbers of backslashes before quotes -# (to exercise Windows' quoting behaviour). -try: - os.environ['GYP_DEFINES'] = \ - ("""test_format='%s\\n%s\\n%s\\n' """ + - """test_args='"\\\\\\"1 visible slash\\\\\\"",""" + - """ "\\\\\\\\\\"2 visible slashes\\\\\\\\\\"",""" - """ "\\\\\\\\\\\\\\"3 visible slashes\\\\\\\\\\\\\\""'""") - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines-escaping.c') -test.build('defines-escaping.gyp') - -expect = """\ -\\"1 visible slash\\" -\\\\"2 visible slashes\\\\" -\\\\\\"3 visible slashes\\\\\\" -""" -test.run_built_executable('defines_escaping', stdout=expect) - - -# Test that various scary sequences are passed unfettered. -try: - os.environ['GYP_DEFINES'] = \ - ("""test_format='%s\\n' """ + - """test_args='"%PATH%, $foo, " `foo`;"'""") - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines-escaping.c') -test.build('defines-escaping.gyp') - -expect = """\ -%PATH%, $foo, " `foo`; -""" -test.run_built_executable('defines_escaping', stdout=expect) - - -# Test commas and semi-colons preceded by backslashes (to exercise Windows' -# quoting behaviour). -try: - os.environ['GYP_DEFINES'] = \ - ("""test_format='%s\\n%s\\n' """ + - """test_args='"\\\\, \\\\\\\\;",""" + - # Same thing again, but enclosed in visible quotes. - """ "\\"\\\\, \\\\\\\\;\\""'""") - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines-escaping.c') -test.build('defines-escaping.gyp') - -expect = """\ -\\, \\\\; -"\\, \\\\;" -""" -test.run_built_executable('defines_escaping', stdout=expect) - -# We deliberately do not test having an odd number of quotes in a string -# literal because that isn't feasible in MSVS. diff --git a/mozc_build_tools/gyp/test/dependencies/a.c b/mozc_build_tools/gyp/test/dependencies/a.c deleted file mode 100755 index 3bba111..0000000 --- a/mozc_build_tools/gyp/test/dependencies/a.c +++ /dev/null @@ -1,9 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -extern int funcB(); - -int funcA() { - return funcB(); -} diff --git a/mozc_build_tools/gyp/test/dependencies/b/b.c b/mozc_build_tools/gyp/test/dependencies/b/b.c deleted file mode 100755 index b5e771b..0000000 --- a/mozc_build_tools/gyp/test/dependencies/b/b.c +++ /dev/null @@ -1,3 +0,0 @@ -int funcB() { - return 2; -} diff --git a/mozc_build_tools/gyp/test/dependencies/b/b.gyp b/mozc_build_tools/gyp/test/dependencies/b/b.gyp deleted file mode 100755 index f09e1ff..0000000 --- a/mozc_build_tools/gyp/test/dependencies/b/b.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'b', - 'type': 'static_library', - 'sources': [ - 'b.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/dependencies/c/c.c b/mozc_build_tools/gyp/test/dependencies/c/c.c deleted file mode 100644 index 4949daf..0000000 --- a/mozc_build_tools/gyp/test/dependencies/c/c.c +++ /dev/null @@ -1,4 +0,0 @@ -int funcC() { - return 3 - // Intentional syntax error. This file should never be compiled, so this - // shouldn't be a problem. diff --git a/mozc_build_tools/gyp/test/dependencies/c/c.gyp b/mozc_build_tools/gyp/test/dependencies/c/c.gyp deleted file mode 100644 index eabebea..0000000 --- a/mozc_build_tools/gyp/test/dependencies/c/c.gyp +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'c_unused', - 'type': 'static_library', - 'sources': [ - 'c.c', - ], - }, - { - 'target_name': 'd', - 'type': 'static_library', - 'sources': [ - 'd.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/dependencies/c/d.c b/mozc_build_tools/gyp/test/dependencies/c/d.c deleted file mode 100644 index 05465fc..0000000 --- a/mozc_build_tools/gyp/test/dependencies/c/d.c +++ /dev/null @@ -1,3 +0,0 @@ -int funcD() { - return 4; -} diff --git a/mozc_build_tools/gyp/test/dependencies/extra_targets.gyp b/mozc_build_tools/gyp/test/dependencies/extra_targets.gyp deleted file mode 100644 index c1a26de..0000000 --- a/mozc_build_tools/gyp/test/dependencies/extra_targets.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'a', - 'type': 'static_library', - 'sources': [ - 'a.c', - ], - # This only depends on the "d" target; other targets in c.gyp - # should not become part of the build (unlike with 'c/c.gyp:*'). - 'dependencies': ['c/c.gyp:d'], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/dependencies/gyptest-extra-targets.py b/mozc_build_tools/gyp/test/dependencies/gyptest-extra-targets.py deleted file mode 100644 index 3752f74..0000000 --- a/mozc_build_tools/gyp/test/dependencies/gyptest-extra-targets.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verify that dependencies don't pull unused targets into the build. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('extra_targets.gyp') - -# This should fail if it tries to build 'c_unused' since 'c/c.c' has a syntax -# error and won't compile. -test.build('extra_targets.gyp', test.ALL) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/dependencies/gyptest-lib-only.py b/mozc_build_tools/gyp/test/dependencies/gyptest-lib-only.py deleted file mode 100755 index d90d88f..0000000 --- a/mozc_build_tools/gyp/test/dependencies/gyptest-lib-only.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -""" -Verify that a link time only dependency will get pulled into the set of built -targets, even if no executable uses it. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('lib_only.gyp') - -test.build('lib_only.gyp', test.ALL) - -# Make doesn't put static libs in a common 'lib' directory, like it does with -# shared libs, so check in the obj path corresponding to the source path. -test.built_file_must_exist('a', type=test.STATIC_LIB, libdir='obj.target') - -# TODO(bradnelson/mark): -# On linux and windows a library target will at least pull its link dependencies -# into the generated sln/_main.scons, since not doing so confuses users. -# This is not currently implemented on mac, which has the opposite behavior. -if test.format == 'xcode': - test.built_file_must_not_exist('b', type=test.STATIC_LIB) -else: - test.built_file_must_exist('b', type=test.STATIC_LIB, libdir='obj.target/b') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/dependencies/lib_only.gyp b/mozc_build_tools/gyp/test/dependencies/lib_only.gyp deleted file mode 100755 index f6c84de..0000000 --- a/mozc_build_tools/gyp/test/dependencies/lib_only.gyp +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'a', - 'type': 'static_library', - 'sources': [ - 'a.c', - ], - 'dependencies': ['b/b.gyp:b'], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/dependency-copy/gyptest-copy.py b/mozc_build_tools/gyp/test/dependency-copy/gyptest-copy.py deleted file mode 100644 index 5ba7c73..0000000 --- a/mozc_build_tools/gyp/test/dependency-copy/gyptest-copy.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies dependencies do the copy step. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('copies.gyp', chdir='src') - -test.build('copies.gyp', 'proj2', chdir='src') - -test.run_built_executable('proj1', - chdir='src', - stdout="Hello from file1.c\n") -test.run_built_executable('proj2', - chdir='src', - stdout="Hello from file2.c\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/dependency-copy/src/copies.gyp b/mozc_build_tools/gyp/test/dependency-copy/src/copies.gyp deleted file mode 100644 index 4176b18..0000000 --- a/mozc_build_tools/gyp/test/dependency-copy/src/copies.gyp +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'proj1', - 'type': 'executable', - 'sources': [ - 'file1.c', - ], - }, - { - 'target_name': 'proj2', - 'type': 'executable', - 'sources': [ - 'file2.c', - ], - 'dependencies': [ - 'proj1', - ] - }, - ], -} diff --git a/mozc_build_tools/gyp/test/dependency-copy/src/file1.c b/mozc_build_tools/gyp/test/dependency-copy/src/file1.c deleted file mode 100644 index 3caf5d6..0000000 --- a/mozc_build_tools/gyp/test/dependency-copy/src/file1.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from file1.c\n"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/dependency-copy/src/file2.c b/mozc_build_tools/gyp/test/dependency-copy/src/file2.c deleted file mode 100644 index ed45cc0..0000000 --- a/mozc_build_tools/gyp/test/dependency-copy/src/file2.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from file2.c\n"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/generator-output/actions/actions.gyp b/mozc_build_tools/gyp/test/generator-output/actions/actions.gyp deleted file mode 100644 index dded59a..0000000 --- a/mozc_build_tools/gyp/test/generator-output/actions/actions.gyp +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'pull_in_all_actions', - 'type': 'none', - 'dependencies': [ - 'subdir1/executable.gyp:*', - 'subdir2/none.gyp:*', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/generator-output/actions/build/README.txt b/mozc_build_tools/gyp/test/generator-output/actions/build/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/mozc_build_tools/gyp/test/generator-output/actions/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/actions/subdir1/actions-out/README.txt b/mozc_build_tools/gyp/test/generator-output/actions/subdir1/actions-out/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/mozc_build_tools/gyp/test/generator-output/actions/subdir1/actions-out/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/actions/subdir1/build/README.txt b/mozc_build_tools/gyp/test/generator-output/actions/subdir1/build/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/mozc_build_tools/gyp/test/generator-output/actions/subdir1/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. 
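Every gyptest-*.py script removed in this change drives the build through the TestGyp harness in the same way. A minimal sketch of that shared pattern is below; 'example.gyp' and the target name 'example' are placeholders, and only the harness calls (run_gyp, build, run_built_executable, up_to_date, pass_test) are taken from the deleted scripts.

    #!/usr/bin/env python
    # Minimal sketch of the TestGyp pattern shared by the deleted gyptest-*.py
    # scripts. The gyp file and target names here are illustrative only.
    import TestGyp

    test = TestGyp.TestGyp()

    test.run_gyp('example.gyp')                # generate build files
    test.build('example.gyp', test.ALL)        # build every target
    test.run_built_executable('example',       # run the result, check stdout
                              stdout="expected output\n")
    test.up_to_date('example.gyp', test.ALL)   # a rebuild should do nothing
    test.pass_test()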
diff --git a/mozc_build_tools/gyp/test/generator-output/actions/subdir1/executable.gyp b/mozc_build_tools/gyp/test/generator-output/actions/subdir1/executable.gyp deleted file mode 100644 index 6bdd60a..0000000 --- a/mozc_build_tools/gyp/test/generator-output/actions/subdir1/executable.gyp +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'program.c', - ], - 'actions': [ - { - 'action_name': 'make-prog1', - 'inputs': [ - 'make-prog1.py', - ], - 'outputs': [ - '<(INTERMEDIATE_DIR)/prog1.c', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - { - 'action_name': 'make-prog2', - 'inputs': [ - 'make-prog2.py', - ], - 'outputs': [ - 'actions-out/prog2.c', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/generator-output/actions/subdir1/make-prog1.py b/mozc_build_tools/gyp/test/generator-output/actions/subdir1/make-prog1.py deleted file mode 100644 index 7ea1d8a..0000000 --- a/mozc_build_tools/gyp/test/generator-output/actions/subdir1/make-prog1.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = r""" -#include - -void prog1(void) -{ - printf("Hello from make-prog1.py\n"); -} -""" - -open(sys.argv[1], 'w').write(contents) - -sys.exit(0) diff --git a/mozc_build_tools/gyp/test/generator-output/actions/subdir1/make-prog2.py b/mozc_build_tools/gyp/test/generator-output/actions/subdir1/make-prog2.py deleted file mode 100644 index 0bfe497..0000000 --- a/mozc_build_tools/gyp/test/generator-output/actions/subdir1/make-prog2.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = r""" -#include - -void prog2(void) -{ - printf("Hello from make-prog2.py\n"); -} -""" - -open(sys.argv[1], 'w').write(contents) - -sys.exit(0) diff --git a/mozc_build_tools/gyp/test/generator-output/actions/subdir1/program.c b/mozc_build_tools/gyp/test/generator-output/actions/subdir1/program.c deleted file mode 100644 index d5f661d..0000000 --- a/mozc_build_tools/gyp/test/generator-output/actions/subdir1/program.c +++ /dev/null @@ -1,12 +0,0 @@ -#include - -extern void prog1(void); -extern void prog2(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from program.c\n"); - prog1(); - prog2(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/generator-output/actions/subdir2/actions-out/README.txt b/mozc_build_tools/gyp/test/generator-output/actions/subdir2/actions-out/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/mozc_build_tools/gyp/test/generator-output/actions/subdir2/actions-out/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. 
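The actions in the subdir1/executable.gyp hunk above invoke each make-prog*.py script as 'python <script> <output path>' and, because process_outputs_as_sources is set, compile whatever the script writes into the 'program' target. A sketch of that generator-script contract follows; the function name and message are illustrative, and only the argv/exit convention mirrors the deleted make-prog1.py and make-prog2.py.

    #!/usr/bin/env python
    # Sketch of the generator-script contract used by the 'actions' above:
    # gyp passes the single output path as argv[1]; the script writes a C
    # source file there and exits 0, and the build then compiles it.
    import sys

    contents = r"""
    #include <stdio.h>

    void generated(void)
    {
        printf("Hello from a generated source\n");
    }
    """

    with open(sys.argv[1], 'w') as output:
        output.write(contents)

    sys.exit(0)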
diff --git a/mozc_build_tools/gyp/test/generator-output/actions/subdir2/build/README.txt b/mozc_build_tools/gyp/test/generator-output/actions/subdir2/build/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/mozc_build_tools/gyp/test/generator-output/actions/subdir2/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/actions/subdir2/make-file.py b/mozc_build_tools/gyp/test/generator-output/actions/subdir2/make-file.py deleted file mode 100644 index fff0653..0000000 --- a/mozc_build_tools/gyp/test/generator-output/actions/subdir2/make-file.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = "Hello from make-file.py\n" - -open(sys.argv[1], 'wb').write(contents) diff --git a/mozc_build_tools/gyp/test/generator-output/actions/subdir2/none.gyp b/mozc_build_tools/gyp/test/generator-output/actions/subdir2/none.gyp deleted file mode 100644 index f98f527..0000000 --- a/mozc_build_tools/gyp/test/generator-output/actions/subdir2/none.gyp +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'file', - 'type': 'none', - 'msvs_cygwin_shell': 0, - 'actions': [ - { - 'action_name': 'make-file', - 'inputs': [ - 'make-file.py', - ], - 'outputs': [ - 'actions-out/file.out', - # TODO: enhance testing infrastructure to test this - # without having to hard-code the intermediate dir paths. - #'<(INTERMEDIATE_DIR)/file.out', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - } - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/generator-output/copies/build/README.txt b/mozc_build_tools/gyp/test/generator-output/copies/build/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/mozc_build_tools/gyp/test/generator-output/copies/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/copies/copies-out/README.txt b/mozc_build_tools/gyp/test/generator-output/copies/copies-out/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/mozc_build_tools/gyp/test/generator-output/copies/copies-out/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/copies/copies.gyp b/mozc_build_tools/gyp/test/generator-output/copies/copies.gyp deleted file mode 100644 index 479a3d9..0000000 --- a/mozc_build_tools/gyp/test/generator-output/copies/copies.gyp +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (c) 2009 Google Inc. 
All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'pull_in_subdir', - 'type': 'none', - 'dependencies': [ - 'subdir/subdir.gyp:*', - ], - }, - { - 'target_name': 'copies1', - 'type': 'none', - 'copies': [ - { - 'destination': 'copies-out', - 'files': [ - 'file1', - ], - }, - ], - }, - { - 'target_name': 'copies2', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-out', - 'files': [ - 'file2', - ], - }, - ], - }, - # Verify that a null 'files' list doesn't gag the generators. - { - 'target_name': 'copies_null', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-null', - 'files': [], - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/generator-output/copies/file1 b/mozc_build_tools/gyp/test/generator-output/copies/file1 deleted file mode 100644 index 84d55c5..0000000 --- a/mozc_build_tools/gyp/test/generator-output/copies/file1 +++ /dev/null @@ -1 +0,0 @@ -file1 contents diff --git a/mozc_build_tools/gyp/test/generator-output/copies/file2 b/mozc_build_tools/gyp/test/generator-output/copies/file2 deleted file mode 100644 index af1b8ae..0000000 --- a/mozc_build_tools/gyp/test/generator-output/copies/file2 +++ /dev/null @@ -1 +0,0 @@ -file2 contents diff --git a/mozc_build_tools/gyp/test/generator-output/copies/subdir/build/README.txt b/mozc_build_tools/gyp/test/generator-output/copies/subdir/build/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/mozc_build_tools/gyp/test/generator-output/copies/subdir/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/copies/subdir/copies-out/README.txt b/mozc_build_tools/gyp/test/generator-output/copies/subdir/copies-out/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/mozc_build_tools/gyp/test/generator-output/copies/subdir/copies-out/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/copies/subdir/file3 b/mozc_build_tools/gyp/test/generator-output/copies/subdir/file3 deleted file mode 100644 index 43f16f3..0000000 --- a/mozc_build_tools/gyp/test/generator-output/copies/subdir/file3 +++ /dev/null @@ -1 +0,0 @@ -file3 contents diff --git a/mozc_build_tools/gyp/test/generator-output/copies/subdir/file4 b/mozc_build_tools/gyp/test/generator-output/copies/subdir/file4 deleted file mode 100644 index 5f7270a..0000000 --- a/mozc_build_tools/gyp/test/generator-output/copies/subdir/file4 +++ /dev/null @@ -1 +0,0 @@ -file4 contents diff --git a/mozc_build_tools/gyp/test/generator-output/copies/subdir/subdir.gyp b/mozc_build_tools/gyp/test/generator-output/copies/subdir/subdir.gyp deleted file mode 100644 index af031d2..0000000 --- a/mozc_build_tools/gyp/test/generator-output/copies/subdir/subdir.gyp +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'copies3', - 'type': 'none', - 'copies': [ - { - 'destination': 'copies-out', - 'files': [ - 'file3', - ], - }, - ], - }, - { - 'target_name': 'copies4', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-out', - 'files': [ - 'file4', - ], - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/generator-output/gyptest-actions.py b/mozc_build_tools/gyp/test/generator-output/gyptest-actions.py deleted file mode 100644 index 73ac5ae..0000000 --- a/mozc_build_tools/gyp/test/generator-output/gyptest-actions.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies --generator-output= behavior when using actions. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -# All the generated files should go under 'gypfiles'. The source directory -# ('actions') should be untouched. -test.writable(test.workpath('actions'), False) -test.run_gyp('actions.gyp', - '--generator-output=' + test.workpath('gypfiles'), - chdir='actions') - -test.writable(test.workpath('actions'), True) - -test.relocate('actions', 'relocate/actions') -test.relocate('gypfiles', 'relocate/gypfiles') - -test.writable(test.workpath('relocate/actions'), False) - -# Some of the action outputs use "pure" relative paths (i.e. without prefixes -# like <(INTERMEDIATE_DIR) or <(PROGRAM_DIR)). Even though we are building under -# 'gypfiles', such outputs will still be created relative to the original .gyp -# sources. Projects probably wouldn't normally do this, since it kind of defeats -# the purpose of '--generator-output', but it is supported behaviour. -test.writable(test.workpath('relocate/actions/build'), True) -test.writable(test.workpath('relocate/actions/subdir1/build'), True) -test.writable(test.workpath('relocate/actions/subdir1/actions-out'), True) -test.writable(test.workpath('relocate/actions/subdir2/build'), True) -test.writable(test.workpath('relocate/actions/subdir2/actions-out'), True) - -test.build('actions.gyp', test.ALL, chdir='relocate/gypfiles') - -expect = """\ -Hello from program.c -Hello from make-prog1.py -Hello from make-prog2.py -""" - -if test.format == 'xcode': - chdir = 'relocate/actions/subdir1' -else: - chdir = 'relocate/gypfiles' -test.run_built_executable('program', chdir=chdir, stdout=expect) - -test.must_match('relocate/actions/subdir2/actions-out/file.out', - "Hello from make-file.py\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/generator-output/gyptest-copies.py b/mozc_build_tools/gyp/test/generator-output/gyptest-copies.py deleted file mode 100644 index 414b7c3..0000000 --- a/mozc_build_tools/gyp/test/generator-output/gyptest-copies.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies file copies using an explicit build target of 'all'. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.writable(test.workpath('copies'), False) - -test.run_gyp('copies.gyp', - '--generator-output=' + test.workpath('gypfiles'), - chdir='copies') - -test.writable(test.workpath('copies'), True) - -test.relocate('copies', 'relocate/copies') -test.relocate('gypfiles', 'relocate/gypfiles') - -test.writable(test.workpath('relocate/copies'), False) - -test.writable(test.workpath('relocate/copies/build'), True) -test.writable(test.workpath('relocate/copies/copies-out'), True) -test.writable(test.workpath('relocate/copies/subdir/build'), True) -test.writable(test.workpath('relocate/copies/subdir/copies-out'), True) - -test.build('copies.gyp', test.ALL, chdir='relocate/gypfiles') - -test.must_match(['relocate', 'copies', 'copies-out', 'file1'], - "file1 contents\n") - -if test.format == 'xcode': - chdir = 'relocate/copies/build' -elif test.format == 'make': - chdir = 'relocate/gypfiles/out' -else: - chdir = 'relocate/gypfiles' -test.must_match([chdir, 'Default', 'copies-out', 'file2'], "file2 contents\n") - -test.must_match(['relocate', 'copies', 'subdir', 'copies-out', 'file3'], - "file3 contents\n") - -if test.format == 'xcode': - chdir = 'relocate/copies/subdir/build' -elif test.format == 'make': - chdir = 'relocate/gypfiles/out' -else: - chdir = 'relocate/gypfiles' -test.must_match([chdir, 'Default', 'copies-out', 'file4'], "file4 contents\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/generator-output/gyptest-relocate.py b/mozc_build_tools/gyp/test/generator-output/gyptest-relocate.py deleted file mode 100644 index dd1c2bd..0000000 --- a/mozc_build_tools/gyp/test/generator-output/gyptest-relocate.py +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that a project hierarchy created with the --generator-output= -option can be built even when it's relocated to a different path. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.writable(test.workpath('src'), False) - -test.run_gyp('prog1.gyp', - '-Dset_symroot=1', - '--generator-output=' + test.workpath('gypfiles'), - chdir='src') - -test.writable(test.workpath('src'), True) - -test.relocate('src', 'relocate/src') -test.relocate('gypfiles', 'relocate/gypfiles') - -test.writable(test.workpath('relocate/src'), False) - -test.writable(test.workpath('relocate/src/build'), True) -test.writable(test.workpath('relocate/src/subdir2/build'), True) -test.writable(test.workpath('relocate/src/subdir3/build'), True) - -test.build('prog1.gyp', test.ALL, chdir='relocate/gypfiles') - -chdir = 'relocate/gypfiles' - -expect = """\ -Hello from %s -Hello from inc.h -Hello from inc1/include1.h -Hello from inc2/include2.h -Hello from inc3/include3.h -Hello from subdir2/deeper/deeper.h -""" - -if test.format == 'xcode': - chdir = 'relocate/src' -test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c') - -if test.format == 'xcode': - chdir = 'relocate/src/subdir2' -test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c') - -if test.format == 'xcode': - chdir = 'relocate/src/subdir3' -test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/generator-output/gyptest-rules.py b/mozc_build_tools/gyp/test/generator-output/gyptest-rules.py deleted file mode 100644 index 05b674f..0000000 --- a/mozc_build_tools/gyp/test/generator-output/gyptest-rules.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies --generator-output= behavior when using rules. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.writable(test.workpath('rules'), False) - -test.run_gyp('rules.gyp', - '--generator-output=' + test.workpath('gypfiles'), - chdir='rules') - -test.writable(test.workpath('rules'), True) - -test.relocate('rules', 'relocate/rules') -test.relocate('gypfiles', 'relocate/gypfiles') - -test.writable(test.workpath('relocate/rules'), False) - -test.writable(test.workpath('relocate/rules/build'), True) -test.writable(test.workpath('relocate/rules/subdir1/build'), True) -test.writable(test.workpath('relocate/rules/subdir2/build'), True) -test.writable(test.workpath('relocate/rules/subdir2/rules-out'), True) - -test.build('rules.gyp', test.ALL, chdir='relocate/gypfiles') - -expect = """\ -Hello from program.c -Hello from function1.in1 -Hello from function2.in1 -Hello from define3.in0 -Hello from define4.in0 -""" - -if test.format == 'xcode': - chdir = 'relocate/rules/subdir1' -else: - chdir = 'relocate/gypfiles' -test.run_built_executable('program', chdir=chdir, stdout=expect) - -test.must_match('relocate/rules/subdir2/rules-out/file1.out', - "Hello from file1.in0\n") -test.must_match('relocate/rules/subdir2/rules-out/file2.out', - "Hello from file2.in0\n") -test.must_match('relocate/rules/subdir2/rules-out/file3.out', - "Hello from file3.in1\n") -test.must_match('relocate/rules/subdir2/rules-out/file4.out', - "Hello from file4.in1\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/generator-output/gyptest-subdir2-deep.py b/mozc_build_tools/gyp/test/generator-output/gyptest-subdir2-deep.py deleted file mode 100644 index ea1b472..0000000 --- a/mozc_build_tools/gyp/test/generator-output/gyptest-subdir2-deep.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a target from a .gyp file a few subdirectories -deep when the --generator-output= option is used to put the build -configuration files in a separate directory tree. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.writable(test.workpath('src'), False) - -test.writable(test.workpath('src/subdir2/deeper/build'), True) - -test.run_gyp('deeper.gyp', - '-Dset_symroot=1', - '--generator-output=' + test.workpath('gypfiles'), - chdir='src/subdir2/deeper') - -test.build('deeper.gyp', test.ALL, chdir='gypfiles') - -chdir = 'gypfiles' - -if test.format == 'xcode': - chdir = 'src/subdir2/deeper' -test.run_built_executable('deeper', - chdir=chdir, - stdout="Hello from deeper.c\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/generator-output/gyptest-top-all.py b/mozc_build_tools/gyp/test/generator-output/gyptest-top-all.py deleted file mode 100644 index 902ceb2..0000000 --- a/mozc_build_tools/gyp/test/generator-output/gyptest-top-all.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a project hierarchy created when the --generator-output= -option is used to put the build configuration files in a separate -directory tree. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.writable(test.workpath('src'), False) - -test.run_gyp('prog1.gyp', - '-Dset_symroot=1', - '--generator-output=' + test.workpath('gypfiles'), - chdir='src') - -test.writable(test.workpath('src/build'), True) -test.writable(test.workpath('src/subdir2/build'), True) -test.writable(test.workpath('src/subdir3/build'), True) - -test.build('prog1.gyp', test.ALL, chdir='gypfiles') - -chdir = 'gypfiles' - -expect = """\ -Hello from %s -Hello from inc.h -Hello from inc1/include1.h -Hello from inc2/include2.h -Hello from inc3/include3.h -Hello from subdir2/deeper/deeper.h -""" - -if test.format == 'xcode': - chdir = 'src' -test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c') - -if test.format == 'xcode': - chdir = 'src/subdir2' -test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c') - -if test.format == 'xcode': - chdir = 'src/subdir3' -test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/generator-output/rules/build/README.txt b/mozc_build_tools/gyp/test/generator-output/rules/build/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/rules/copy-file.py b/mozc_build_tools/gyp/test/generator-output/rules/copy-file.py deleted file mode 100644 index 938c336..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/copy-file.py +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = open(sys.argv[1], 'r').read() -open(sys.argv[2], 'wb').write(contents) - -sys.exit(0) diff --git a/mozc_build_tools/gyp/test/generator-output/rules/rules.gyp b/mozc_build_tools/gyp/test/generator-output/rules/rules.gyp deleted file mode 100644 index dded59a..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/rules.gyp +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'pull_in_all_actions', - 'type': 'none', - 'dependencies': [ - 'subdir1/executable.gyp:*', - 'subdir2/none.gyp:*', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/build/README.txt b/mozc_build_tools/gyp/test/generator-output/rules/subdir1/build/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. 
diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/define3.in0 b/mozc_build_tools/gyp/test/generator-output/rules/subdir1/define3.in0 deleted file mode 100644 index cc29c64..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/define3.in0 +++ /dev/null @@ -1 +0,0 @@ -#define STRING3 "Hello from define3.in0\n" diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/define4.in0 b/mozc_build_tools/gyp/test/generator-output/rules/subdir1/define4.in0 deleted file mode 100644 index c9b0467..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/define4.in0 +++ /dev/null @@ -1 +0,0 @@ -#define STRING4 "Hello from define4.in0\n" diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/executable.gyp b/mozc_build_tools/gyp/test/generator-output/rules/subdir1/executable.gyp deleted file mode 100644 index 2fd89a0..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/executable.gyp +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'program.c', - 'function1.in1', - 'function2.in1', - 'define3.in0', - 'define4.in0', - ], - 'include_dirs': [ - '<(INTERMEDIATE_DIR)', - ], - 'rules': [ - { - 'rule_name': 'copy_file_0', - 'extension': 'in0', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - # TODO: fix SCons and Make to support generated files not - # in a variable-named path like <(INTERMEDIATE_DIR) - #'<(RULE_INPUT_ROOT).c', - '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 0, - }, - { - 'rule_name': 'copy_file_1', - 'extension': 'in1', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - # TODO: fix SCons and Make to support generated files not - # in a variable-named path like <(INTERMEDIATE_DIR) - #'<(RULE_INPUT_ROOT).c', - '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/function1.in1 b/mozc_build_tools/gyp/test/generator-output/rules/subdir1/function1.in1 deleted file mode 100644 index 545e7ca..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/function1.in1 +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void function1(void) -{ - printf("Hello from function1.in1\n"); -} diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/function2.in1 b/mozc_build_tools/gyp/test/generator-output/rules/subdir1/function2.in1 deleted file mode 100644 index 6bad43f..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/function2.in1 +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void function2(void) -{ - printf("Hello from function2.in1\n"); -} diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/program.c b/mozc_build_tools/gyp/test/generator-output/rules/subdir1/program.c deleted file mode 100644 index 27fd31e..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir1/program.c +++ /dev/null @@ -1,18 +0,0 @@ -#include -#include "define3.h" -#include "define4.h" - -extern void function1(void); -extern void function2(void); -extern void function3(void); -extern 
void function4(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from program.c\n"); - function1(); - function2(); - printf("%s", STRING3); - printf("%s", STRING4); - return 0; -} diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/build/README.txt b/mozc_build_tools/gyp/test/generator-output/rules/subdir2/build/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/file1.in0 b/mozc_build_tools/gyp/test/generator-output/rules/subdir2/file1.in0 deleted file mode 100644 index 7aca64f..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/file1.in0 +++ /dev/null @@ -1 +0,0 @@ -Hello from file1.in0 diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/file2.in0 b/mozc_build_tools/gyp/test/generator-output/rules/subdir2/file2.in0 deleted file mode 100644 index 80a281a..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/file2.in0 +++ /dev/null @@ -1 +0,0 @@ -Hello from file2.in0 diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/file3.in1 b/mozc_build_tools/gyp/test/generator-output/rules/subdir2/file3.in1 deleted file mode 100644 index 60ae2e7..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/file3.in1 +++ /dev/null @@ -1 +0,0 @@ -Hello from file3.in1 diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/file4.in1 b/mozc_build_tools/gyp/test/generator-output/rules/subdir2/file4.in1 deleted file mode 100644 index 5a3c307..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/file4.in1 +++ /dev/null @@ -1 +0,0 @@ -Hello from file4.in1 diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/none.gyp b/mozc_build_tools/gyp/test/generator-output/rules/subdir2/none.gyp deleted file mode 100644 index 664cbd9..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/none.gyp +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'files', - 'type': 'none', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'file1.in0', - 'file2.in0', - 'file3.in1', - 'file4.in1', - ], - 'rules': [ - { - 'rule_name': 'copy_file_0', - 'extension': 'in0', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - 'rules-out/<(RULE_INPUT_ROOT).out', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 0, - }, - { - 'rule_name': 'copy_file_1', - 'extension': 'in1', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - 'rules-out/<(RULE_INPUT_ROOT).out', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/rules-out/README.txt b/mozc_build_tools/gyp/test/generator-output/rules/subdir2/rules-out/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/mozc_build_tools/gyp/test/generator-output/rules/subdir2/rules-out/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/src/build/README.txt b/mozc_build_tools/gyp/test/generator-output/src/build/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/src/inc.h b/mozc_build_tools/gyp/test/generator-output/src/inc.h deleted file mode 100644 index 57aa1a5..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/inc.h +++ /dev/null @@ -1 +0,0 @@ -#define INC_STRING "inc.h" diff --git a/mozc_build_tools/gyp/test/generator-output/src/inc1/include1.h b/mozc_build_tools/gyp/test/generator-output/src/inc1/include1.h deleted file mode 100644 index 1d59065..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/inc1/include1.h +++ /dev/null @@ -1 +0,0 @@ -#define INCLUDE1_STRING "inc1/include1.h" diff --git a/mozc_build_tools/gyp/test/generator-output/src/prog1.c b/mozc_build_tools/gyp/test/generator-output/src/prog1.c deleted file mode 100644 index 656f81d..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/prog1.c +++ /dev/null @@ -1,18 +0,0 @@ -#include - -#include "inc.h" -#include "include1.h" -#include "include2.h" -#include "include3.h" -#include "deeper.h" - -int main(int argc, char *argv[]) -{ - printf("Hello from prog1.c\n"); - printf("Hello from %s\n", INC_STRING); - printf("Hello from %s\n", INCLUDE1_STRING); - printf("Hello from %s\n", INCLUDE2_STRING); - printf("Hello from %s\n", INCLUDE3_STRING); - printf("Hello from %s\n", DEEPER_STRING); - return 0; -} diff --git a/mozc_build_tools/gyp/test/generator-output/src/prog1.gyp b/mozc_build_tools/gyp/test/generator-output/src/prog1.gyp deleted file mode 100644 index d50e6fb..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/prog1.gyp +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - 'symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'prog1', - 'type': 'executable', - 'dependencies': [ - 'subdir2/prog2.gyp:prog2', - ], - 'include_dirs': [ - '.', - 'inc1', - 'subdir2/inc2', - 'subdir3/inc3', - 'subdir2/deeper', - ], - 'sources': [ - 'prog1.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/generator-output/src/subdir2/build/README.txt b/mozc_build_tools/gyp/test/generator-output/src/subdir2/build/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/subdir2/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/build/README.txt b/mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/build/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/deeper.c b/mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/deeper.c deleted file mode 100644 index 56c49d1..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/deeper.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from deeper.c\n"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp b/mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp deleted file mode 100644 index 8648770..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'includes': [ - '../../symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'deeper', - 'type': 'executable', - 'sources': [ - 'deeper.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/deeper.h b/mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/deeper.h deleted file mode 100644 index f6484a0..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/subdir2/deeper/deeper.h +++ /dev/null @@ -1 +0,0 @@ -#define DEEPER_STRING "subdir2/deeper/deeper.h" diff --git a/mozc_build_tools/gyp/test/generator-output/src/subdir2/inc2/include2.h b/mozc_build_tools/gyp/test/generator-output/src/subdir2/inc2/include2.h deleted file mode 100644 index 1ccfa5d..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/subdir2/inc2/include2.h +++ /dev/null @@ -1 +0,0 @@ -#define INCLUDE2_STRING "inc2/include2.h" diff --git a/mozc_build_tools/gyp/test/generator-output/src/subdir2/prog2.c b/mozc_build_tools/gyp/test/generator-output/src/subdir2/prog2.c deleted file mode 100644 index 38d6c84..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/subdir2/prog2.c +++ /dev/null @@ -1,18 +0,0 @@ -#include - -#include "inc.h" -#include "include1.h" -#include "include2.h" -#include "include3.h" -#include "deeper.h" - -int main(int argc, char *argv[]) -{ - printf("Hello from prog2.c\n"); - printf("Hello from %s\n", INC_STRING); - printf("Hello from %s\n", INCLUDE1_STRING); - printf("Hello from %s\n", INCLUDE2_STRING); - printf("Hello from %s\n", INCLUDE3_STRING); - printf("Hello from %s\n", DEEPER_STRING); - return 0; -} diff --git a/mozc_build_tools/gyp/test/generator-output/src/subdir2/prog2.gyp b/mozc_build_tools/gyp/test/generator-output/src/subdir2/prog2.gyp deleted file mode 100644 index 7176ed8..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/subdir2/prog2.gyp +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'prog2', - 'type': 'executable', - 'include_dirs': [ - '..', - '../inc1', - 'inc2', - '../subdir3/inc3', - 'deeper', - ], - 'dependencies': [ - '../subdir3/prog3.gyp:prog3', - ], - 'sources': [ - 'prog2.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/generator-output/src/subdir3/build/README.txt b/mozc_build_tools/gyp/test/generator-output/src/subdir3/build/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/subdir3/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. 
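Several gyp files in this tree declare variables with a trailing '%', for example 'set_symroot%': 0 in the symroot.gypi hunk further down and 'foo%': '"fromdefault"' in the home_dot_gyp test, and the scripts above override them with flags such as -Dset_symroot=1. The '%' suffix marks a default that applies only when the variable is not already set. The snippet below is a rough model of that behaviour written for illustration, not gyp's actual implementation; the function name is invented.

    # Rough model of gyp's trailing-'%' variable defaults (illustrative only).
    def apply_percent_defaults(existing, declared):
        """existing: values already known, e.g. from -D flags.
        declared: a gyp 'variables' dict whose '%'-suffixed keys are defaults."""
        resolved = dict(existing)
        for key, value in declared.items():
            if key.endswith('%'):
                resolved.setdefault(key[:-1], value)  # fill in only if unset
        return resolved

    # apply_percent_defaults({'set_symroot': '1'}, {'set_symroot%': 0})
    # -> {'set_symroot': '1'}: the -Dset_symroot=1 passed by the test scripts
    # wins over the declared default of 0.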
diff --git a/mozc_build_tools/gyp/test/generator-output/src/subdir3/inc3/include3.h b/mozc_build_tools/gyp/test/generator-output/src/subdir3/inc3/include3.h deleted file mode 100644 index bf53bf1..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/subdir3/inc3/include3.h +++ /dev/null @@ -1 +0,0 @@ -#define INCLUDE3_STRING "inc3/include3.h" diff --git a/mozc_build_tools/gyp/test/generator-output/src/subdir3/prog3.c b/mozc_build_tools/gyp/test/generator-output/src/subdir3/prog3.c deleted file mode 100644 index 7848b45..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/subdir3/prog3.c +++ /dev/null @@ -1,18 +0,0 @@ -#include - -#include "inc.h" -#include "include1.h" -#include "include2.h" -#include "include3.h" -#include "deeper.h" - -int main(int argc, char *argv[]) -{ - printf("Hello from prog3.c\n"); - printf("Hello from %s\n", INC_STRING); - printf("Hello from %s\n", INCLUDE1_STRING); - printf("Hello from %s\n", INCLUDE2_STRING); - printf("Hello from %s\n", INCLUDE3_STRING); - printf("Hello from %s\n", DEEPER_STRING); - return 0; -} diff --git a/mozc_build_tools/gyp/test/generator-output/src/subdir3/prog3.gyp b/mozc_build_tools/gyp/test/generator-output/src/subdir3/prog3.gyp deleted file mode 100644 index 46c5e00..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/subdir3/prog3.gyp +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'prog3', - 'type': 'executable', - 'include_dirs': [ - '..', - '../inc1', - '../subdir2/inc2', - 'inc3', - '../subdir2/deeper', - ], - 'sources': [ - 'prog3.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/generator-output/src/symroot.gypi b/mozc_build_tools/gyp/test/generator-output/src/symroot.gypi deleted file mode 100644 index 5199164..0000000 --- a/mozc_build_tools/gyp/test/generator-output/src/symroot.gypi +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'variables': { - 'set_symroot%': 0, - }, - 'conditions': [ - ['set_symroot == 1', { - 'xcode_settings': { - 'SYMROOT': '<(DEPTH)/build', - }, - }], - ], -} diff --git a/mozc_build_tools/gyp/test/hello/gyptest-all.py b/mozc_build_tools/gyp/test/hello/gyptest-all.py deleted file mode 100644 index 9ecff55..0000000 --- a/mozc_build_tools/gyp/test/hello/gyptest-all.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simplest-possible build of a "Hello, world!" program -using an explicit build target of 'all'. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('hello.gyp') - -test.build('hello.gyp', test.ALL) - -test.run_built_executable('hello', stdout="Hello, world!\n") - -test.up_to_date('hello.gyp', test.ALL) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/hello/gyptest-default.py b/mozc_build_tools/gyp/test/hello/gyptest-default.py deleted file mode 100644 index 76fffb3..0000000 --- a/mozc_build_tools/gyp/test/hello/gyptest-default.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simplest-possible build of a "Hello, world!" program -using the default build target. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('hello.gyp') - -test.build('hello.gyp') - -test.run_built_executable('hello', stdout="Hello, world!\n") - -test.up_to_date('hello.gyp', test.DEFAULT) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/hello/gyptest-disable-regyp.py b/mozc_build_tools/gyp/test/hello/gyptest-disable-regyp.py deleted file mode 100644 index 1e4b306..0000000 --- a/mozc_build_tools/gyp/test/hello/gyptest-disable-regyp.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that Makefiles don't get rebuilt when a source gyp file changes and -the disable_regeneration generator flag is set. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('hello.gyp', '-Gauto_regeneration=0') - -test.build('hello.gyp', test.ALL) - -test.run_built_executable('hello', stdout="Hello, world!\n") - -# Sleep so that the changed gyp file will have a newer timestamp than the -# previously generated build files. -test.sleep() -test.write('hello.gyp', test.read('hello2.gyp')) - -test.build('hello.gyp', test.ALL) - -# Should still be the old executable, as regeneration was disabled. -test.run_built_executable('hello', stdout="Hello, world!\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/hello/gyptest-regyp.py b/mozc_build_tools/gyp/test/hello/gyptest-regyp.py deleted file mode 100644 index 827c723..0000000 --- a/mozc_build_tools/gyp/test/hello/gyptest-regyp.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that Makefiles get rebuilt when a source gyp file changes. -""" - -import TestGyp - -# Regenerating build files when a gyp file changes is currently only supported -# by the make generator. -test = TestGyp.TestGyp(formats=['make']) - -test.run_gyp('hello.gyp') - -test.build('hello.gyp', test.ALL) - -test.run_built_executable('hello', stdout="Hello, world!\n") - -# Sleep so that the changed gyp file will have a newer timestamp than the -# previously generated build files. -test.sleep() -test.write('hello.gyp', test.read('hello2.gyp')) - -test.build('hello.gyp', test.ALL) - -test.run_built_executable('hello', stdout="Hello, two!\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/hello/gyptest-target.py b/mozc_build_tools/gyp/test/hello/gyptest-target.py deleted file mode 100755 index 2f0a2a3..0000000 --- a/mozc_build_tools/gyp/test/hello/gyptest-target.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simplest-possible build of a "Hello, world!" program -using an explicit build target of 'hello'. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('hello.gyp') - -test.build('hello.gyp', 'hello') - -test.run_built_executable('hello', stdout="Hello, world!\n") - -test.up_to_date('hello.gyp', 'hello') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/hello/hello.c b/mozc_build_tools/gyp/test/hello/hello.c deleted file mode 100644 index 8dbecc0..0000000 --- a/mozc_build_tools/gyp/test/hello/hello.c +++ /dev/null @@ -1,11 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -#include - -int main(int argc, char *argv[]) -{ - printf("Hello, world!\n"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/hello/hello.gyp b/mozc_build_tools/gyp/test/hello/hello.gyp deleted file mode 100644 index 1974d51..0000000 --- a/mozc_build_tools/gyp/test/hello/hello.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'hello', - 'type': 'executable', - 'sources': [ - 'hello.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/hello/hello2.c b/mozc_build_tools/gyp/test/hello/hello2.c deleted file mode 100644 index 19ef3fb..0000000 --- a/mozc_build_tools/gyp/test/hello/hello2.c +++ /dev/null @@ -1,11 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -#include - -int main(int argc, char *argv[]) -{ - printf("Hello, two!\n"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/hello/hello2.gyp b/mozc_build_tools/gyp/test/hello/hello2.gyp deleted file mode 100644 index 25b08ca..0000000 --- a/mozc_build_tools/gyp/test/hello/hello2.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'hello', - 'type': 'executable', - 'sources': [ - 'hello2.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py b/mozc_build_tools/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py deleted file mode 100644 index a2b9f30..0000000 --- a/mozc_build_tools/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies inclusion of $HOME/.gyp/includes.gypi works properly with relocation -and with regeneration. -""" - -import os -import TestGyp - -# Regenerating build files when a gyp file changes is currently only supported -# by the make generator. -test = TestGyp.TestGyp(formats=['make']) - -os.environ['HOME'] = os.path.abspath('home') - -test.run_gyp('all.gyp', chdir='src') - -# After relocating, we should still be able to build (build file shouldn't -# contain relative reference to ~/.gyp/includes.gypi) -test.relocate('src', 'relocate/src') - -test.build('all.gyp', test.ALL, chdir='relocate/src') - -test.run_built_executable('printfoo', - chdir='relocate/src', - stdout="FOO is fromhome\n"); - -# Building should notice any changes to ~/.gyp/includes.gypi and regyp. 
-test.sleep() - -test.write('home/.gyp/include.gypi', test.read('home2/.gyp/include.gypi')) - -test.build('all.gyp', test.ALL, chdir='relocate/src') - -test.run_built_executable('printfoo', - chdir='relocate/src', - stdout="FOO is fromhome2\n"); - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/home_dot_gyp/gyptest-home-includes.py b/mozc_build_tools/gyp/test/home_dot_gyp/gyptest-home-includes.py deleted file mode 100644 index 6a0e965..0000000 --- a/mozc_build_tools/gyp/test/home_dot_gyp/gyptest-home-includes.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies inclusion of $HOME/.gyp/includes.gypi works. -""" - -import os -import TestGyp - -test = TestGyp.TestGyp() - -os.environ['HOME'] = os.path.abspath('home') - -test.run_gyp('all.gyp', chdir='src') - -# After relocating, we should still be able to build (build file shouldn't -# contain relative reference to ~/.gyp/includes.gypi) -test.relocate('src', 'relocate/src') - -test.build('all.gyp', test.ALL, chdir='relocate/src') - -test.run_built_executable('printfoo', - chdir='relocate/src', - stdout="FOO is fromhome\n"); - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/home_dot_gyp/home/.gyp/include.gypi b/mozc_build_tools/gyp/test/home_dot_gyp/home/.gyp/include.gypi deleted file mode 100644 index fcfb39b..0000000 --- a/mozc_build_tools/gyp/test/home_dot_gyp/home/.gyp/include.gypi +++ /dev/null @@ -1,5 +0,0 @@ -{ - 'variables': { - 'foo': '"fromhome"', - }, -} diff --git a/mozc_build_tools/gyp/test/home_dot_gyp/home2/.gyp/include.gypi b/mozc_build_tools/gyp/test/home_dot_gyp/home2/.gyp/include.gypi deleted file mode 100644 index f0d84b3..0000000 --- a/mozc_build_tools/gyp/test/home_dot_gyp/home2/.gyp/include.gypi +++ /dev/null @@ -1,5 +0,0 @@ -{ - 'variables': { - 'foo': '"fromhome2"', - }, -} diff --git a/mozc_build_tools/gyp/test/home_dot_gyp/src/all.gyp b/mozc_build_tools/gyp/test/home_dot_gyp/src/all.gyp deleted file mode 100644 index 14b6aea..0000000 --- a/mozc_build_tools/gyp/test/home_dot_gyp/src/all.gyp +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'variables': { - 'foo%': '"fromdefault"', - }, - 'targets': [ - { - 'target_name': 'printfoo', - 'type': 'executable', - 'sources': [ - 'printfoo.c', - ], - 'defines': [ - 'FOO=<(foo)', - ], - }, - ], -} - diff --git a/mozc_build_tools/gyp/test/home_dot_gyp/src/printfoo.c b/mozc_build_tools/gyp/test/home_dot_gyp/src/printfoo.c deleted file mode 100644 index 92d2cba..0000000 --- a/mozc_build_tools/gyp/test/home_dot_gyp/src/printfoo.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("FOO is %s\n", FOO); - return 0; -} diff --git a/mozc_build_tools/gyp/test/include_dirs/gyptest-all.py b/mozc_build_tools/gyp/test/include_dirs/gyptest-all.py deleted file mode 100644 index 7496600..0000000 --- a/mozc_build_tools/gyp/test/include_dirs/gyptest-all.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies use of include_dirs when using an explicit build target of 'all'. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('includes.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('includes.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from includes.c -Hello from inc.h -Hello from include1.h -Hello from subdir/inc2/include2.h -""" -test.run_built_executable('includes', stdout=expect, chdir='relocate/src') - -if test.format == 'xcode': - chdir='relocate/src/subdir' -else: - chdir='relocate/src' - -expect = """\ -Hello from subdir/subdir_includes.c -Hello from subdir/inc.h -Hello from include1.h -Hello from subdir/inc2/include2.h -""" -test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/include_dirs/gyptest-default.py b/mozc_build_tools/gyp/test/include_dirs/gyptest-default.py deleted file mode 100644 index 467f58d..0000000 --- a/mozc_build_tools/gyp/test/include_dirs/gyptest-default.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies use of include_dirs when using the default build target. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('includes.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('includes.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from includes.c -Hello from inc.h -Hello from include1.h -Hello from subdir/inc2/include2.h -""" -test.run_built_executable('includes', stdout=expect, chdir='relocate/src') - -if test.format == 'xcode': - chdir='relocate/src/subdir' -else: - chdir='relocate/src' - -expect = """\ -Hello from subdir/subdir_includes.c -Hello from subdir/inc.h -Hello from include1.h -Hello from subdir/inc2/include2.h -""" -test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/include_dirs/src/inc.h b/mozc_build_tools/gyp/test/include_dirs/src/inc.h deleted file mode 100644 index 0398d69..0000000 --- a/mozc_build_tools/gyp/test/include_dirs/src/inc.h +++ /dev/null @@ -1 +0,0 @@ -#define INC_STRING "inc.h" diff --git a/mozc_build_tools/gyp/test/include_dirs/src/inc1/include1.h b/mozc_build_tools/gyp/test/include_dirs/src/inc1/include1.h deleted file mode 100644 index 43356b5..0000000 --- a/mozc_build_tools/gyp/test/include_dirs/src/inc1/include1.h +++ /dev/null @@ -1 +0,0 @@ -#define INCLUDE1_STRING "include1.h" diff --git a/mozc_build_tools/gyp/test/include_dirs/src/includes.c b/mozc_build_tools/gyp/test/include_dirs/src/includes.c deleted file mode 100644 index 756c427..0000000 --- a/mozc_build_tools/gyp/test/include_dirs/src/includes.c +++ /dev/null @@ -1,14 +0,0 @@ -#include - -#include "inc.h" -#include "include1.h" -#include "include2.h" - -int main(int argc, char *argv[]) -{ - printf("Hello from includes.c\n"); - printf("Hello from %s\n", INC_STRING); - printf("Hello from %s\n", INCLUDE1_STRING); - printf("Hello from %s\n", INCLUDE2_STRING); - return 0; -} diff --git a/mozc_build_tools/gyp/test/include_dirs/src/includes.gyp b/mozc_build_tools/gyp/test/include_dirs/src/includes.gyp deleted file mode 100644 index a2a55cc..0000000 --- a/mozc_build_tools/gyp/test/include_dirs/src/includes.gyp +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'includes', - 'type': 'executable', - 'dependencies': [ - 'subdir/subdir_includes.gyp:subdir_includes', - ], - 'include_dirs': [ - '.', - 'inc1', - 'subdir/inc2', - ], - 'sources': [ - 'includes.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/include_dirs/src/subdir/inc.h b/mozc_build_tools/gyp/test/include_dirs/src/subdir/inc.h deleted file mode 100644 index 0a68d7b..0000000 --- a/mozc_build_tools/gyp/test/include_dirs/src/subdir/inc.h +++ /dev/null @@ -1 +0,0 @@ -#define INC_STRING "subdir/inc.h" diff --git a/mozc_build_tools/gyp/test/include_dirs/src/subdir/inc2/include2.h b/mozc_build_tools/gyp/test/include_dirs/src/subdir/inc2/include2.h deleted file mode 100644 index 721577e..0000000 --- a/mozc_build_tools/gyp/test/include_dirs/src/subdir/inc2/include2.h +++ /dev/null @@ -1 +0,0 @@ -#define INCLUDE2_STRING "subdir/inc2/include2.h" diff --git a/mozc_build_tools/gyp/test/include_dirs/src/subdir/subdir_includes.c b/mozc_build_tools/gyp/test/include_dirs/src/subdir/subdir_includes.c deleted file mode 100644 index 727f682..0000000 --- a/mozc_build_tools/gyp/test/include_dirs/src/subdir/subdir_includes.c +++ /dev/null @@ -1,14 +0,0 @@ -#include - -#include "inc.h" -#include "include1.h" -#include "include2.h" - -int main(int argc, char *argv[]) -{ - printf("Hello from subdir/subdir_includes.c\n"); - printf("Hello from %s\n", INC_STRING); - printf("Hello from %s\n", INCLUDE1_STRING); - printf("Hello from %s\n", INCLUDE2_STRING); - return 0; -} diff --git a/mozc_build_tools/gyp/test/include_dirs/src/subdir/subdir_includes.gyp b/mozc_build_tools/gyp/test/include_dirs/src/subdir/subdir_includes.gyp deleted file mode 100644 index 257d052..0000000 --- a/mozc_build_tools/gyp/test/include_dirs/src/subdir/subdir_includes.gyp +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'subdir_includes', - 'type': 'executable', - 'include_dirs': [ - '.', - '../inc1', - 'inc2', - ], - 'sources': [ - 'subdir_includes.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/lib/README.txt b/mozc_build_tools/gyp/test/lib/README.txt deleted file mode 100644 index b3d7245..0000000 --- a/mozc_build_tools/gyp/test/lib/README.txt +++ /dev/null @@ -1,17 +0,0 @@ -Supporting modules for GYP testing. - - TestCmd.py - TestCommon.py - - Modules for generic testing of command-line utilities, - specifically including the ability to copy a test configuration - to temporary directories (with default cleanup on exit) as part - of running test scripts that invoke commands, compare actual - against expected output, etc. - - Our copies of these come from the SCons project, - http://www.scons.org/. - - TestGyp.py - - Modules for GYP-specific tests, of course. diff --git a/mozc_build_tools/gyp/test/lib/TestCmd.py b/mozc_build_tools/gyp/test/lib/TestCmd.py deleted file mode 100644 index 029c1d0..0000000 --- a/mozc_build_tools/gyp/test/lib/TestCmd.py +++ /dev/null @@ -1,1591 +0,0 @@ -""" -TestCmd.py: a testing framework for commands and scripts. - -The TestCmd module provides a framework for portable automated testing -of executable commands and scripts (in any language, not just Python), -especially commands and scripts that require file system interaction. 
- -In addition to running tests and evaluating conditions, the TestCmd -module manages and cleans up one or more temporary workspace -directories, and provides methods for creating files and directories in -those workspace directories from in-line data, here-documents), allowing -tests to be completely self-contained. - -A TestCmd environment object is created via the usual invocation: - - import TestCmd - test = TestCmd.TestCmd() - -There are a bunch of keyword arguments available at instantiation: - - test = TestCmd.TestCmd(description = 'string', - program = 'program_or_script_to_test', - interpreter = 'script_interpreter', - workdir = 'prefix', - subdir = 'subdir', - verbose = Boolean, - match = default_match_function, - diff = default_diff_function, - combine = Boolean) - -There are a bunch of methods that let you do different things: - - test.verbose_set(1) - - test.description_set('string') - - test.program_set('program_or_script_to_test') - - test.interpreter_set('script_interpreter') - test.interpreter_set(['script_interpreter', 'arg']) - - test.workdir_set('prefix') - test.workdir_set('') - - test.workpath('file') - test.workpath('subdir', 'file') - - test.subdir('subdir', ...) - - test.rmdir('subdir', ...) - - test.write('file', "contents\n") - test.write(['subdir', 'file'], "contents\n") - - test.read('file') - test.read(['subdir', 'file']) - test.read('file', mode) - test.read(['subdir', 'file'], mode) - - test.writable('dir', 1) - test.writable('dir', None) - - test.preserve(condition, ...) - - test.cleanup(condition) - - test.command_args(program = 'program_or_script_to_run', - interpreter = 'script_interpreter', - arguments = 'arguments to pass to program') - - test.run(program = 'program_or_script_to_run', - interpreter = 'script_interpreter', - arguments = 'arguments to pass to program', - chdir = 'directory_to_chdir_to', - stdin = 'input to feed to the program\n') - universal_newlines = True) - - p = test.start(program = 'program_or_script_to_run', - interpreter = 'script_interpreter', - arguments = 'arguments to pass to program', - universal_newlines = None) - - test.finish(self, p) - - test.pass_test() - test.pass_test(condition) - test.pass_test(condition, function) - - test.fail_test() - test.fail_test(condition) - test.fail_test(condition, function) - test.fail_test(condition, function, skip) - - test.no_result() - test.no_result(condition) - test.no_result(condition, function) - test.no_result(condition, function, skip) - - test.stdout() - test.stdout(run) - - test.stderr() - test.stderr(run) - - test.symlink(target, link) - - test.banner(string) - test.banner(string, width) - - test.diff(actual, expected) - - test.match(actual, expected) - - test.match_exact("actual 1\nactual 2\n", "expected 1\nexpected 2\n") - test.match_exact(["actual 1\n", "actual 2\n"], - ["expected 1\n", "expected 2\n"]) - - test.match_re("actual 1\nactual 2\n", regex_string) - test.match_re(["actual 1\n", "actual 2\n"], list_of_regexes) - - test.match_re_dotall("actual 1\nactual 2\n", regex_string) - test.match_re_dotall(["actual 1\n", "actual 2\n"], list_of_regexes) - - test.tempdir() - test.tempdir('temporary-directory') - - test.sleep() - test.sleep(seconds) - - test.where_is('foo') - test.where_is('foo', 'PATH1:PATH2') - test.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4') - - test.unlink('file') - test.unlink('subdir', 'file') - -The TestCmd module provides pass_test(), fail_test(), and no_result() -unbound functions that report test results for use with the Aegis change -management 
system. These methods terminate the test immediately, -reporting PASSED, FAILED, or NO RESULT respectively, and exiting with -status 0 (success), 1 or 2 respectively. This allows for a distinction -between an actual failed test and a test that could not be properly -evaluated because of an external condition (such as a full file system -or incorrect permissions). - - import TestCmd - - TestCmd.pass_test() - TestCmd.pass_test(condition) - TestCmd.pass_test(condition, function) - - TestCmd.fail_test() - TestCmd.fail_test(condition) - TestCmd.fail_test(condition, function) - TestCmd.fail_test(condition, function, skip) - - TestCmd.no_result() - TestCmd.no_result(condition) - TestCmd.no_result(condition, function) - TestCmd.no_result(condition, function, skip) - -The TestCmd module also provides unbound functions that handle matching -in the same way as the match_*() methods described above. - - import TestCmd - - test = TestCmd.TestCmd(match = TestCmd.match_exact) - - test = TestCmd.TestCmd(match = TestCmd.match_re) - - test = TestCmd.TestCmd(match = TestCmd.match_re_dotall) - -The TestCmd module provides unbound functions that can be used for the -"diff" argument to TestCmd.TestCmd instantiation: - - import TestCmd - - test = TestCmd.TestCmd(match = TestCmd.match_re, - diff = TestCmd.diff_re) - - test = TestCmd.TestCmd(diff = TestCmd.simple_diff) - -The "diff" argument can also be used with standard difflib functions: - - import difflib - - test = TestCmd.TestCmd(diff = difflib.context_diff) - - test = TestCmd.TestCmd(diff = difflib.unified_diff) - -Lastly, the where_is() method also exists in an unbound function -version. - - import TestCmd - - TestCmd.where_is('foo') - TestCmd.where_is('foo', 'PATH1:PATH2') - TestCmd.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4') -""" - -# Copyright 2000-2010 Steven Knight -# This module is free software, and you may redistribute it and/or modify -# it under the same terms as Python itself, so long as this copyright message -# and disclaimer are retained in their original form. -# -# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, -# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF -# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH -# DAMAGE. -# -# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, -# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, -# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
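The docstring above enumerates the TestCmd surface as a reference list; the shape of an actual test script built on it looks roughly like the sketch below. The program name, fixture file, and expected output are placeholders for illustration, not values taken from this patch.

    import TestCmd

    # 'program' is resolved relative to the original cwd; workdir='' asks for
    # a fresh temporary working directory that is cleaned up on exit.
    test = TestCmd.TestCmd(program='hello', workdir='',
                           description='smoke test')

    test.write('input.txt', "some input\n")   # create a fixture inside the workdir
    test.run(stdin="some input\n")            # run the program, capturing stdout/stderr

    if test.stdout() != "Hello, world!\n":    # compare the captured output
        test.fail_test()                      # prints FAILED, exits with status 1
    test.pass_test()                          # prints PASSED, exits with status 0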
- -__author__ = "Steven Knight " -__revision__ = "TestCmd.py 0.37.D001 2010/01/11 16:55:50 knight" -__version__ = "0.37" - -import errno -import os -import os.path -import re -import shutil -import stat -import string -import sys -import tempfile -import time -import traceback -import types -import UserList - -__all__ = [ - 'diff_re', - 'fail_test', - 'no_result', - 'pass_test', - 'match_exact', - 'match_re', - 'match_re_dotall', - 'python_executable', - 'TestCmd' -] - -try: - import difflib -except ImportError: - __all__.append('simple_diff') - -def is_List(e): - return type(e) is types.ListType \ - or isinstance(e, UserList.UserList) - -try: - from UserString import UserString -except ImportError: - class UserString: - pass - -if hasattr(types, 'UnicodeType'): - def is_String(e): - return type(e) is types.StringType \ - or type(e) is types.UnicodeType \ - or isinstance(e, UserString) -else: - def is_String(e): - return type(e) is types.StringType or isinstance(e, UserString) - -tempfile.template = 'testcmd.' -if os.name in ('posix', 'nt'): - tempfile.template = 'testcmd.' + str(os.getpid()) + '.' -else: - tempfile.template = 'testcmd.' - -re_space = re.compile('\s') - -_Cleanup = [] - -_chain_to_exitfunc = None - -def _clean(): - global _Cleanup - cleanlist = filter(None, _Cleanup) - del _Cleanup[:] - cleanlist.reverse() - for test in cleanlist: - test.cleanup() - if _chain_to_exitfunc: - _chain_to_exitfunc() - -try: - import atexit -except ImportError: - # TODO(1.5): atexit requires python 2.0, so chain sys.exitfunc - try: - _chain_to_exitfunc = sys.exitfunc - except AttributeError: - pass - sys.exitfunc = _clean -else: - atexit.register(_clean) - -try: - zip -except NameError: - def zip(*lists): - result = [] - for i in xrange(min(map(len, lists))): - result.append(tuple(map(lambda l, i=i: l[i], lists))) - return result - -class Collector: - def __init__(self, top): - self.entries = [top] - def __call__(self, arg, dirname, names): - pathjoin = lambda n, d=dirname: os.path.join(d, n) - self.entries.extend(map(pathjoin, names)) - -def _caller(tblist, skip): - string = "" - arr = [] - for file, line, name, text in tblist: - if file[-10:] == "TestCmd.py": - break - arr = [(file, line, name, text)] + arr - atfrom = "at" - for file, line, name, text in arr[skip:]: - if name in ("?", ""): - name = "" - else: - name = " (" + name + ")" - string = string + ("%s line %d of %s%s\n" % (atfrom, line, file, name)) - atfrom = "\tfrom" - return string - -def fail_test(self = None, condition = 1, function = None, skip = 0): - """Cause the test to fail. - - By default, the fail_test() method reports that the test FAILED - and exits with a status of 1. If a condition argument is supplied, - the test fails only if the condition is true. - """ - if not condition: - return - if not function is None: - function() - of = "" - desc = "" - sep = " " - if not self is None: - if self.program: - of = " of " + self.program - sep = "\n\t" - if self.description: - desc = " [" + self.description + "]" - sep = "\n\t" - - at = _caller(traceback.extract_stack(), skip) - sys.stderr.write("FAILED test" + of + desc + sep + at) - - sys.exit(1) - -def no_result(self = None, condition = 1, function = None, skip = 0): - """Causes a test to exit with no valid result. - - By default, the no_result() method reports NO RESULT for the test - and exits with a status of 2. If a condition argument is supplied, - the test fails only if the condition is true. 
- """ - if not condition: - return - if not function is None: - function() - of = "" - desc = "" - sep = " " - if not self is None: - if self.program: - of = " of " + self.program - sep = "\n\t" - if self.description: - desc = " [" + self.description + "]" - sep = "\n\t" - - at = _caller(traceback.extract_stack(), skip) - sys.stderr.write("NO RESULT for test" + of + desc + sep + at) - - sys.exit(2) - -def pass_test(self = None, condition = 1, function = None): - """Causes a test to pass. - - By default, the pass_test() method reports PASSED for the test - and exits with a status of 0. If a condition argument is supplied, - the test passes only if the condition is true. - """ - if not condition: - return - if not function is None: - function() - sys.stderr.write("PASSED\n") - sys.exit(0) - -def match_exact(lines = None, matches = None): - """ - """ - if not is_List(lines): - lines = string.split(lines, "\n") - if not is_List(matches): - matches = string.split(matches, "\n") - if len(lines) != len(matches): - return - for i in range(len(lines)): - if lines[i] != matches[i]: - return - return 1 - -def match_re(lines = None, res = None): - """ - """ - if not is_List(lines): - lines = string.split(lines, "\n") - if not is_List(res): - res = string.split(res, "\n") - if len(lines) != len(res): - return - for i in range(len(lines)): - s = "^" + res[i] + "$" - try: - expr = re.compile(s) - except re.error, e: - msg = "Regular expression error in %s: %s" - raise re.error, msg % (repr(s), e[0]) - if not expr.search(lines[i]): - return - return 1 - -def match_re_dotall(lines = None, res = None): - """ - """ - if not type(lines) is type(""): - lines = string.join(lines, "\n") - if not type(res) is type(""): - res = string.join(res, "\n") - s = "^" + res + "$" - try: - expr = re.compile(s, re.DOTALL) - except re.error, e: - msg = "Regular expression error in %s: %s" - raise re.error, msg % (repr(s), e[0]) - if expr.match(lines): - return 1 - -try: - import difflib -except ImportError: - pass -else: - def simple_diff(a, b, fromfile='', tofile='', - fromfiledate='', tofiledate='', n=3, lineterm='\n'): - """ - A function with the same calling signature as difflib.context_diff - (diff -c) and difflib.unified_diff (diff -u) but which prints - output like the simple, unadorned 'diff" command. - """ - sm = difflib.SequenceMatcher(None, a, b) - def comma(x1, x2): - return x1+1 == x2 and str(x2) or '%s,%s' % (x1+1, x2) - result = [] - for op, a1, a2, b1, b2 in sm.get_opcodes(): - if op == 'delete': - result.append("%sd%d" % (comma(a1, a2), b1)) - result.extend(map(lambda l: '< ' + l, a[a1:a2])) - elif op == 'insert': - result.append("%da%s" % (a1, comma(b1, b2))) - result.extend(map(lambda l: '> ' + l, b[b1:b2])) - elif op == 'replace': - result.append("%sc%s" % (comma(a1, a2), comma(b1, b2))) - result.extend(map(lambda l: '< ' + l, a[a1:a2])) - result.append('---') - result.extend(map(lambda l: '> ' + l, b[b1:b2])) - return result - -def diff_re(a, b, fromfile='', tofile='', - fromfiledate='', tofiledate='', n=3, lineterm='\n'): - """ - A simple "diff" of two sets of lines when the expected lines - are regular expressions. This is a really dumb thing that - just compares each line in turn, so it doesn't look for - chunks of matching lines and the like--but at least it lets - you know exactly which line first didn't compare correctl... 
- """ - result = [] - diff = len(a) - len(b) - if diff < 0: - a = a + ['']*(-diff) - elif diff > 0: - b = b + ['']*diff - i = 0 - for aline, bline in zip(a, b): - s = "^" + aline + "$" - try: - expr = re.compile(s) - except re.error, e: - msg = "Regular expression error in %s: %s" - raise re.error, msg % (repr(s), e[0]) - if not expr.search(bline): - result.append("%sc%s" % (i+1, i+1)) - result.append('< ' + repr(a[i])) - result.append('---') - result.append('> ' + repr(b[i])) - i = i+1 - return result - -if os.name == 'java': - - python_executable = os.path.join(sys.prefix, 'jython') - -else: - - python_executable = sys.executable - -if sys.platform == 'win32': - - default_sleep_seconds = 2 - - def where_is(file, path=None, pathext=None): - if path is None: - path = os.environ['PATH'] - if is_String(path): - path = string.split(path, os.pathsep) - if pathext is None: - pathext = os.environ['PATHEXT'] - if is_String(pathext): - pathext = string.split(pathext, os.pathsep) - for ext in pathext: - if string.lower(ext) == string.lower(file[-len(ext):]): - pathext = [''] - break - for dir in path: - f = os.path.join(dir, file) - for ext in pathext: - fext = f + ext - if os.path.isfile(fext): - return fext - return None - -else: - - def where_is(file, path=None, pathext=None): - if path is None: - path = os.environ['PATH'] - if is_String(path): - path = string.split(path, os.pathsep) - for dir in path: - f = os.path.join(dir, file) - if os.path.isfile(f): - try: - st = os.stat(f) - except OSError: - continue - if stat.S_IMODE(st[stat.ST_MODE]) & 0111: - return f - return None - - default_sleep_seconds = 1 - - - -try: - import subprocess -except ImportError: - # The subprocess module doesn't exist in this version of Python, - # so we're going to cobble up something that looks just enough - # like its API for our purposes below. - import new - - subprocess = new.module('subprocess') - - subprocess.PIPE = 'PIPE' - subprocess.STDOUT = 'STDOUT' - subprocess.mswindows = (sys.platform == 'win32') - - try: - import popen2 - popen2.Popen3 - except AttributeError: - class Popen3: - universal_newlines = 1 - def __init__(self, command, **kw): - if sys.platform == 'win32' and command[0] == '"': - command = '"' + command + '"' - (stdin, stdout, stderr) = os.popen3(' ' + command) - self.stdin = stdin - self.stdout = stdout - self.stderr = stderr - def close_output(self): - self.stdout.close() - self.resultcode = self.stderr.close() - def wait(self): - resultcode = self.resultcode - if os.WIFEXITED(resultcode): - return os.WEXITSTATUS(resultcode) - elif os.WIFSIGNALED(resultcode): - return os.WTERMSIG(resultcode) - else: - return None - - else: - try: - popen2.Popen4 - except AttributeError: - # A cribbed Popen4 class, with some retrofitted code from - # the Python 1.5 Popen3 class methods to do certain things - # by hand. 
- class Popen4(popen2.Popen3): - childerr = None - - def __init__(self, cmd, bufsize=-1): - p2cread, p2cwrite = os.pipe() - c2pread, c2pwrite = os.pipe() - self.pid = os.fork() - if self.pid == 0: - # Child - os.dup2(p2cread, 0) - os.dup2(c2pwrite, 1) - os.dup2(c2pwrite, 2) - for i in range(3, popen2.MAXFD): - try: - os.close(i) - except: pass - try: - os.execvp(cmd[0], cmd) - finally: - os._exit(1) - # Shouldn't come here, I guess - os._exit(1) - os.close(p2cread) - self.tochild = os.fdopen(p2cwrite, 'w', bufsize) - os.close(c2pwrite) - self.fromchild = os.fdopen(c2pread, 'r', bufsize) - popen2._active.append(self) - - popen2.Popen4 = Popen4 - - class Popen3(popen2.Popen3, popen2.Popen4): - universal_newlines = 1 - def __init__(self, command, **kw): - if kw.get('stderr') == 'STDOUT': - apply(popen2.Popen4.__init__, (self, command, 1)) - else: - apply(popen2.Popen3.__init__, (self, command, 1)) - self.stdin = self.tochild - self.stdout = self.fromchild - self.stderr = self.childerr - def wait(self, *args, **kw): - resultcode = apply(popen2.Popen3.wait, (self,)+args, kw) - if os.WIFEXITED(resultcode): - return os.WEXITSTATUS(resultcode) - elif os.WIFSIGNALED(resultcode): - return os.WTERMSIG(resultcode) - else: - return None - - subprocess.Popen = Popen3 - - - -# From Josiah Carlson, -# ASPN : Python Cookbook : Module to allow Asynchronous subprocess use on Windows and Posix platforms -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/440554 - -PIPE = subprocess.PIPE - -if subprocess.mswindows: - from win32file import ReadFile, WriteFile - from win32pipe import PeekNamedPipe - import msvcrt -else: - import select - import fcntl - - try: fcntl.F_GETFL - except AttributeError: fcntl.F_GETFL = 3 - - try: fcntl.F_SETFL - except AttributeError: fcntl.F_SETFL = 4 - -class Popen(subprocess.Popen): - def recv(self, maxsize=None): - return self._recv('stdout', maxsize) - - def recv_err(self, maxsize=None): - return self._recv('stderr', maxsize) - - def send_recv(self, input='', maxsize=None): - return self.send(input), self.recv(maxsize), self.recv_err(maxsize) - - def get_conn_maxsize(self, which, maxsize): - if maxsize is None: - maxsize = 1024 - elif maxsize < 1: - maxsize = 1 - return getattr(self, which), maxsize - - def _close(self, which): - getattr(self, which).close() - setattr(self, which, None) - - if subprocess.mswindows: - def send(self, input): - if not self.stdin: - return None - - try: - x = msvcrt.get_osfhandle(self.stdin.fileno()) - (errCode, written) = WriteFile(x, input) - except ValueError: - return self._close('stdin') - except (subprocess.pywintypes.error, Exception), why: - if why[0] in (109, errno.ESHUTDOWN): - return self._close('stdin') - raise - - return written - - def _recv(self, which, maxsize): - conn, maxsize = self.get_conn_maxsize(which, maxsize) - if conn is None: - return None - - try: - x = msvcrt.get_osfhandle(conn.fileno()) - (read, nAvail, nMessage) = PeekNamedPipe(x, 0) - if maxsize < nAvail: - nAvail = maxsize - if nAvail > 0: - (errCode, read) = ReadFile(x, nAvail, None) - except ValueError: - return self._close(which) - except (subprocess.pywintypes.error, Exception), why: - if why[0] in (109, errno.ESHUTDOWN): - return self._close(which) - raise - - #if self.universal_newlines: - # read = self._translate_newlines(read) - return read - - else: - def send(self, input): - if not self.stdin: - return None - - if not select.select([], [self.stdin], [], 0)[1]: - return 0 - - try: - written = os.write(self.stdin.fileno(), input) - except OSError, why: - 
if why[0] == errno.EPIPE: #broken pipe - return self._close('stdin') - raise - - return written - - def _recv(self, which, maxsize): - conn, maxsize = self.get_conn_maxsize(which, maxsize) - if conn is None: - return None - - try: - flags = fcntl.fcntl(conn, fcntl.F_GETFL) - except TypeError: - flags = None - else: - if not conn.closed: - fcntl.fcntl(conn, fcntl.F_SETFL, flags| os.O_NONBLOCK) - - try: - if not select.select([conn], [], [], 0)[0]: - return '' - - r = conn.read(maxsize) - if not r: - return self._close(which) - - #if self.universal_newlines: - # r = self._translate_newlines(r) - return r - finally: - if not conn.closed and not flags is None: - fcntl.fcntl(conn, fcntl.F_SETFL, flags) - -disconnect_message = "Other end disconnected!" - -def recv_some(p, t=.1, e=1, tr=5, stderr=0): - if tr < 1: - tr = 1 - x = time.time()+t - y = [] - r = '' - pr = p.recv - if stderr: - pr = p.recv_err - while time.time() < x or r: - r = pr() - if r is None: - if e: - raise Exception(disconnect_message) - else: - break - elif r: - y.append(r) - else: - time.sleep(max((x-time.time())/tr, 0)) - return ''.join(y) - -# TODO(3.0: rewrite to use memoryview() -def send_all(p, data): - while len(data): - sent = p.send(data) - if sent is None: - raise Exception(disconnect_message) - data = buffer(data, sent) - - - -try: - object -except NameError: - class object: - pass - - - -class TestCmd(object): - """Class TestCmd - """ - - def __init__(self, description = None, - program = None, - interpreter = None, - workdir = None, - subdir = None, - verbose = None, - match = None, - diff = None, - combine = 0, - universal_newlines = 1): - self._cwd = os.getcwd() - self.description_set(description) - self.program_set(program) - self.interpreter_set(interpreter) - if verbose is None: - try: - verbose = max( 0, int(os.environ.get('TESTCMD_VERBOSE', 0)) ) - except ValueError: - verbose = 0 - self.verbose_set(verbose) - self.combine = combine - self.universal_newlines = universal_newlines - if not match is None: - self.match_function = match - else: - self.match_function = match_re - if not diff is None: - self.diff_function = diff - else: - try: - difflib - except NameError: - pass - else: - self.diff_function = simple_diff - #self.diff_function = difflib.context_diff - #self.diff_function = difflib.unified_diff - self._dirlist = [] - self._preserve = {'pass_test': 0, 'fail_test': 0, 'no_result': 0} - if os.environ.has_key('PRESERVE') and not os.environ['PRESERVE'] is '': - self._preserve['pass_test'] = os.environ['PRESERVE'] - self._preserve['fail_test'] = os.environ['PRESERVE'] - self._preserve['no_result'] = os.environ['PRESERVE'] - else: - try: - self._preserve['pass_test'] = os.environ['PRESERVE_PASS'] - except KeyError: - pass - try: - self._preserve['fail_test'] = os.environ['PRESERVE_FAIL'] - except KeyError: - pass - try: - self._preserve['no_result'] = os.environ['PRESERVE_NO_RESULT'] - except KeyError: - pass - self._stdout = [] - self._stderr = [] - self.status = None - self.condition = 'no_result' - self.workdir_set(workdir) - self.subdir(subdir) - - def __del__(self): - self.cleanup() - - def __repr__(self): - return "%x" % id(self) - - banner_char = '=' - banner_width = 80 - - def banner(self, s, width=None): - if width is None: - width = self.banner_width - return s + self.banner_char * (width - len(s)) - - if os.name == 'posix': - - def escape(self, arg): - "escape shell special characters" - slash = '\\' - special = '"$' - - arg = string.replace(arg, slash, slash+slash) - for c in special: - arg = 
string.replace(arg, c, slash+c) - - if re_space.search(arg): - arg = '"' + arg + '"' - return arg - - else: - - # Windows does not allow special characters in file names - # anyway, so no need for an escape function, we will just quote - # the arg. - def escape(self, arg): - if re_space.search(arg): - arg = '"' + arg + '"' - return arg - - def canonicalize(self, path): - if is_List(path): - path = apply(os.path.join, tuple(path)) - if not os.path.isabs(path): - path = os.path.join(self.workdir, path) - return path - - def chmod(self, path, mode): - """Changes permissions on the specified file or directory - path name.""" - path = self.canonicalize(path) - os.chmod(path, mode) - - def cleanup(self, condition = None): - """Removes any temporary working directories for the specified - TestCmd environment. If the environment variable PRESERVE was - set when the TestCmd environment was created, temporary working - directories are not removed. If any of the environment variables - PRESERVE_PASS, PRESERVE_FAIL, or PRESERVE_NO_RESULT were set - when the TestCmd environment was created, then temporary working - directories are not removed if the test passed, failed, or had - no result, respectively. Temporary working directories are also - preserved for conditions specified via the preserve method. - - Typically, this method is not called directly, but is used when - the script exits to clean up temporary working directories as - appropriate for the exit status. - """ - if not self._dirlist: - return - os.chdir(self._cwd) - self.workdir = None - if condition is None: - condition = self.condition - if self._preserve[condition]: - for dir in self._dirlist: - print "Preserved directory", dir - else: - list = self._dirlist[:] - list.reverse() - for dir in list: - self.writable(dir, 1) - shutil.rmtree(dir, ignore_errors = 1) - self._dirlist = [] - - try: - global _Cleanup - _Cleanup.remove(self) - except (AttributeError, ValueError): - pass - - def command_args(self, program = None, - interpreter = None, - arguments = None): - if program: - if type(program) == type('') and not os.path.isabs(program): - program = os.path.join(self._cwd, program) - else: - program = self.program - if not interpreter: - interpreter = self.interpreter - if not type(program) in [type([]), type(())]: - program = [program] - cmd = list(program) - if interpreter: - if not type(interpreter) in [type([]), type(())]: - interpreter = [interpreter] - cmd = list(interpreter) + cmd - if arguments: - if type(arguments) == type(''): - arguments = string.split(arguments) - cmd.extend(arguments) - return cmd - - def description_set(self, description): - """Set the description of the functionality being tested. - """ - self.description = description - - try: - difflib - except NameError: - def diff(self, a, b, name, *args, **kw): - print self.banner('Expected %s' % name) - print a - print self.banner('Actual %s' % name) - print b - else: - def diff(self, a, b, name, *args, **kw): - print self.banner(name) - args = (a.splitlines(), b.splitlines()) + args - lines = apply(self.diff_function, args, kw) - for l in lines: - print l - - def fail_test(self, condition = 1, function = None, skip = 0): - """Cause the test to fail. - """ - if not condition: - return - self.condition = 'fail_test' - fail_test(self = self, - condition = condition, - function = function, - skip = skip) - - def interpreter_set(self, interpreter): - """Set the program to be used to interpret the program - under test as a script. 
- """ - self.interpreter = interpreter - - def match(self, lines, matches): - """Compare actual and expected file contents. - """ - return self.match_function(lines, matches) - - def match_exact(self, lines, matches): - """Compare actual and expected file contents. - """ - return match_exact(lines, matches) - - def match_re(self, lines, res): - """Compare actual and expected file contents. - """ - return match_re(lines, res) - - def match_re_dotall(self, lines, res): - """Compare actual and expected file contents. - """ - return match_re_dotall(lines, res) - - def no_result(self, condition = 1, function = None, skip = 0): - """Report that the test could not be run. - """ - if not condition: - return - self.condition = 'no_result' - no_result(self = self, - condition = condition, - function = function, - skip = skip) - - def pass_test(self, condition = 1, function = None): - """Cause the test to pass. - """ - if not condition: - return - self.condition = 'pass_test' - pass_test(self = self, condition = condition, function = function) - - def preserve(self, *conditions): - """Arrange for the temporary working directories for the - specified TestCmd environment to be preserved for one or more - conditions. If no conditions are specified, arranges for - the temporary working directories to be preserved for all - conditions. - """ - if conditions is (): - conditions = ('pass_test', 'fail_test', 'no_result') - for cond in conditions: - self._preserve[cond] = 1 - - def program_set(self, program): - """Set the executable program or script to be tested. - """ - if program and not os.path.isabs(program): - program = os.path.join(self._cwd, program) - self.program = program - - def read(self, file, mode = 'rb'): - """Reads and returns the contents of the specified file name. - The file name may be a list, in which case the elements are - concatenated with the os.path.join() method. The file is - assumed to be under the temporary working directory unless it - is an absolute path name. The I/O mode for the file may - be specified; it must begin with an 'r'. The default is - 'rb' (binary read). - """ - file = self.canonicalize(file) - if mode[0] != 'r': - raise ValueError, "mode must begin with 'r'" - return open(file, mode).read() - - def rmdir(self, dir): - """Removes the specified dir name. - The dir name may be a list, in which case the elements are - concatenated with the os.path.join() method. The dir is - assumed to be under the temporary working directory unless it - is an absolute path name. - The dir must be empty. - """ - dir = self.canonicalize(dir) - os.rmdir(dir) - - def start(self, program = None, - interpreter = None, - arguments = None, - universal_newlines = None, - **kw): - """ - Starts a program or script for the test environment. - - The specified program will have the original directory - prepended unless it is enclosed in a [list]. - """ - cmd = self.command_args(program, interpreter, arguments) - cmd_string = string.join(map(self.escape, cmd), ' ') - if self.verbose: - sys.stderr.write(cmd_string + "\n") - if universal_newlines is None: - universal_newlines = self.universal_newlines - - # On Windows, if we make stdin a pipe when we plan to send - # no input, and the test program exits before - # Popen calls msvcrt.open_osfhandle, that call will fail. - # So don't use a pipe for stdin if we don't need one. 
- stdin = kw.get('stdin', None) - if stdin is not None: - stdin = subprocess.PIPE - - combine = kw.get('combine', self.combine) - if combine: - stderr_value = subprocess.STDOUT - else: - stderr_value = subprocess.PIPE - - return Popen(cmd, - stdin=stdin, - stdout=subprocess.PIPE, - stderr=stderr_value, - universal_newlines=universal_newlines) - - def finish(self, popen, **kw): - """ - Finishes and waits for the process being run under control of - the specified popen argument, recording the exit status, - standard output and error output. - """ - popen.stdin.close() - self.status = popen.wait() - if not self.status: - self.status = 0 - self._stdout.append(popen.stdout.read()) - if popen.stderr: - stderr = popen.stderr.read() - else: - stderr = '' - self._stderr.append(stderr) - - def run(self, program = None, - interpreter = None, - arguments = None, - chdir = None, - stdin = None, - universal_newlines = None): - """Runs a test of the program or script for the test - environment. Standard output and error output are saved for - future retrieval via the stdout() and stderr() methods. - - The specified program will have the original directory - prepended unless it is enclosed in a [list]. - """ - if chdir: - oldcwd = os.getcwd() - if not os.path.isabs(chdir): - chdir = os.path.join(self.workpath(chdir)) - if self.verbose: - sys.stderr.write("chdir(" + chdir + ")\n") - os.chdir(chdir) - p = self.start(program, - interpreter, - arguments, - universal_newlines, - stdin=stdin) - if stdin: - if is_List(stdin): - for line in stdin: - p.stdin.write(line) - else: - p.stdin.write(stdin) - p.stdin.close() - - out = p.stdout.read() - if p.stderr is None: - err = '' - else: - err = p.stderr.read() - try: - close_output = p.close_output - except AttributeError: - p.stdout.close() - if not p.stderr is None: - p.stderr.close() - else: - close_output() - - self._stdout.append(out) - self._stderr.append(err) - - self.status = p.wait() - if not self.status: - self.status = 0 - - if chdir: - os.chdir(oldcwd) - if self.verbose >= 2: - write = sys.stdout.write - write('============ STATUS: %d\n' % self.status) - out = self.stdout() - if out or self.verbose >= 3: - write('============ BEGIN STDOUT (len=%d):\n' % len(out)) - write(out) - write('============ END STDOUT\n') - err = self.stderr() - if err or self.verbose >= 3: - write('============ BEGIN STDERR (len=%d)\n' % len(err)) - write(err) - write('============ END STDERR\n') - - def sleep(self, seconds = default_sleep_seconds): - """Sleeps at least the specified number of seconds. If no - number is specified, sleeps at least the minimum number of - seconds necessary to advance file time stamps on the current - system. Sleeping more seconds is all right. - """ - time.sleep(seconds) - - def stderr(self, run = None): - """Returns the error output from the specified run number. - If there is no specified run number, then returns the error - output of the last run. If the run number is less than zero, - then returns the error output from that many runs back from the - current run. - """ - if not run: - run = len(self._stderr) - elif run < 0: - run = len(self._stderr) + run - run = run - 1 - return self._stderr[run] - - def stdout(self, run = None): - """Returns the standard output from the specified run number. - If there is no specified run number, then returns the standard - output of the last run. If the run number is less than zero, - then returns the standard output from that many runs back from - the current run. 
- """ - if not run: - run = len(self._stdout) - elif run < 0: - run = len(self._stdout) + run - run = run - 1 - return self._stdout[run] - - def subdir(self, *subdirs): - """Create new subdirectories under the temporary working - directory, one for each argument. An argument may be a list, - in which case the list elements are concatenated using the - os.path.join() method. Subdirectories multiple levels deep - must be created using a separate argument for each level: - - test.subdir('sub', ['sub', 'dir'], ['sub', 'dir', 'ectory']) - - Returns the number of subdirectories actually created. - """ - count = 0 - for sub in subdirs: - if sub is None: - continue - if is_List(sub): - sub = apply(os.path.join, tuple(sub)) - new = os.path.join(self.workdir, sub) - try: - os.mkdir(new) - except OSError: - pass - else: - count = count + 1 - return count - - def symlink(self, target, link): - """Creates a symlink to the specified target. - The link name may be a list, in which case the elements are - concatenated with the os.path.join() method. The link is - assumed to be under the temporary working directory unless it - is an absolute path name. The target is *not* assumed to be - under the temporary working directory. - """ - link = self.canonicalize(link) - os.symlink(target, link) - - def tempdir(self, path=None): - """Creates a temporary directory. - A unique directory name is generated if no path name is specified. - The directory is created, and will be removed when the TestCmd - object is destroyed. - """ - if path is None: - try: - path = tempfile.mktemp(prefix=tempfile.template) - except TypeError: - path = tempfile.mktemp() - os.mkdir(path) - - # Symlinks in the path will report things - # differently from os.getcwd(), so chdir there - # and back to fetch the canonical path. - cwd = os.getcwd() - try: - os.chdir(path) - path = os.getcwd() - finally: - os.chdir(cwd) - - # Uppercase the drive letter since the case of drive - # letters is pretty much random on win32: - drive,rest = os.path.splitdrive(path) - if drive: - path = string.upper(drive) + rest - - # - self._dirlist.append(path) - global _Cleanup - try: - _Cleanup.index(self) - except ValueError: - _Cleanup.append(self) - - return path - - def touch(self, path, mtime=None): - """Updates the modification time on the specified file or - directory path name. The default is to update to the - current time if no explicit modification time is specified. - """ - path = self.canonicalize(path) - atime = os.path.getatime(path) - if mtime is None: - mtime = time.time() - os.utime(path, (atime, mtime)) - - def unlink(self, file): - """Unlinks the specified file name. - The file name may be a list, in which case the elements are - concatenated with the os.path.join() method. The file is - assumed to be under the temporary working directory unless it - is an absolute path name. - """ - file = self.canonicalize(file) - os.unlink(file) - - def verbose_set(self, verbose): - """Set the verbose level. - """ - self.verbose = verbose - - def where_is(self, file, path=None, pathext=None): - """Find an executable file. - """ - if is_List(file): - file = apply(os.path.join, tuple(file)) - if not os.path.isabs(file): - file = where_is(file, path, pathext) - return file - - def workdir_set(self, path): - """Creates a temporary working directory with the specified - path name. If the path is a null string (''), a unique - directory name is created. 
- """ - if (path != None): - if path == '': - path = None - path = self.tempdir(path) - self.workdir = path - - def workpath(self, *args): - """Returns the absolute path name to a subdirectory or file - within the current temporary working directory. Concatenates - the temporary working directory name with the specified - arguments using the os.path.join() method. - """ - return apply(os.path.join, (self.workdir,) + tuple(args)) - - def readable(self, top, read=1): - """Make the specified directory tree readable (read == 1) - or not (read == None). - - This method has no effect on Windows systems, which use a - completely different mechanism to control file readability. - """ - - if sys.platform == 'win32': - return - - if read: - def do_chmod(fname): - try: st = os.stat(fname) - except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IREAD)) - else: - def do_chmod(fname): - try: st = os.stat(fname) - except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IREAD)) - - if os.path.isfile(top): - # If it's a file, that's easy, just chmod it. - do_chmod(top) - elif read: - # It's a directory and we're trying to turn on read - # permission, so it's also pretty easy, just chmod the - # directory and then chmod every entry on our walk down the - # tree. Because os.path.walk() is top-down, we'll enable - # read permission on any directories that have it disabled - # before os.path.walk() tries to list their contents. - do_chmod(top) - - def chmod_entries(arg, dirname, names, do_chmod=do_chmod): - for n in names: - do_chmod(os.path.join(dirname, n)) - - os.path.walk(top, chmod_entries, None) - else: - # It's a directory and we're trying to turn off read - # permission, which means we have to chmod the directoreis - # in the tree bottom-up, lest disabling read permission from - # the top down get in the way of being able to get at lower - # parts of the tree. But os.path.walk() visits things top - # down, so we just use an object to collect a list of all - # of the entries in the tree, reverse the list, and then - # chmod the reversed (bottom-up) list. - col = Collector(top) - os.path.walk(top, col, None) - col.entries.reverse() - for d in col.entries: do_chmod(d) - - def writable(self, top, write=1): - """Make the specified directory tree writable (write == 1) - or not (write == None). - """ - - if sys.platform == 'win32': - - if write: - def do_chmod(fname): - try: os.chmod(fname, stat.S_IWRITE) - except OSError: pass - else: - def do_chmod(fname): - try: os.chmod(fname, stat.S_IREAD) - except OSError: pass - - else: - - if write: - def do_chmod(fname): - try: st = os.stat(fname) - except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|0200)) - else: - def do_chmod(fname): - try: st = os.stat(fname) - except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~0200)) - - if os.path.isfile(top): - do_chmod(top) - else: - col = Collector(top) - os.path.walk(top, col, None) - for d in col.entries: do_chmod(d) - - def executable(self, top, execute=1): - """Make the specified directory tree executable (execute == 1) - or not (execute == None). - - This method has no effect on Windows systems, which use a - completely different mechanism to control file executability. 
- """ - - if sys.platform == 'win32': - return - - if execute: - def do_chmod(fname): - try: st = os.stat(fname) - except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IEXEC)) - else: - def do_chmod(fname): - try: st = os.stat(fname) - except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IEXEC)) - - if os.path.isfile(top): - # If it's a file, that's easy, just chmod it. - do_chmod(top) - elif execute: - # It's a directory and we're trying to turn on execute - # permission, so it's also pretty easy, just chmod the - # directory and then chmod every entry on our walk down the - # tree. Because os.path.walk() is top-down, we'll enable - # execute permission on any directories that have it disabled - # before os.path.walk() tries to list their contents. - do_chmod(top) - - def chmod_entries(arg, dirname, names, do_chmod=do_chmod): - for n in names: - do_chmod(os.path.join(dirname, n)) - - os.path.walk(top, chmod_entries, None) - else: - # It's a directory and we're trying to turn off execute - # permission, which means we have to chmod the directories - # in the tree bottom-up, lest disabling execute permission from - # the top down get in the way of being able to get at lower - # parts of the tree. But os.path.walk() visits things top - # down, so we just use an object to collect a list of all - # of the entries in the tree, reverse the list, and then - # chmod the reversed (bottom-up) list. - col = Collector(top) - os.path.walk(top, col, None) - col.entries.reverse() - for d in col.entries: do_chmod(d) - - def write(self, file, content, mode = 'wb'): - """Writes the specified content text (second argument) to the - specified file name (first argument). The file name may be - a list, in which case the elements are concatenated with the - os.path.join() method. The file is created under the temporary - working directory. Any subdirectories in the path must already - exist. The I/O mode for the file may be specified; it must - begin with a 'w'. The default is 'wb' (binary write). - """ - file = self.canonicalize(file) - if mode[0] != 'w': - raise ValueError, "mode must begin with 'w'" - open(file, mode).write(content) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/mozc_build_tools/gyp/test/lib/TestCommon.py b/mozc_build_tools/gyp/test/lib/TestCommon.py deleted file mode 100644 index 4aa7185..0000000 --- a/mozc_build_tools/gyp/test/lib/TestCommon.py +++ /dev/null @@ -1,581 +0,0 @@ -""" -TestCommon.py: a testing framework for commands and scripts - with commonly useful error handling - -The TestCommon module provides a simple, high-level interface for writing -tests of executable commands and scripts, especially commands and scripts -that interact with the file system. All methods throw exceptions and -exit on failure, with useful error messages. This makes a number of -explicit checks unnecessary, making the test scripts themselves simpler -to write and easier to read. - -The TestCommon class is a subclass of the TestCmd class. In essence, -TestCommon is a wrapper that handles common TestCmd error conditions in -useful ways. You can use TestCommon directly, or subclass it for your -program and add additional (or override) methods to tailor it to your -program's specific needs. Alternatively, the TestCommon class serves -as a useful example of how to define your own TestCmd subclass. 
- -As a subclass of TestCmd, TestCommon provides access to all of the -variables and methods from the TestCmd module. Consequently, you can -use any variable or method documented in the TestCmd module without -having to explicitly import TestCmd. - -A TestCommon environment object is created via the usual invocation: - - import TestCommon - test = TestCommon.TestCommon() - -You can use all of the TestCmd keyword arguments when instantiating a -TestCommon object; see the TestCmd documentation for details. - -Here is an overview of the methods and keyword arguments that are -provided by the TestCommon class: - - test.must_be_writable('file1', ['file2', ...]) - - test.must_contain('file', 'required text\n') - - test.must_contain_all_lines(output, lines, ['title', find]) - - test.must_contain_any_line(output, lines, ['title', find]) - - test.must_exist('file1', ['file2', ...]) - - test.must_match('file', "expected contents\n") - - test.must_not_be_writable('file1', ['file2', ...]) - - test.must_not_contain('file', 'banned text\n') - - test.must_not_contain_any_line(output, lines, ['title', find]) - - test.must_not_exist('file1', ['file2', ...]) - - test.run(options = "options to be prepended to arguments", - stdout = "expected standard output from the program", - stderr = "expected error output from the program", - status = expected_status, - match = match_function) - -The TestCommon module also provides the following variables - - TestCommon.python_executable - TestCommon.exe_suffix - TestCommon.obj_suffix - TestCommon.shobj_prefix - TestCommon.shobj_suffix - TestCommon.lib_prefix - TestCommon.lib_suffix - TestCommon.dll_prefix - TestCommon.dll_suffix - -""" - -# Copyright 2000-2010 Steven Knight -# This module is free software, and you may redistribute it and/or modify -# it under the same terms as Python itself, so long as this copyright message -# and disclaimer are retained in their original form. -# -# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, -# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF -# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH -# DAMAGE. -# -# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, -# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, -# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. - -__author__ = "Steven Knight " -__revision__ = "TestCommon.py 0.37.D001 2010/01/11 16:55:50 knight" -__version__ = "0.37" - -import copy -import os -import os.path -import stat -import string -import sys -import types -import UserList - -from TestCmd import * -from TestCmd import __all__ - -__all__.extend([ 'TestCommon', - 'exe_suffix', - 'obj_suffix', - 'shobj_prefix', - 'shobj_suffix', - 'lib_prefix', - 'lib_suffix', - 'dll_prefix', - 'dll_suffix', - ]) - -# Variables that describe the prefixes and suffixes on this system. 
-if sys.platform == 'win32': - exe_suffix = '.exe' - obj_suffix = '.obj' - shobj_suffix = '.obj' - shobj_prefix = '' - lib_prefix = '' - lib_suffix = '.lib' - dll_prefix = '' - dll_suffix = '.dll' -elif sys.platform == 'cygwin': - exe_suffix = '.exe' - obj_suffix = '.o' - shobj_suffix = '.os' - shobj_prefix = '' - lib_prefix = 'lib' - lib_suffix = '.a' - dll_prefix = '' - dll_suffix = '.dll' -elif string.find(sys.platform, 'irix') != -1: - exe_suffix = '' - obj_suffix = '.o' - shobj_suffix = '.o' - shobj_prefix = '' - lib_prefix = 'lib' - lib_suffix = '.a' - dll_prefix = 'lib' - dll_suffix = '.so' -elif string.find(sys.platform, 'darwin') != -1: - exe_suffix = '' - obj_suffix = '.o' - shobj_suffix = '.os' - shobj_prefix = '' - lib_prefix = 'lib' - lib_suffix = '.a' - dll_prefix = 'lib' - dll_suffix = '.dylib' -elif string.find(sys.platform, 'sunos') != -1: - exe_suffix = '' - obj_suffix = '.o' - shobj_suffix = '.os' - shobj_prefix = 'so_' - lib_prefix = 'lib' - lib_suffix = '.a' - dll_prefix = 'lib' - dll_suffix = '.dylib' -else: - exe_suffix = '' - obj_suffix = '.o' - shobj_suffix = '.os' - shobj_prefix = '' - lib_prefix = 'lib' - lib_suffix = '.a' - dll_prefix = 'lib' - dll_suffix = '.so' - -def is_List(e): - return type(e) is types.ListType \ - or isinstance(e, UserList.UserList) - -def is_writable(f): - mode = os.stat(f)[stat.ST_MODE] - return mode & stat.S_IWUSR - -def separate_files(flist): - existing = [] - missing = [] - for f in flist: - if os.path.exists(f): - existing.append(f) - else: - missing.append(f) - return existing, missing - -if os.name == 'posix': - def _failed(self, status = 0): - if self.status is None or status is None: - return None - return _status(self) != status - def _status(self): - return self.status -elif os.name == 'nt': - def _failed(self, status = 0): - return not (self.status is None or status is None) and \ - self.status != status - def _status(self): - return self.status - -class TestCommon(TestCmd): - - # Additional methods from the Perl Test::Cmd::Common module - # that we may wish to add in the future: - # - # $test->subdir('subdir', ...); - # - # $test->copy('src_file', 'dst_file'); - - def __init__(self, **kw): - """Initialize a new TestCommon instance. This involves just - calling the base class initialization, and then changing directory - to the workdir. - """ - apply(TestCmd.__init__, [self], kw) - os.chdir(self.workdir) - - def must_be_writable(self, *files): - """Ensures that the specified file(s) exist and are writable. - An individual file can be specified as a list of directory names, - in which case the pathname will be constructed by concatenating - them. Exits FAILED if any of the files does not exist or is - not writable. - """ - files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) - existing, missing = separate_files(files) - unwritable = filter(lambda x, iw=is_writable: not iw(x), existing) - if missing: - print "Missing files: `%s'" % string.join(missing, "', `") - if unwritable: - print "Unwritable files: `%s'" % string.join(unwritable, "', `") - self.fail_test(missing + unwritable) - - def must_contain(self, file, required, mode = 'rb'): - """Ensures that the specified file contains the required text. - """ - file_contents = self.read(file, mode) - contains = (string.find(file_contents, required) != -1) - if not contains: - print "File `%s' does not contain required string." 
% file - print self.banner('Required string ') - print required - print self.banner('%s contents ' % file) - print file_contents - self.fail_test(not contains) - - def must_contain_all_lines(self, output, lines, title=None, find=None): - """Ensures that the specified output string (first argument) - contains all of the specified lines (second argument). - - An optional third argument can be used to describe the type - of output being searched, and only shows up in failure output. - - An optional fourth argument can be used to supply a different - function, of the form "find(line, output), to use when searching - for lines in the output. - """ - if find is None: - find = lambda o, l: string.find(o, l) != -1 - missing = [] - for line in lines: - if not find(output, line): - missing.append(line) - - if missing: - if title is None: - title = 'output' - sys.stdout.write("Missing expected lines from %s:\n" % title) - for line in missing: - sys.stdout.write(' ' + repr(line) + '\n') - sys.stdout.write(self.banner(title + ' ')) - sys.stdout.write(output) - self.fail_test() - - def must_contain_any_line(self, output, lines, title=None, find=None): - """Ensures that the specified output string (first argument) - contains at least one of the specified lines (second argument). - - An optional third argument can be used to describe the type - of output being searched, and only shows up in failure output. - - An optional fourth argument can be used to supply a different - function, of the form "find(line, output), to use when searching - for lines in the output. - """ - if find is None: - find = lambda o, l: string.find(o, l) != -1 - for line in lines: - if find(output, line): - return - - if title is None: - title = 'output' - sys.stdout.write("Missing any expected line from %s:\n" % title) - for line in lines: - sys.stdout.write(' ' + repr(line) + '\n') - sys.stdout.write(self.banner(title + ' ')) - sys.stdout.write(output) - self.fail_test() - - def must_contain_lines(self, lines, output, title=None): - # Deprecated; retain for backwards compatibility. - return self.must_contain_all_lines(output, lines, title) - - def must_exist(self, *files): - """Ensures that the specified file(s) must exist. An individual - file be specified as a list of directory names, in which case the - pathname will be constructed by concatenating them. Exits FAILED - if any of the files does not exist. - """ - files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) - missing = filter(lambda x: not os.path.exists(x), files) - if missing: - print "Missing files: `%s'" % string.join(missing, "', `") - self.fail_test(missing) - - def must_match(self, file, expect, mode = 'rb'): - """Matches the contents of the specified file (first argument) - against the expected contents (second argument). The expected - contents are a list of lines or a string which will be split - on newlines. - """ - file_contents = self.read(file, mode) - try: - self.fail_test(not self.match(file_contents, expect)) - except KeyboardInterrupt: - raise - except: - print "Unexpected contents of `%s'" % file - self.diff(expect, file_contents, 'contents ') - raise - - def must_not_contain(self, file, banned, mode = 'rb'): - """Ensures that the specified file doesn't contain the banned text. - """ - file_contents = self.read(file, mode) - contains = (string.find(file_contents, banned) != -1) - if contains: - print "File `%s' contains banned string." 
% file - print self.banner('Banned string ') - print banned - print self.banner('%s contents ' % file) - print file_contents - self.fail_test(contains) - - def must_not_contain_any_line(self, output, lines, title=None, find=None): - """Ensures that the specified output string (first argument) - does not contain any of the specified lines (second argument). - - An optional third argument can be used to describe the type - of output being searched, and only shows up in failure output. - - An optional fourth argument can be used to supply a different - function, of the form "find(line, output), to use when searching - for lines in the output. - """ - if find is None: - find = lambda o, l: string.find(o, l) != -1 - unexpected = [] - for line in lines: - if find(output, line): - unexpected.append(line) - - if unexpected: - if title is None: - title = 'output' - sys.stdout.write("Unexpected lines in %s:\n" % title) - for line in unexpected: - sys.stdout.write(' ' + repr(line) + '\n') - sys.stdout.write(self.banner(title + ' ')) - sys.stdout.write(output) - self.fail_test() - - def must_not_contain_lines(self, lines, output, title=None): - return self.must_not_contain_any_line(output, lines, title) - - def must_not_exist(self, *files): - """Ensures that the specified file(s) must not exist. - An individual file be specified as a list of directory names, in - which case the pathname will be constructed by concatenating them. - Exits FAILED if any of the files exists. - """ - files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) - existing = filter(os.path.exists, files) - if existing: - print "Unexpected files exist: `%s'" % string.join(existing, "', `") - self.fail_test(existing) - - - def must_not_be_writable(self, *files): - """Ensures that the specified file(s) exist and are not writable. - An individual file can be specified as a list of directory names, - in which case the pathname will be constructed by concatenating - them. Exits FAILED if any of the files does not exist or is - writable. - """ - files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) - existing, missing = separate_files(files) - writable = filter(is_writable, existing) - if missing: - print "Missing files: `%s'" % string.join(missing, "', `") - if writable: - print "Writable files: `%s'" % string.join(writable, "', `") - self.fail_test(missing + writable) - - def _complete(self, actual_stdout, expected_stdout, - actual_stderr, expected_stderr, status, match): - """ - Post-processes running a subcommand, checking for failure - status and displaying output appropriately. - """ - if _failed(self, status): - expect = '' - if status != 0: - expect = " (expected %s)" % str(status) - print "%s returned %s%s" % (self.program, str(_status(self)), expect) - print self.banner('STDOUT ') - print actual_stdout - print self.banner('STDERR ') - print actual_stderr - self.fail_test() - if not expected_stdout is None and not match(actual_stdout, expected_stdout): - self.diff(expected_stdout, actual_stdout, 'STDOUT ') - if actual_stderr: - print self.banner('STDERR ') - print actual_stderr - self.fail_test() - if not expected_stderr is None and not match(actual_stderr, expected_stderr): - print self.banner('STDOUT ') - print actual_stdout - self.diff(expected_stderr, actual_stderr, 'STDERR ') - self.fail_test() - - def start(self, program = None, - interpreter = None, - arguments = None, - universal_newlines = None, - **kw): - """ - Starts a program or script for the test environment. 
- - This handles the "options" keyword argument and exceptions. - """ - try: - options = kw['options'] - del kw['options'] - except KeyError: - pass - else: - if options: - if arguments is None: - arguments = options - else: - arguments = options + " " + arguments - try: - return apply(TestCmd.start, - (self, program, interpreter, arguments, universal_newlines), - kw) - except KeyboardInterrupt: - raise - except Exception, e: - print self.banner('STDOUT ') - try: - print self.stdout() - except IndexError: - pass - print self.banner('STDERR ') - try: - print self.stderr() - except IndexError: - pass - cmd_args = self.command_args(program, interpreter, arguments) - sys.stderr.write('Exception trying to execute: %s\n' % cmd_args) - raise e - - def finish(self, popen, stdout = None, stderr = '', status = 0, **kw): - """ - Finishes and waits for the process being run under control of - the specified popen argument. Additional arguments are similar - to those of the run() method: - - stdout The expected standard output from - the command. A value of None means - don't test standard output. - - stderr The expected error output from - the command. A value of None means - don't test error output. - - status The expected exit status from the - command. A value of None means don't - test exit status. - """ - apply(TestCmd.finish, (self, popen,), kw) - match = kw.get('match', self.match) - self._complete(self.stdout(), stdout, - self.stderr(), stderr, status, match) - - def run(self, options = None, arguments = None, - stdout = None, stderr = '', status = 0, **kw): - """Runs the program under test, checking that the test succeeded. - - The arguments are the same as the base TestCmd.run() method, - with the addition of: - - options Extra options that get appended to the beginning - of the arguments. - - stdout The expected standard output from - the command. A value of None means - don't test standard output. - - stderr The expected error output from - the command. A value of None means - don't test error output. - - status The expected exit status from the - command. A value of None means don't - test exit status. - - By default, this expects a successful exit (status = 0), does - not test standard output (stdout = None), and expects that error - output is empty (stderr = ""). - """ - if options: - if arguments is None: - arguments = options - else: - arguments = options + " " + arguments - kw['arguments'] = arguments - try: - match = kw['match'] - del kw['match'] - except KeyError: - match = self.match - apply(TestCmd.run, [self], kw) - self._complete(self.stdout(), stdout, - self.stderr(), stderr, status, match) - - def skip_test(self, message="Skipping test.\n"): - """Skips a test. - - Proper test-skipping behavior is dependent on the external - TESTCOMMON_PASS_SKIPS environment variable. If set, we treat - the skip as a PASS (exit 0), and otherwise treat it as NO RESULT. - In either case, we print the specified message as an indication - that the substance of the test was skipped. - - (This was originally added to support development under Aegis. - Technically, skipping a test is a NO RESULT, but Aegis would - treat that as a test failure and prevent the change from going to - the next step. Since we ddn't want to force anyone using Aegis - to have to install absolutely every tool used by the tests, we - would actually report to Aegis that a skipped test has PASSED - so that the workflow isn't held up.) 
- """ - if message: - sys.stdout.write(message) - sys.stdout.flush() - pass_skips = os.environ.get('TESTCOMMON_PASS_SKIPS') - if pass_skips in [None, 0, '0']: - # skip=1 means skip this function when showing where this - # result came from. They only care about the line where the - # script called test.skip_test(), not the line number where - # we call test.no_result(). - self.no_result(skip=1) - else: - # We're under the development directory for this change, - # so this is an Aegis invocation; pass the test (exit 0). - self.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/mozc_build_tools/gyp/test/lib/TestGyp.py b/mozc_build_tools/gyp/test/lib/TestGyp.py deleted file mode 100644 index 81e170c..0000000 --- a/mozc_build_tools/gyp/test/lib/TestGyp.py +++ /dev/null @@ -1,686 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -TestGyp.py: a testing framework for GYP integration tests. -""" - -import os -import re -import shutil -import stat -import sys - -import TestCommon -from TestCommon import __all__ - -__all__.extend([ - 'TestGyp', -]) - - -class TestGypBase(TestCommon.TestCommon): - """ - Class for controlling end-to-end tests of gyp generators. - - Instantiating this class will create a temporary directory and - arrange for its destruction (via the TestCmd superclass) and - copy all of the non-gyptest files in the directory hierarchy of the - executing script. - - The default behavior is to test the 'gyp' or 'gyp.bat' file in the - current directory. An alternative may be specified explicitly on - instantiation, or by setting the TESTGYP_GYP environment variable. - - This class should be subclassed for each supported gyp generator - (format). Various abstract methods below define calling signatures - used by the test scripts to invoke builds on the generated build - configuration and to run executables generated by those builds. - """ - - build_tool = None - build_tool_list = [] - - _exe = TestCommon.exe_suffix - _obj = TestCommon.obj_suffix - shobj_ = TestCommon.shobj_prefix - _shobj = TestCommon.shobj_suffix - lib_ = TestCommon.lib_prefix - _lib = TestCommon.lib_suffix - dll_ = TestCommon.dll_prefix - _dll = TestCommon.dll_suffix - - # Constants to represent different targets. - ALL = '__all__' - DEFAULT = '__default__' - - # Constants for different target types. - EXECUTABLE = '__executable__' - STATIC_LIB = '__static_lib__' - SHARED_LIB = '__shared_lib__' - - def __init__(self, gyp=None, *args, **kw): - self.origin_cwd = os.path.abspath(os.path.dirname(sys.argv[0])) - - if not gyp: - gyp = os.environ.get('TESTGYP_GYP') - if not gyp: - if sys.platform == 'win32': - gyp = 'gyp.bat' - else: - gyp = 'gyp' - self.gyp = os.path.abspath(gyp) - - self.initialize_build_tool() - - if not kw.has_key('match'): - kw['match'] = TestCommon.match_exact - - if not kw.has_key('workdir'): - # Default behavior: the null string causes TestCmd to create - # a temporary directory for us. 
- kw['workdir'] = '' - - formats = kw.get('formats', []) - if kw.has_key('formats'): - del kw['formats'] - - super(TestGypBase, self).__init__(*args, **kw) - - excluded_formats = set([f for f in formats if f[0] == '!']) - included_formats = set(formats) - excluded_formats - if ('!'+self.format in excluded_formats or - included_formats and self.format not in included_formats): - msg = 'Invalid test for %r format; skipping test.\n' - self.skip_test(msg % self.format) - - self.copy_test_configuration(self.origin_cwd, self.workdir) - self.set_configuration(None) - - def built_file_must_exist(self, name, type=None, **kw): - """ - Fails the test if the specified built file name does not exist. - """ - return self.must_exist(self.built_file_path(name, type, **kw)) - - def built_file_must_not_exist(self, name, type=None, **kw): - """ - Fails the test if the specified built file name exists. - """ - return self.must_not_exist(self.built_file_path(name, type, **kw)) - - def built_file_must_match(self, name, contents, **kw): - """ - Fails the test if the contents of the specified built file name - do not match the specified contents. - """ - return self.must_match(self.built_file_path(name, **kw), contents) - - def built_file_must_not_match(self, name, contents, **kw): - """ - Fails the test if the contents of the specified built file name - match the specified contents. - """ - return self.must_not_match(self.built_file_path(name, **kw), contents) - - def copy_test_configuration(self, source_dir, dest_dir): - """ - Copies the test configuration from the specified source_dir - (the directory in which the test script lives) to the - specified dest_dir (a temporary working directory). - - This ignores all files and directories that begin with - the string 'gyptest', and all '.svn' subdirectories. - """ - for root, dirs, files in os.walk(source_dir): - if '.svn' in dirs: - dirs.remove('.svn') - dirs = [ d for d in dirs if not d.startswith('gyptest') ] - files = [ f for f in files if not f.startswith('gyptest') ] - for dirname in dirs: - source = os.path.join(root, dirname) - destination = source.replace(source_dir, dest_dir) - os.mkdir(destination) - if sys.platform != 'win32': - shutil.copystat(source, destination) - for filename in files: - source = os.path.join(root, filename) - destination = source.replace(source_dir, dest_dir) - shutil.copy2(source, destination) - - def initialize_build_tool(self): - """ - Initializes the .build_tool attribute. - - Searches the .build_tool_list for an executable name on the user's - $PATH. The first tool on the list is used as-is if nothing is found - on the current $PATH. - """ - for build_tool in self.build_tool_list: - if not build_tool: - continue - if os.path.isabs(build_tool): - self.build_tool = build_tool - return - build_tool = self.where_is(build_tool) - if build_tool: - self.build_tool = build_tool - return - - if self.build_tool_list: - self.build_tool = self.build_tool_list[0] - - def relocate(self, source, destination): - """ - Renames (relocates) the specified source (usually a directory) - to the specified destination, creating the destination directory - first if necessary. - - Note: Don't use this as a generic "rename" operation. In the - future, "relocating" parts of a GYP tree may affect the state of - the test to modify the behavior of later method calls. 
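The copy_test_configuration() docstring above spells out the filtering rule: everything under the source directory is copied except entries whose names start with 'gyptest' and '.svn' subdirectories. On Python 3.8+ the same rule can be written compactly with shutil.copytree; this is a sketch of the rule, not the deleted implementation:

import shutil

def copy_test_configuration(source_dir, dest_dir):
    # Skip 'gyptest*' entries and '.svn' subdirectories, as described above;
    # dirs_exist_ok lets the copy land in the already-created temp workdir.
    shutil.copytree(source_dir, dest_dir,
                    ignore=shutil.ignore_patterns('gyptest*', '.svn'),
                    dirs_exist_ok=True)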
- """ - destination_dir = os.path.dirname(destination) - if not os.path.exists(destination_dir): - self.subdir(destination_dir) - os.rename(source, destination) - - def report_not_up_to_date(self): - """ - Reports that a build is not up-to-date. - - This provides common reporting for formats that have complicated - conditions for checking whether a build is up-to-date. Formats - that expect exact output from the command (make, scons) can - just set stdout= when they call the run_build() method. - """ - print "Build is not up-to-date:" - print self.banner('STDOUT ') - print self.stdout() - stderr = self.stderr() - if stderr: - print self.banner('STDERR ') - print stderr - - def run_gyp(self, gyp_file, *args, **kw): - """ - Runs gyp against the specified gyp_file with the specified args. - """ - # TODO: --depth=. works around Chromium-specific tree climbing. - args = ('--depth=.', '--format='+self.format, gyp_file) + args - return self.run(program=self.gyp, arguments=args, **kw) - - def run(self, *args, **kw): - """ - Executes a program by calling the superclass .run() method. - - This exists to provide a common place to filter out keyword - arguments implemented in this layer, without having to update - the tool-specific subclasses or clutter the tests themselves - with platform-specific code. - """ - if kw.has_key('SYMROOT'): - del kw['SYMROOT'] - super(TestGypBase, self).run(*args, **kw) - - def set_configuration(self, configuration): - """ - Sets the configuration, to be used for invoking the build - tool and testing potential built output. - """ - self.configuration = configuration - - def configuration_dirname(self): - if self.configuration: - return self.configuration.split('|')[0] - else: - return 'Default' - - def configuration_buildname(self): - if self.configuration: - return self.configuration - else: - return 'Default' - - # - # Abstract methods to be defined by format-specific subclasses. - # - - def build(self, gyp_file, target=None, **kw): - """ - Runs a build of the specified target against the configuration - generated from the specified gyp_file. - - A 'target' argument of None or the special value TestGyp.DEFAULT - specifies the default argument for the underlying build tool. - A 'target' argument of TestGyp.ALL specifies the 'all' target - (if any) of the underlying build tool. - """ - raise NotImplementedError - - def built_file_path(self, name, type=None, **kw): - """ - Returns a path to the specified file name, of the specified type. - """ - raise NotImplementedError - - def built_file_basename(self, name, type=None, **kw): - """ - Returns the base name of the specified file name, of the specified type. - - A bare=True keyword argument specifies that prefixes and suffixes shouldn't - be applied. - """ - if not kw.get('bare'): - if type == self.EXECUTABLE: - name = name + self._exe - elif type == self.STATIC_LIB: - name = self.lib_ + name + self._lib - elif type == self.SHARED_LIB: - name = self.dll_ + name + self._dll - return name - - def run_built_executable(self, name, *args, **kw): - """ - Runs an executable program built from a gyp-generated configuration. - - The specified name should be independent of any particular generator. - Subclasses should find the output executable in the appropriate - output build directory, tack on any necessary executable suffix, etc. - """ - raise NotImplementedError - - def up_to_date(self, gyp_file, target=None, **kw): - """ - Verifies that a build of the specified target is up to date. 
- - The subclass should implement this by calling build() - (or a reasonable equivalent), checking whatever conditions - will tell it the build was an "up to date" null build, and - failing if it isn't. - """ - raise NotImplementedError - - -class TestGypGypd(TestGypBase): - """ - Subclass for testing the GYP 'gypd' generator (spit out the - internal data structure as pretty-printed Python). - """ - format = 'gypd' - - -class TestGypMake(TestGypBase): - """ - Subclass for testing the GYP Make generator. - """ - format = 'make' - build_tool_list = ['make'] - ALL = 'all' - def build(self, gyp_file, target=None, **kw): - """ - Runs a Make build using the Makefiles generated from the specified - gyp_file. - """ - arguments = kw.get('arguments', [])[:] - if self.configuration: - arguments.append('BUILDTYPE=' + self.configuration) - if target not in (None, self.DEFAULT): - arguments.append(target) - # Sub-directory builds provide per-gyp Makefiles (i.e. - # Makefile.gyp_filename), so use that if there is no Makefile. - chdir = kw.get('chdir', '') - if not os.path.exists(os.path.join(chdir, 'Makefile')): - print "NO Makefile in " + os.path.join(chdir, 'Makefile') - arguments.insert(0, '-f') - arguments.insert(1, os.path.splitext(gyp_file)[0] + '.Makefile') - kw['arguments'] = arguments - return self.run(program=self.build_tool, **kw) - def up_to_date(self, gyp_file, target=None, **kw): - """ - Verifies that a build of the specified Make target is up to date. - """ - if target in (None, self.DEFAULT): - message_target = 'all' - else: - message_target = target - kw['stdout'] = "make: Nothing to be done for `%s'.\n" % message_target - return self.build(gyp_file, target, **kw) - def run_built_executable(self, name, *args, **kw): - """ - Runs an executable built by Make. - """ - configuration = self.configuration_dirname() - libdir = os.path.join('out', configuration, 'lib') - # TODO(piman): when everything is cross-compile safe, remove lib.target - os.environ['LD_LIBRARY_PATH'] = libdir + '.host:' + libdir + '.target' - # Enclosing the name in a list avoids prepending the original dir. - program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)] - return self.run(program=program, *args, **kw) - def built_file_path(self, name, type=None, **kw): - """ - Returns a path to the specified file name, of the specified type, - as built by Make. - - Built files are in the subdirectory 'out/{configuration}'. - The default is 'out/Default'. - - A chdir= keyword argument specifies the source directory - relative to which the output subdirectory can be found. - - "type" values of STATIC_LIB or SHARED_LIB append the necessary - prefixes and suffixes to a platform-independent library base name. - - A libdir= keyword argument specifies a library subdirectory other - than the default 'obj.target'. - """ - result = [] - chdir = kw.get('chdir') - if chdir: - result.append(chdir) - configuration = self.configuration_dirname() - result.extend(['out', configuration]) - if type == self.STATIC_LIB: - result.append(kw.get('libdir', 'obj.target')) - elif type == self.SHARED_LIB: - result.append(kw.get('libdir', 'lib.target')) - result.append(self.built_file_basename(name, type, **kw)) - return self.workpath(*result) - - -class TestGypMSVS(TestGypBase): - """ - Subclass for testing the GYP Visual Studio generator. 
- """ - format = 'msvs' - - u = r'=== Build: 0 succeeded, 0 failed, (\d+) up-to-date, 0 skipped ===' - up_to_date_re = re.compile(u, re.M) - - # Initial None element will indicate to our .initialize_build_tool() - # method below that 'devenv' was not found on %PATH%. - # - # Note: we must use devenv.com to be able to capture build output. - # Directly executing devenv.exe only sends output to BuildLog.htm. - build_tool_list = [None, 'devenv.com'] - - def initialize_build_tool(self): - """ - Initializes the Visual Studio .build_tool parameter, searching %PATH% - and %PATHEXT% for a devenv.{exe,bat,...} executable, and falling - back to a hard-coded default (on the current drive) if necessary. - """ - super(TestGypMSVS, self).initialize_build_tool() - if not self.build_tool: - # We didn't find 'devenv' on the path. Just hard-code a default, - # and revisit this if it becomes important. - possible = [ - # Note: if you're using this, set GYP_MSVS_VERSION=2008 - # to get the tests to pass. - ('C:\\Program Files (x86)', - 'Microsoft Visual Studio 9.0', 'Common7', 'IDE', 'devenv.com'), - ('C:\\Program Files', - 'Microsoft Visual Studio 9.0', 'Common7', 'IDE', 'devenv.com'), - ('C:\\Program Files (x86)', - 'Microsoft Visual Studio 8', 'Common7', 'IDE', 'devenv.com'), - ('C:\\Program Files', - 'Microsoft Visual Studio 8', 'Common7', 'IDE', 'devenv.com'), - ] - for build_tool in possible: - bt = os.path.join(*build_tool) - if os.path.exists(bt): - self.build_tool = bt - break - def build(self, gyp_file, target=None, rebuild=False, **kw): - """ - Runs a Visual Studio build using the configuration generated - from the specified gyp_file. - """ - configuration = self.configuration_buildname() - if rebuild: - build = '/Rebuild' - else: - build = '/Build' - arguments = kw.get('arguments', [])[:] - arguments.extend([gyp_file.replace('.gyp', '.sln'), - build, configuration]) - # Note: the Visual Studio generator doesn't add an explicit 'all' - # target, so we just treat it the same as the default. - if target not in (None, self.ALL, self.DEFAULT): - arguments.extend(['/Project', target]) - if self.configuration: - arguments.extend(['/ProjectConfig', self.configuration]) - kw['arguments'] = arguments - return self.run(program=self.build_tool, **kw) - def up_to_date(self, gyp_file, target=None, **kw): - """ - Verifies that a build of the specified Visual Studio target is up to date. - """ - result = self.build(gyp_file, target, **kw) - if not result: - stdout = self.stdout() - m = self.up_to_date_re.search(stdout) - if not m or m.group(1) == '0': - self.report_not_up_to_date() - self.fail_test() - return result - def run_built_executable(self, name, *args, **kw): - """ - Runs an executable built by Visual Studio. - """ - configuration = self.configuration_dirname() - # Enclosing the name in a list avoids prepending the original dir. - program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)] - return self.run(program=program, *args, **kw) - def built_file_path(self, name, type=None, **kw): - """ - Returns a path to the specified file name, of the specified type, - as built by Visual Studio. - - Built files are in a subdirectory that matches the configuration - name. The default is 'Default'. - - A chdir= keyword argument specifies the source directory - relative to which the output subdirectory can be found. - - "type" values of STATIC_LIB or SHARED_LIB append the necessary - prefixes and suffixes to a platform-independent library base name. 
- """ - result = [] - chdir = kw.get('chdir') - if chdir: - result.append(chdir) - result.append(self.configuration_dirname()) - if type == self.STATIC_LIB: - result.append('lib') - result.append(self.built_file_basename(name, type, **kw)) - return self.workpath(*result) - - -class TestGypSCons(TestGypBase): - """ - Subclass for testing the GYP SCons generator. - """ - format = 'scons' - build_tool_list = ['scons', 'scons.py'] - ALL = 'all' - def build(self, gyp_file, target=None, **kw): - """ - Runs a scons build using the SCons configuration generated from the - specified gyp_file. - """ - arguments = kw.get('arguments', [])[:] - dirname = os.path.dirname(gyp_file) - if dirname: - arguments.extend(['-C', dirname]) - if self.configuration: - arguments.append('--mode=' + self.configuration) - if target not in (None, self.DEFAULT): - arguments.append(target) - kw['arguments'] = arguments - return self.run(program=self.build_tool, **kw) - def up_to_date(self, gyp_file, target=None, **kw): - """ - Verifies that a build of the specified SCons target is up to date. - """ - if target in (None, self.DEFAULT): - up_to_date_targets = 'all' - else: - up_to_date_targets = target - up_to_date_lines = [] - for arg in up_to_date_targets.split(): - up_to_date_lines.append("scons: `%s' is up to date.\n" % arg) - kw['stdout'] = ''.join(up_to_date_lines) - arguments = kw.get('arguments', [])[:] - arguments.append('-Q') - kw['arguments'] = arguments - return self.build(gyp_file, target, **kw) - def run_built_executable(self, name, *args, **kw): - """ - Runs an executable built by scons. - """ - configuration = self.configuration_dirname() - os.environ['LD_LIBRARY_PATH'] = os.path.join(configuration, 'lib') - # Enclosing the name in a list avoids prepending the original dir. - program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)] - return self.run(program=program, *args, **kw) - def built_file_path(self, name, type=None, **kw): - """ - Returns a path to the specified file name, of the specified type, - as built by Scons. - - Built files are in a subdirectory that matches the configuration - name. The default is 'Default'. - - A chdir= keyword argument specifies the source directory - relative to which the output subdirectory can be found. - - "type" values of STATIC_LIB or SHARED_LIB append the necessary - prefixes and suffixes to a platform-independent library base name. - """ - result = [] - chdir = kw.get('chdir') - if chdir: - result.append(chdir) - result.append(self.configuration_dirname()) - if type in (self.STATIC_LIB, self.SHARED_LIB): - result.append('lib') - result.append(self.built_file_basename(name, type, **kw)) - return self.workpath(*result) - - -class TestGypXcode(TestGypBase): - """ - Subclass for testing the GYP Xcode generator. - """ - format = 'xcode' - build_tool_list = ['xcodebuild'] - - phase_script_execution = ("\n" - "PhaseScriptExecution /\\S+/Script-[0-9A-F]+\\.sh\n" - " cd /\\S+\n" - " /bin/sh -c /\\S+/Script-[0-9A-F]+\\.sh\n" - "(make: Nothing to be done for `all'\\.\n)?") - - strip_up_to_date_expressions = [ - # Various actions or rules can run even when the overall build target - # is up to date. Strip those phases' GYP-generated output. - re.compile(phase_script_execution, re.S), - - # The message from distcc_pump can trail the "BUILD SUCCEEDED" - # message, so strip that, too. 
- re.compile('__________Shutting down distcc-pump include server\n', re.S), - ] - - up_to_date_ending = 'Checking Dependencies...\n** BUILD SUCCEEDED **\n' - - def build(self, gyp_file, target=None, **kw): - """ - Runs an xcodebuild using the .xcodeproj generated from the specified - gyp_file. - """ - # Be sure we're working with a copy of 'arguments' since we modify it. - # The caller may not be expecting it to be modified. - arguments = kw.get('arguments', [])[:] - arguments.extend(['-project', gyp_file.replace('.gyp', '.xcodeproj')]) - if target == self.ALL: - arguments.append('-alltargets',) - elif target not in (None, self.DEFAULT): - arguments.extend(['-target', target]) - if self.configuration: - arguments.extend(['-configuration', self.configuration]) - symroot = kw.get('SYMROOT', '$SRCROOT/build') - if symroot: - arguments.append('SYMROOT='+symroot) - kw['arguments'] = arguments - return self.run(program=self.build_tool, **kw) - def up_to_date(self, gyp_file, target=None, **kw): - """ - Verifies that a build of the specified Xcode target is up to date. - """ - result = self.build(gyp_file, target, **kw) - if not result: - output = self.stdout() - for expression in self.strip_up_to_date_expressions: - output = expression.sub('', output) - if not output.endswith(self.up_to_date_ending): - self.report_not_up_to_date() - self.fail_test() - return result - def run_built_executable(self, name, *args, **kw): - """ - Runs an executable built by xcodebuild. - """ - configuration = self.configuration_dirname() - os.environ['DYLD_LIBRARY_PATH'] = os.path.join('build', configuration) - # Enclosing the name in a list avoids prepending the original dir. - program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)] - return self.run(program=program, *args, **kw) - def built_file_path(self, name, type=None, **kw): - """ - Returns a path to the specified file name, of the specified type, - as built by Xcode. - - Built files are in the subdirectory 'build/{configuration}'. - The default is 'build/Default'. - - A chdir= keyword argument specifies the source directory - relative to which the output subdirectory can be found. - - "type" values of STATIC_LIB or SHARED_LIB append the necessary - prefixes and suffixes to a platform-independent library base name. - """ - result = [] - chdir = kw.get('chdir') - if chdir: - result.append(chdir) - configuration = self.configuration_dirname() - result.extend(['build', configuration]) - result.append(self.built_file_basename(name, type, **kw)) - return self.workpath(*result) - - -format_class_list = [ - TestGypGypd, - TestGypMake, - TestGypMSVS, - TestGypSCons, - TestGypXcode, -] - -def TestGyp(*args, **kw): - """ - Returns an appropriate TestGyp* instance for a specified GYP format. - """ - format = kw.get('format') - if format: - del kw['format'] - else: - format = os.environ.get('TESTGYP_FORMAT') - for format_class in format_class_list: - if format == format_class.format: - return format_class(*args, **kw) - raise Exception, "unknown format %r" % format diff --git a/mozc_build_tools/gyp/test/library/gyptest-shared.py b/mozc_build_tools/gyp/test/library/gyptest-shared.py deleted file mode 100644 index a1d2985..0000000 --- a/mozc_build_tools/gyp/test/library/gyptest-shared.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple build of a "Hello, world!" 
program with shared libraries, -including verifying that libraries are rebuilt correctly when functions -move between libraries. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('library.gyp', - '-Dlibrary=shared_library', - '-Dmoveable_function=lib1', - chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('library.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from program.c -Hello from lib1.c -Hello from lib2.c -Hello from lib1_moveable.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.run_gyp('library.gyp', - '-Dlibrary=shared_library', - '-Dmoveable_function=lib2', - chdir='relocate/src') - -# Update program.c to force a rebuild. -test.sleep() -contents = test.read('relocate/src/program.c') -contents = contents.replace('Hello', 'Hello again') -test.write('relocate/src/program.c', contents) - -test.build('library.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello again from program.c -Hello from lib1.c -Hello from lib2.c -Hello from lib2_moveable.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.run_gyp('library.gyp', - '-Dlibrary=shared_library', - '-Dmoveable_function=lib1', - chdir='relocate/src') - -# Update program.c to force a rebuild. -test.sleep() -contents = test.read('relocate/src/program.c') -contents = contents.replace('again', 'again again') -test.write('relocate/src/program.c', contents) - -# TODO(sgk): we have to force a rebuild of lib2 so that it weeds out -# the "moved" module. This should be done in gyp by adding a dependency -# on the generated .vcproj file itself. -test.touch('relocate/src/lib2.c') - -test.build('library.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello again again from program.c -Hello from lib1.c -Hello from lib2.c -Hello from lib1_moveable.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/library/gyptest-static.py b/mozc_build_tools/gyp/test/library/gyptest-static.py deleted file mode 100644 index 4bc71c4..0000000 --- a/mozc_build_tools/gyp/test/library/gyptest-static.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple build of a "Hello, world!" program with static libraries, -including verifying that libraries are rebuilt correctly when functions -move between libraries. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('library.gyp', - '-Dlibrary=static_library', - '-Dmoveable_function=lib1', - chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('library.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from program.c -Hello from lib1.c -Hello from lib2.c -Hello from lib1_moveable.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.run_gyp('library.gyp', - '-Dlibrary=static_library', - '-Dmoveable_function=lib2', - chdir='relocate/src') - -# Update program.c to force a rebuild. 
-test.sleep() -contents = test.read('relocate/src/program.c') -contents = contents.replace('Hello', 'Hello again') -test.write('relocate/src/program.c', contents) - -test.build('library.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello again from program.c -Hello from lib1.c -Hello from lib2.c -Hello from lib2_moveable.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.run_gyp('library.gyp', - '-Dlibrary=static_library', - '-Dmoveable_function=lib1', - chdir='relocate/src') - -# Update program.c and lib2.c to force a rebuild. -test.sleep() -contents = test.read('relocate/src/program.c') -contents = contents.replace('again', 'again again') -test.write('relocate/src/program.c', contents) - -# TODO(sgk): we have to force a rebuild of lib2 so that it weeds out -# the "moved" module. This should be done in gyp by adding a dependency -# on the generated .vcproj file itself. -test.touch('relocate/src/lib2.c') - -test.build('library.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello again again from program.c -Hello from lib1.c -Hello from lib2.c -Hello from lib1_moveable.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/library/src/lib1.c b/mozc_build_tools/gyp/test/library/src/lib1.c deleted file mode 100644 index 3866b1b..0000000 --- a/mozc_build_tools/gyp/test/library/src/lib1.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#ifdef _WIN32 -__declspec(dllexport) -#endif -void lib1_function(void) -{ - fprintf(stdout, "Hello from lib1.c\n"); - fflush(stdout); -} diff --git a/mozc_build_tools/gyp/test/library/src/lib1_moveable.c b/mozc_build_tools/gyp/test/library/src/lib1_moveable.c deleted file mode 100644 index 5d3cc1d..0000000 --- a/mozc_build_tools/gyp/test/library/src/lib1_moveable.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#ifdef _WIN32 -__declspec(dllexport) -#endif -void moveable_function(void) -{ - fprintf(stdout, "Hello from lib1_moveable.c\n"); - fflush(stdout); -} diff --git a/mozc_build_tools/gyp/test/library/src/lib2.c b/mozc_build_tools/gyp/test/library/src/lib2.c deleted file mode 100644 index 21dda72..0000000 --- a/mozc_build_tools/gyp/test/library/src/lib2.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#ifdef _WIN32 -__declspec(dllexport) -#endif -void lib2_function(void) -{ - fprintf(stdout, "Hello from lib2.c\n"); - fflush(stdout); -} diff --git a/mozc_build_tools/gyp/test/library/src/lib2_moveable.c b/mozc_build_tools/gyp/test/library/src/lib2_moveable.c deleted file mode 100644 index f645071..0000000 --- a/mozc_build_tools/gyp/test/library/src/lib2_moveable.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#ifdef _WIN32 -__declspec(dllexport) -#endif -void moveable_function(void) -{ - fprintf(stdout, "Hello from lib2_moveable.c\n"); - fflush(stdout); -} diff --git a/mozc_build_tools/gyp/test/library/src/library.gyp b/mozc_build_tools/gyp/test/library/src/library.gyp deleted file mode 100644 index bc35516..0000000 --- a/mozc_build_tools/gyp/test/library/src/library.gyp +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'variables': { - 'moveable_function%': 0, - }, - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'dependencies': [ - 'lib1', - 'lib2', - ], - 'sources': [ - 'program.c', - ], - }, - { - 'target_name': 'lib1', - 'type': '<(library)', - 'sources': [ - 'lib1.c', - ], - 'conditions': [ - ['moveable_function=="lib1"', { - 'sources': [ - 'lib1_moveable.c', - ], - }], - ], - }, - { - 'target_name': 'lib2', - 'type': '<(library)', - 'sources': [ - 'lib2.c', - ], - 'conditions': [ - ['moveable_function=="lib2"', { - 'sources': [ - 'lib2_moveable.c', - ], - }], - ], - }, - ], - 'conditions': [ - ['OS=="linux"', { - 'target_defaults': { - # Support 64-bit shared libs (also works fine for 32-bit). - 'cflags': ['-fPIC'], - }, - }], - ], -} diff --git a/mozc_build_tools/gyp/test/library/src/program.c b/mozc_build_tools/gyp/test/library/src/program.c deleted file mode 100644 index d7712cc..0000000 --- a/mozc_build_tools/gyp/test/library/src/program.c +++ /dev/null @@ -1,15 +0,0 @@ -#include - -extern void lib1_function(void); -extern void lib2_function(void); -extern void moveable_function(void); - -int main(int argc, char *argv[]) -{ - fprintf(stdout, "Hello from program.c\n"); - fflush(stdout); - lib1_function(); - lib2_function(); - moveable_function(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/module/gyptest-default.py b/mozc_build_tools/gyp/test/module/gyptest-default.py deleted file mode 100644 index 6b1c9b6..0000000 --- a/mozc_build_tools/gyp/test/module/gyptest-default.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple build of a "Hello, world!" program with loadable modules. The -default for all platforms should be to output the loadable modules to the same -path as the executable. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('module.gyp', chdir='src') - -test.build('module.gyp', test.ALL, chdir='src') - -expect = """\ -Hello from program.c -Hello from lib1.c -Hello from lib2.c -""" -test.run_built_executable('program', chdir='src', stdout=expect) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/module/src/lib1.c b/mozc_build_tools/gyp/test/module/src/lib1.c deleted file mode 100644 index 8de0e94..0000000 --- a/mozc_build_tools/gyp/test/module/src/lib1.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#ifdef _WIN32 -__declspec(dllexport) -#endif -void module_main(void) -{ - fprintf(stdout, "Hello from lib1.c\n"); - fflush(stdout); -} diff --git a/mozc_build_tools/gyp/test/module/src/lib2.c b/mozc_build_tools/gyp/test/module/src/lib2.c deleted file mode 100644 index 266396d..0000000 --- a/mozc_build_tools/gyp/test/module/src/lib2.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#ifdef _WIN32 -__declspec(dllexport) -#endif -void module_main(void) -{ - fprintf(stdout, "Hello from lib2.c\n"); - fflush(stdout); -} diff --git a/mozc_build_tools/gyp/test/module/src/module.gyp b/mozc_build_tools/gyp/test/module/src/module.gyp deleted file mode 100644 index bb43c30..0000000 --- a/mozc_build_tools/gyp/test/module/src/module.gyp +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'target_defaults': { - 'conditions': [ - ['OS=="win"', { - 'defines': ['PLATFORM_WIN'], - }], - ['OS=="mac"', { - 'defines': ['PLATFORM_MAC'], - }], - ['OS=="linux"', { - 'defines': ['PLATFORM_LINUX'], - # Support 64-bit shared libs (also works fine for 32-bit). - 'cflags': ['-fPIC'], - 'ldflags': ['-ldl'], - }], - ], - }, - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'dependencies': [ - 'lib1', - 'lib2', - ], - 'sources': [ - 'program.c', - ], - }, - { - 'target_name': 'lib1', - 'type': 'loadable_module', - 'product_name': 'lib1', - 'product_prefix': '', - 'xcode_settings': {'OTHER_LDFLAGS': ['-dynamiclib'], 'MACH_O_TYPE': ''}, - 'sources': [ - 'lib1.c', - ], - }, - { - 'target_name': 'lib2', - 'product_name': 'lib2', - 'product_prefix': '', - 'type': 'loadable_module', - 'xcode_settings': {'OTHER_LDFLAGS': ['-dynamiclib'], 'MACH_O_TYPE': ''}, - 'sources': [ - 'lib2.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/module/src/program.c b/mozc_build_tools/gyp/test/module/src/program.c deleted file mode 100644 index a48f940..0000000 --- a/mozc_build_tools/gyp/test/module/src/program.c +++ /dev/null @@ -1,111 +0,0 @@ -#include -#include - -#if defined(PLATFORM_WIN) -#include -#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX) -#include -#include -#include -#include -#define MAX_PATH PATH_MAX -#endif - -#if defined(PLATFORM_WIN) -#define MODULE_SUFFIX ".dll" -#elif defined(PLATFORM_MAC) -#define MODULE_SUFFIX ".dylib" -#elif defined(PLATFORM_LINUX) -#define MODULE_SUFFIX ".so" -#endif - -typedef void (*module_symbol)(void); -char bin_path[MAX_PATH + 1]; - - -void CallModule(const char* module) { - char module_path[MAX_PATH + 1]; - const char* module_function = "module_main"; - module_symbol funcptr; -#if defined(PLATFORM_WIN) - HMODULE dl; - char drive[_MAX_DRIVE]; - char dir[_MAX_DIR]; - - if (_splitpath_s(bin_path, drive, _MAX_DRIVE, dir, _MAX_DIR, - NULL, 0, NULL, 0)) { - fprintf(stderr, "Failed to split executable path.\n"); - return; - } - if (_makepath_s(module_path, MAX_PATH, drive, dir, module, MODULE_SUFFIX)) { - fprintf(stderr, "Failed to calculate module path.\n"); - return; - } - - dl = LoadLibrary(module_path); - if (!dl) { - fprintf(stderr, "Failed to open module: %s\n", module_path); - return; - } - - funcptr = (module_symbol) GetProcAddress(dl, module_function); - if (!funcptr) { - fprintf(stderr, "Failed to find symbol: %s\n", module_function); - return; - } - funcptr(); - - FreeLibrary(dl); -#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX) - void* dl; - char* path_copy = strdup(bin_path); - char* bin_dir = dirname(path_copy); - int path_size = snprintf(module_path, MAX_PATH, "%s/%s%s", bin_dir, module, - MODULE_SUFFIX); - free(path_copy); - if (path_size < 0 || path_size > MAX_PATH) { - fprintf(stderr, "Failed to calculate module path.\n"); - return; - } - module_path[path_size] = 0; - - dl = dlopen(module_path, RTLD_LAZY); - if (!dl) { - fprintf(stderr, "Failed to open module: %s\n", module_path); - return; - } - - funcptr = dlsym(dl, module_function); - if (!funcptr) { - fprintf(stderr, "Failed to find symbol: %s\n", module_function); - return; - } - funcptr(); - - dlclose(dl); -#endif -} - -int main(int argc, char *argv[]) -{ - fprintf(stdout, "Hello from program.c\n"); - fflush(stdout); - -#if defined(PLATFORM_WIN) - if (!GetModuleFileName(NULL, bin_path, MAX_PATH)) { - fprintf(stderr, "Failed to determine executable path.\n"); - return; - } -#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX) - // Using argv[0] 
should be OK here since we control how the tests run, and - // can avoid exec and such issues that make it unreliable. - if (!realpath(argv[0], bin_path)) { - fprintf(stderr, "Failed to determine executable path (%s).\n", argv[0]); - return; - } -#endif - - CallModule("lib1"); - CallModule("lib2"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/msvs/express/base/base.gyp b/mozc_build_tools/gyp/test/msvs/express/base/base.gyp deleted file mode 100644 index b7c9fc6..0000000 --- a/mozc_build_tools/gyp/test/msvs/express/base/base.gyp +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'a', - 'type': 'static_library', - 'sources': [ - 'a.c', - ], - }, - { - 'target_name': 'b', - 'type': 'static_library', - 'sources': [ - 'b.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/msvs/express/express.gyp b/mozc_build_tools/gyp/test/msvs/express/express.gyp deleted file mode 100644 index 917abe2..0000000 --- a/mozc_build_tools/gyp/test/msvs/express/express.gyp +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'express', - 'type': 'executable', - 'dependencies': [ - 'base/base.gyp:a', - 'base/base.gyp:b', - ], - 'sources': [ - 'main.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/msvs/express/gyptest-express.py b/mozc_build_tools/gyp/test/msvs/express/gyptest-express.py deleted file mode 100644 index 54c06f6..0000000 --- a/mozc_build_tools/gyp/test/msvs/express/gyptest-express.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that flat solutions get generated for Express versions of -Visual Studio. -""" - -import TestGyp - -test = TestGyp.TestGyp(formats=['msvs']) - -test.run_gyp('express.gyp', '-G', 'msvs_version=2005') -test.must_contain('express.sln', '(base)') - -test.run_gyp('express.gyp', '-G', 'msvs_version=2008') -test.must_contain('express.sln', '(base)') - -test.run_gyp('express.gyp', '-G', 'msvs_version=2005e') -test.must_not_contain('express.sln', '(base)') - -test.run_gyp('express.gyp', '-G', 'msvs_version=2008e') -test.must_not_contain('express.sln', '(base)') - - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/multiple-targets/gyptest-all.py b/mozc_build_tools/gyp/test/multiple-targets/gyptest-all.py deleted file mode 100644 index 9f157c4..0000000 --- a/mozc_build_tools/gyp/test/multiple-targets/gyptest-all.py +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -""" -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('multiple.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -# TODO(sgk): remove stderr=None when the --generator-output= support -# gets rid of the scons warning -test.build('multiple.gyp', test.ALL, chdir='relocate/src', stderr=None) - -expect1 = """\ -hello from prog1.c -hello from common.c -""" - -expect2 = """\ -hello from prog2.c -hello from common.c -""" - -test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src') -test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/multiple-targets/gyptest-default.py b/mozc_build_tools/gyp/test/multiple-targets/gyptest-default.py deleted file mode 100644 index 8d5072d..0000000 --- a/mozc_build_tools/gyp/test/multiple-targets/gyptest-default.py +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('multiple.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -# TODO(sgk): remove stderr=None when the --generator-output= support -# gets rid of the scons warning -test.build('multiple.gyp', chdir='relocate/src', stderr=None) - -expect1 = """\ -hello from prog1.c -hello from common.c -""" - -expect2 = """\ -hello from prog2.c -hello from common.c -""" - -test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src') -test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/multiple-targets/src/common.c b/mozc_build_tools/gyp/test/multiple-targets/src/common.c deleted file mode 100644 index f1df7c1..0000000 --- a/mozc_build_tools/gyp/test/multiple-targets/src/common.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -void common(void) -{ - printf("hello from common.c\n"); - return; -} diff --git a/mozc_build_tools/gyp/test/multiple-targets/src/multiple.gyp b/mozc_build_tools/gyp/test/multiple-targets/src/multiple.gyp deleted file mode 100644 index 3db4ea3..0000000 --- a/mozc_build_tools/gyp/test/multiple-targets/src/multiple.gyp +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'prog1', - 'type': 'executable', - 'sources': [ - 'prog1.c', - 'common.c', - ], - }, - { - 'target_name': 'prog2', - 'type': 'executable', - 'sources': [ - 'prog2.c', - 'common.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/multiple-targets/src/prog1.c b/mozc_build_tools/gyp/test/multiple-targets/src/prog1.c deleted file mode 100644 index d55f8af..0000000 --- a/mozc_build_tools/gyp/test/multiple-targets/src/prog1.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void common(void); - -int main(int argc, char *argv[]) -{ - printf("hello from prog1.c\n"); - common(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/multiple-targets/src/prog2.c b/mozc_build_tools/gyp/test/multiple-targets/src/prog2.c deleted file mode 100644 index 760590e..0000000 --- a/mozc_build_tools/gyp/test/multiple-targets/src/prog2.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void common(void); - -int main(int argc, char *argv[]) -{ - printf("hello from prog2.c\n"); - common(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/no-output/gyptest-no-output.py b/mozc_build_tools/gyp/test/no-output/gyptest-no-output.py deleted file mode 100644 index 8431241..0000000 --- a/mozc_build_tools/gyp/test/no-output/gyptest-no-output.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verified things don't explode when there are targets without outputs. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('nooutput.gyp', chdir='src') -test.relocate('src', 'relocate/src') -test.build('nooutput.gyp', chdir='relocate/src') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/no-output/src/nooutput.gyp b/mozc_build_tools/gyp/test/no-output/src/nooutput.gyp deleted file mode 100644 index c40124e..0000000 --- a/mozc_build_tools/gyp/test/no-output/src/nooutput.gyp +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'no_output', - 'type': 'none', - 'direct_dependent_settings': { - 'defines': [ - 'NADA', - ], - }, - }, - ], -} diff --git a/mozc_build_tools/gyp/test/product/gyptest-product.py b/mozc_build_tools/gyp/test/product/gyptest-product.py deleted file mode 100644 index e9790f3..0000000 --- a/mozc_build_tools/gyp/test/product/gyptest-product.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simplest-possible build of a "Hello, world!" program -using the default build target. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('product.gyp') -test.build('product.gyp') - -# executables -test.built_file_must_exist('alt1' + test._exe, test.EXECUTABLE, bare=True) -test.built_file_must_exist('hello2.stuff', test.EXECUTABLE, bare=True) -test.built_file_must_exist('yoalt3.stuff', test.EXECUTABLE, bare=True) - -# shared libraries -test.built_file_must_exist(test.dll_ + 'alt4' + test._dll, - test.SHARED_LIB, bare=True) -test.built_file_must_exist(test.dll_ + 'hello5.stuff', - test.SHARED_LIB, bare=True) -test.built_file_must_exist('yoalt6.stuff', test.SHARED_LIB, bare=True) - -# static libraries -test.built_file_must_exist(test.lib_ + 'alt7' + test._lib, - test.STATIC_LIB, bare=True) -test.built_file_must_exist(test.lib_ + 'hello8.stuff', - test.STATIC_LIB, bare=True) -test.built_file_must_exist('yoalt9.stuff', test.STATIC_LIB, bare=True) - -# alternate product_dir -test.built_file_must_exist('bob/yoalt10.stuff', test.EXECUTABLE, bare=True) -test.built_file_must_exist('bob/yoalt11.stuff', test.EXECUTABLE, bare=True) -test.built_file_must_exist('bob/yoalt12.stuff', test.EXECUTABLE, bare=True) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/product/hello.c b/mozc_build_tools/gyp/test/product/hello.c deleted file mode 100644 index 94798f3..0000000 --- a/mozc_build_tools/gyp/test/product/hello.c +++ /dev/null @@ -1,15 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -#include - -int func1(void) { - return 42; -} - -int main(int argc, char *argv[]) { - printf("Hello, world!\n"); - printf("%d\n", func1()); - return 0; -} diff --git a/mozc_build_tools/gyp/test/product/product.gyp b/mozc_build_tools/gyp/test/product/product.gyp deleted file mode 100644 index c25eaaa..0000000 --- a/mozc_build_tools/gyp/test/product/product.gyp +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'hello1', - 'product_name': 'alt1', - 'type': 'executable', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello2', - 'product_extension': 'stuff', - 'type': 'executable', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello3', - 'product_name': 'alt3', - 'product_extension': 'stuff', - 'product_prefix': 'yo', - 'type': 'executable', - 'sources': [ - 'hello.c', - ], - }, - - { - 'target_name': 'hello4', - 'product_name': 'alt4', - 'type': 'shared_library', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello5', - 'product_extension': 'stuff', - 'type': 'shared_library', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello6', - 'product_name': 'alt6', - 'product_extension': 'stuff', - 'product_prefix': 'yo', - 'type': 'shared_library', - 'sources': [ - 'hello.c', - ], - }, - - { - 'target_name': 'hello7', - 'product_name': 'alt7', - 'type': 'static_library', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello8', - 'product_extension': 'stuff', - 'type': 'static_library', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello9', - 'product_name': 'alt9', - 'product_extension': 'stuff', - 'product_prefix': 'yo', - 'type': 'static_library', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello10', - 'product_name': 'alt10', - 'product_extension': 'stuff', - 'product_prefix': 'yo', - 'product_dir': '<(PRODUCT_DIR)/bob', - 'type': 'executable', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello11', - 'product_name': 'alt11', - 'product_extension': 'stuff', - 'product_prefix': 'yo', - 'product_dir': '<(PRODUCT_DIR)/bob', - 'type': 'shared_library', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello12', - 'product_name': 'alt12', - 'product_extension': 'stuff', - 'product_prefix': 'yo', - 'product_dir': '<(PRODUCT_DIR)/bob', - 'type': 'static_library', - 'sources': [ - 'hello.c', - ], - }, - ], - 'conditions': [ - ['OS=="linux"', { - 'target_defaults': { - 'cflags': ['-fPIC'], - }, - }], - ], -} diff --git a/mozc_build_tools/gyp/test/rules/gyptest-all.py b/mozc_build_tools/gyp/test/rules/gyptest-all.py deleted file mode 100644 index 46cfeef..0000000 --- a/mozc_build_tools/gyp/test/rules/gyptest-all.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple rules when using an explicit build target of 'all'. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('actions.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('actions.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from program.c -Hello from function1.in -Hello from function2.in -""" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir1' -else: - chdir = 'relocate/src' -test.run_built_executable('program', chdir=chdir, stdout=expect) - -expect = """\ -Hello from program.c -Hello from function3.in -""" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir3' -else: - chdir = 'relocate/src' -test.run_built_executable('program2', chdir=chdir, stdout=expect) - -test.must_match('relocate/src/subdir2/file1.out', "Hello from file1.in\n") -test.must_match('relocate/src/subdir2/file2.out', "Hello from file2.in\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/rules/gyptest-default.py b/mozc_build_tools/gyp/test/rules/gyptest-default.py deleted file mode 100644 index f9c7906..0000000 --- a/mozc_build_tools/gyp/test/rules/gyptest-default.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple rules when using an explicit build target of 'all'. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('actions.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('actions.gyp', chdir='relocate/src') - -expect = """\ -Hello from program.c -Hello from function1.in -Hello from function2.in -""" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir1' -else: - chdir = 'relocate/src' -test.run_built_executable('program', chdir=chdir, stdout=expect) - -expect = """\ -Hello from program.c -Hello from function3.in -""" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir3' -else: - chdir = 'relocate/src' -test.run_built_executable('program2', chdir=chdir, stdout=expect) - -test.must_match('relocate/src/subdir2/file1.out', "Hello from file1.in\n") -test.must_match('relocate/src/subdir2/file2.out', "Hello from file2.in\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/rules/src/actions.gyp b/mozc_build_tools/gyp/test/rules/src/actions.gyp deleted file mode 100644 index 3e9f8b5..0000000 --- a/mozc_build_tools/gyp/test/rules/src/actions.gyp +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'pull_in_all_actions', - 'type': 'none', - 'dependencies': [ - 'subdir1/executable.gyp:*', - 'subdir2/none.gyp:*', - 'subdir3/executable2.gyp:*', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/rules/src/copy-file.py b/mozc_build_tools/gyp/test/rules/src/copy-file.py deleted file mode 100644 index 5a5feae..0000000 --- a/mozc_build_tools/gyp/test/rules/src/copy-file.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
-import sys - -contents = open(sys.argv[1], 'r').read() -open(sys.argv[2], 'wb').write(contents) - -sys.exit(0) diff --git a/mozc_build_tools/gyp/test/rules/src/subdir1/executable.gyp b/mozc_build_tools/gyp/test/rules/src/subdir1/executable.gyp deleted file mode 100644 index 3028577..0000000 --- a/mozc_build_tools/gyp/test/rules/src/subdir1/executable.gyp +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'program.c', - 'function1.in', - 'function2.in', - ], - 'rules': [ - { - 'rule_name': 'copy_file', - 'extension': 'in', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - # TODO: fix SCons and Make to support generated files not - # in a variable-named path like <(INTERMEDIATE_DIR) - #'<(RULE_INPUT_ROOT).c', - '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/rules/src/subdir1/function1.in b/mozc_build_tools/gyp/test/rules/src/subdir1/function1.in deleted file mode 100644 index 60ff289..0000000 --- a/mozc_build_tools/gyp/test/rules/src/subdir1/function1.in +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void function1(void) -{ - printf("Hello from function1.in\n"); -} diff --git a/mozc_build_tools/gyp/test/rules/src/subdir1/function2.in b/mozc_build_tools/gyp/test/rules/src/subdir1/function2.in deleted file mode 100644 index 0fcfc03..0000000 --- a/mozc_build_tools/gyp/test/rules/src/subdir1/function2.in +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void function2(void) -{ - printf("Hello from function2.in\n"); -} diff --git a/mozc_build_tools/gyp/test/rules/src/subdir1/program.c b/mozc_build_tools/gyp/test/rules/src/subdir1/program.c deleted file mode 100644 index 258d7f9..0000000 --- a/mozc_build_tools/gyp/test/rules/src/subdir1/program.c +++ /dev/null @@ -1,12 +0,0 @@ -#include - -extern void function1(void); -extern void function2(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from program.c\n"); - function1(); - function2(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/rules/src/subdir2/file1.in b/mozc_build_tools/gyp/test/rules/src/subdir2/file1.in deleted file mode 100644 index 86ac3ad..0000000 --- a/mozc_build_tools/gyp/test/rules/src/subdir2/file1.in +++ /dev/null @@ -1 +0,0 @@ -Hello from file1.in diff --git a/mozc_build_tools/gyp/test/rules/src/subdir2/file2.in b/mozc_build_tools/gyp/test/rules/src/subdir2/file2.in deleted file mode 100644 index bf83d8e..0000000 --- a/mozc_build_tools/gyp/test/rules/src/subdir2/file2.in +++ /dev/null @@ -1 +0,0 @@ -Hello from file2.in diff --git a/mozc_build_tools/gyp/test/rules/src/subdir2/none.gyp b/mozc_build_tools/gyp/test/rules/src/subdir2/none.gyp deleted file mode 100644 index 38bcdab..0000000 --- a/mozc_build_tools/gyp/test/rules/src/subdir2/none.gyp +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'files', - 'type': 'none', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'file1.in', - 'file2.in', - ], - 'rules': [ - { - 'rule_name': 'copy_file', - 'extension': 'in', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - '<(RULE_INPUT_ROOT).out', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/rules/src/subdir3/executable2.gyp b/mozc_build_tools/gyp/test/rules/src/subdir3/executable2.gyp deleted file mode 100644 index a2a528f..0000000 --- a/mozc_build_tools/gyp/test/rules/src/subdir3/executable2.gyp +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This one tests that rules are properly written if extensions are different -# between the target's sources (program.c) and the generated files -# (function3.cc) - -{ - 'targets': [ - { - 'target_name': 'program2', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'program.c', - 'function3.in', - ], - 'rules': [ - { - 'rule_name': 'copy_file', - 'extension': 'in', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - '<(SHARED_INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).cc', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/rules/src/subdir3/function3.in b/mozc_build_tools/gyp/test/rules/src/subdir3/function3.in deleted file mode 100644 index 99f46ab..0000000 --- a/mozc_build_tools/gyp/test/rules/src/subdir3/function3.in +++ /dev/null @@ -1,6 +0,0 @@ -#include - -extern "C" void function3(void) -{ - printf("Hello from function3.in\n"); -} diff --git a/mozc_build_tools/gyp/test/rules/src/subdir3/program.c b/mozc_build_tools/gyp/test/rules/src/subdir3/program.c deleted file mode 100644 index 94f6c50..0000000 --- a/mozc_build_tools/gyp/test/rules/src/subdir3/program.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void function3(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from program.c\n"); - function3(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/rules-rebuild/gyptest-all.py b/mozc_build_tools/gyp/test/rules-rebuild/gyptest-all.py deleted file mode 100644 index 549d48b..0000000 --- a/mozc_build_tools/gyp/test/rules-rebuild/gyptest-all.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that a rule that generates multiple outputs rebuilds -correctly when the inputs change. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -if test.format == 'msvs': - msg = 'TODO: issue 120: disabled on MSVS due to test execution problems.\n' - test.skip_test(msg) - -test.run_gyp('same_target.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - - -test.build('same_target.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from main.c -Hello from prog1.in! -Hello from prog2.in! 
-""" - -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - -test.up_to_date('same_target.gyp', 'program', chdir='relocate/src') - - -test.sleep() -contents = test.read(['relocate', 'src', 'prog1.in']) -contents = contents.replace('!', ' AGAIN!') -test.write(['relocate', 'src', 'prog1.in'], contents) - -test.build('same_target.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from main.c -Hello from prog1.in AGAIN! -Hello from prog2.in! -""" - -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - -test.up_to_date('same_target.gyp', 'program', chdir='relocate/src') - - -test.sleep() -contents = test.read(['relocate', 'src', 'prog2.in']) -contents = contents.replace('!', ' AGAIN!') -test.write(['relocate', 'src', 'prog2.in'], contents) - -test.build('same_target.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from main.c -Hello from prog1.in AGAIN! -Hello from prog2.in AGAIN! -""" - -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - -test.up_to_date('same_target.gyp', 'program', chdir='relocate/src') - - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/rules-rebuild/gyptest-default.py b/mozc_build_tools/gyp/test/rules-rebuild/gyptest-default.py deleted file mode 100644 index a669105..0000000 --- a/mozc_build_tools/gyp/test/rules-rebuild/gyptest-default.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that a rule that generates multiple outputs rebuilds -correctly when the inputs change. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -if test.format == 'msvs': - msg = 'TODO: issue 120: disabled on MSVS due to test execution problems.\n' - test.skip_test(msg) - -test.run_gyp('same_target.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - - -test.build('same_target.gyp', chdir='relocate/src') - -expect = """\ -Hello from main.c -Hello from prog1.in! -Hello from prog2.in! -""" - -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - -test.up_to_date('same_target.gyp', 'program', chdir='relocate/src') - - -test.sleep() -contents = test.read(['relocate', 'src', 'prog1.in']) -contents = contents.replace('!', ' AGAIN!') -test.write(['relocate', 'src', 'prog1.in'], contents) - -test.build('same_target.gyp', chdir='relocate/src') - -expect = """\ -Hello from main.c -Hello from prog1.in AGAIN! -Hello from prog2.in! -""" - -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - -test.up_to_date('same_target.gyp', 'program', chdir='relocate/src') - - -test.sleep() -contents = test.read(['relocate', 'src', 'prog2.in']) -contents = contents.replace('!', ' AGAIN!') -test.write(['relocate', 'src', 'prog2.in'], contents) - -test.build('same_target.gyp', chdir='relocate/src') - -expect = """\ -Hello from main.c -Hello from prog1.in AGAIN! -Hello from prog2.in AGAIN! 
-""" - -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - -test.up_to_date('same_target.gyp', 'program', chdir='relocate/src') - - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/rules-rebuild/src/main.c b/mozc_build_tools/gyp/test/rules-rebuild/src/main.c deleted file mode 100644 index bdc5ec8..0000000 --- a/mozc_build_tools/gyp/test/rules-rebuild/src/main.c +++ /dev/null @@ -1,12 +0,0 @@ -#include - -extern void prog1(void); -extern void prog2(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from main.c\n"); - prog1(); - prog2(); - return 0; -} diff --git a/mozc_build_tools/gyp/test/rules-rebuild/src/make-sources.py b/mozc_build_tools/gyp/test/rules-rebuild/src/make-sources.py deleted file mode 100644 index 6fce558..0000000 --- a/mozc_build_tools/gyp/test/rules-rebuild/src/make-sources.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python -import sys - -assert len(sys.argv) == 4, sys.argv - -(in_file, c_file, h_file) = sys.argv[1:] - -def write_file(filename, contents): - open(filename, 'wb').write(contents) - -write_file(c_file, open(in_file, 'rb').read()) - -write_file(h_file, '#define NAME "%s"\n' % in_file) - -sys.exit(0) diff --git a/mozc_build_tools/gyp/test/rules-rebuild/src/prog1.in b/mozc_build_tools/gyp/test/rules-rebuild/src/prog1.in deleted file mode 100644 index 191b00e..0000000 --- a/mozc_build_tools/gyp/test/rules-rebuild/src/prog1.in +++ /dev/null @@ -1,7 +0,0 @@ -#include -#include "prog1.h" - -void prog1(void) -{ - printf("Hello from %s!\n", NAME); -} diff --git a/mozc_build_tools/gyp/test/rules-rebuild/src/prog2.in b/mozc_build_tools/gyp/test/rules-rebuild/src/prog2.in deleted file mode 100644 index 7bfac51..0000000 --- a/mozc_build_tools/gyp/test/rules-rebuild/src/prog2.in +++ /dev/null @@ -1,7 +0,0 @@ -#include -#include "prog2.h" - -void prog2(void) -{ - printf("Hello from %s!\n", NAME); -} diff --git a/mozc_build_tools/gyp/test/rules-rebuild/src/same_target.gyp b/mozc_build_tools/gyp/test/rules-rebuild/src/same_target.gyp deleted file mode 100644 index 148287f..0000000 --- a/mozc_build_tools/gyp/test/rules-rebuild/src/same_target.gyp +++ /dev/null @@ -1,32 +0,0 @@ -{ - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'main.c', - 'prog1.in', - 'prog2.in', - ], - 'rules': [ - { - 'rule_name': 'make_sources', - 'extension': 'in', - 'msvs_external_rule': 1, - 'inputs': [ - 'make-sources.py', - ], - 'outputs': [ - '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c', - '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_NAME)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/same-gyp-name/gyptest-all.py b/mozc_build_tools/gyp/test/same-gyp-name/gyptest-all.py deleted file mode 100644 index 7645688..0000000 --- a/mozc_build_tools/gyp/test/same-gyp-name/gyptest-all.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Build a .gyp that depends on 2 gyp files with the same name. -""" - -import TestGyp - -# This causes a problem on XCode (duplicate ID). 
-# See http://code.google.com/p/gyp/issues/detail?id=114 -test = TestGyp.TestGyp(formats=['msvs', 'scons', 'make']) - -test.run_gyp('all.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('all.gyp', test.ALL, chdir='relocate/src') - -expect1 = """\ -Hello from main1.cc -""" - -expect2 = """\ -Hello from main2.cc -""" - -test.run_built_executable('program1', chdir='relocate/src', stdout=expect1) -test.run_built_executable('program2', chdir='relocate/src', stdout=expect2) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/same-gyp-name/gyptest-default.py b/mozc_build_tools/gyp/test/same-gyp-name/gyptest-default.py deleted file mode 100644 index c1031f8..0000000 --- a/mozc_build_tools/gyp/test/same-gyp-name/gyptest-default.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Build a .gyp that depends on 2 gyp files with the same name. -""" - -import TestGyp - -# This causes a problem on XCode (duplicate ID). -# See http://code.google.com/p/gyp/issues/detail?id=114 -test = TestGyp.TestGyp(formats=['msvs', 'scons', 'make']) - -test.run_gyp('all.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('all.gyp', chdir='relocate/src') - -expect1 = """\ -Hello from main1.cc -""" - -expect2 = """\ -Hello from main2.cc -""" - -test.run_built_executable('program1', chdir='relocate/src', stdout=expect1) -test.run_built_executable('program2', chdir='relocate/src', stdout=expect2) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/same-gyp-name/src/all.gyp b/mozc_build_tools/gyp/test/same-gyp-name/src/all.gyp deleted file mode 100644 index 229f02e..0000000 --- a/mozc_build_tools/gyp/test/same-gyp-name/src/all.gyp +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'all_exes', - 'type': 'none', - 'dependencies': [ - 'subdir1/executable.gyp:*', - 'subdir2/executable.gyp:*', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/same-gyp-name/src/subdir1/executable.gyp b/mozc_build_tools/gyp/test/same-gyp-name/src/subdir1/executable.gyp deleted file mode 100644 index 82483b4..0000000 --- a/mozc_build_tools/gyp/test/same-gyp-name/src/subdir1/executable.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'program1', - 'type': 'executable', - 'sources': [ - 'main1.cc', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/same-gyp-name/src/subdir1/main1.cc b/mozc_build_tools/gyp/test/same-gyp-name/src/subdir1/main1.cc deleted file mode 100644 index 3645558..0000000 --- a/mozc_build_tools/gyp/test/same-gyp-name/src/subdir1/main1.cc +++ /dev/null @@ -1,6 +0,0 @@ -#include - -int main() { - printf("Hello from main1.cc\n"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/same-gyp-name/src/subdir2/executable.gyp b/mozc_build_tools/gyp/test/same-gyp-name/src/subdir2/executable.gyp deleted file mode 100644 index e353701..0000000 --- a/mozc_build_tools/gyp/test/same-gyp-name/src/subdir2/executable.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'program2', - 'type': 'executable', - 'sources': [ - 'main2.cc', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/same-gyp-name/src/subdir2/main2.cc b/mozc_build_tools/gyp/test/same-gyp-name/src/subdir2/main2.cc deleted file mode 100644 index 0c724de..0000000 --- a/mozc_build_tools/gyp/test/same-gyp-name/src/subdir2/main2.cc +++ /dev/null @@ -1,6 +0,0 @@ -#include - -int main() { - printf("Hello from main2.cc\n"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/same-name/gyptest-all.py b/mozc_build_tools/gyp/test/same-name/gyptest-all.py deleted file mode 100644 index 4c21502..0000000 --- a/mozc_build_tools/gyp/test/same-name/gyptest-all.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Build a .gyp with two targets that share a common .c source file. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('all.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('all.gyp', test.ALL, chdir='relocate/src') - -expect1 = """\ -Hello from prog1.c -Hello prog1 from func.c -""" - -expect2 = """\ -Hello from prog2.c -Hello prog2 from func.c -""" - -test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1) -test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/same-name/gyptest-default.py b/mozc_build_tools/gyp/test/same-name/gyptest-default.py deleted file mode 100644 index 98757c2..0000000 --- a/mozc_build_tools/gyp/test/same-name/gyptest-default.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Build a .gyp with two targets that share a common .c source file. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('all.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('all.gyp', chdir='relocate/src') - -expect1 = """\ -Hello from prog1.c -Hello prog1 from func.c -""" - -expect2 = """\ -Hello from prog2.c -Hello prog2 from func.c -""" - -test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1) -test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/same-name/src/all.gyp b/mozc_build_tools/gyp/test/same-name/src/all.gyp deleted file mode 100644 index 44e1049..0000000 --- a/mozc_build_tools/gyp/test/same-name/src/all.gyp +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'prog1', - 'type': 'executable', - 'defines': [ - 'PROG="prog1"', - ], - 'sources': [ - 'prog1.c', - 'func.c', - # Uncomment to test same-named files in different directories, - # which Visual Studio doesn't support. - #'subdir1/func.c', - #'subdir2/func.c', - ], - }, - { - 'target_name': 'prog2', - 'type': 'executable', - 'defines': [ - 'PROG="prog2"', - ], - 'sources': [ - 'prog2.c', - 'func.c', - # Uncomment to test same-named files in different directories, - # which Visual Studio doesn't support. 
- #'subdir1/func.c', - #'subdir2/func.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/same-name/src/func.c b/mozc_build_tools/gyp/test/same-name/src/func.c deleted file mode 100644 index e069c69..0000000 --- a/mozc_build_tools/gyp/test/same-name/src/func.c +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void func(void) -{ - printf("Hello %s from func.c\n", PROG); -} diff --git a/mozc_build_tools/gyp/test/same-name/src/prog1.c b/mozc_build_tools/gyp/test/same-name/src/prog1.c deleted file mode 100644 index c8940fe..0000000 --- a/mozc_build_tools/gyp/test/same-name/src/prog1.c +++ /dev/null @@ -1,16 +0,0 @@ -#include - -extern void func(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from prog1.c\n"); - func(); - /* - * Uncomment to test same-named files in different directories, - * which Visual Studio doesn't support. - subdir1_func(); - subdir2_func(); - */ - return 0; -} diff --git a/mozc_build_tools/gyp/test/same-name/src/prog2.c b/mozc_build_tools/gyp/test/same-name/src/prog2.c deleted file mode 100644 index e6605c2..0000000 --- a/mozc_build_tools/gyp/test/same-name/src/prog2.c +++ /dev/null @@ -1,16 +0,0 @@ -#include - -extern void func(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from prog2.c\n"); - func(); - /* - * Uncomment to test same-named files in different directories, - * which Visual Studio doesn't support. - subdir1_func(); - subdir2_func(); - */ - return 0; -} diff --git a/mozc_build_tools/gyp/test/same-name/src/subdir1/func.c b/mozc_build_tools/gyp/test/same-name/src/subdir1/func.c deleted file mode 100644 index b73450d..0000000 --- a/mozc_build_tools/gyp/test/same-name/src/subdir1/func.c +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void subdir1_func(void) -{ - printf("Hello %s from subdir1/func.c\n", PROG); -} diff --git a/mozc_build_tools/gyp/test/same-name/src/subdir2/func.c b/mozc_build_tools/gyp/test/same-name/src/subdir2/func.c deleted file mode 100644 index 0248b57..0000000 --- a/mozc_build_tools/gyp/test/same-name/src/subdir2/func.c +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void subdir2_func(void) -{ - printf("Hello %s from subdir2/func.c\n", PROG); -} diff --git a/mozc_build_tools/gyp/test/scons_tools/gyptest-tools.py b/mozc_build_tools/gyp/test/scons_tools/gyptest-tools.py deleted file mode 100755 index e97f5e6..0000000 --- a/mozc_build_tools/gyp/test/scons_tools/gyptest-tools.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that a scons build picks up tools modules specified -via 'scons_tools' in the 'scons_settings' dictionary. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('tools.gyp') - -test.build('tools.gyp', test.ALL) - -if test.format == 'scons': - expect = "Hello, world!\n" -else: - expect = "" -test.run_built_executable('tools', stdout=expect) - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/scons_tools/site_scons/site_tools/this_tool.py b/mozc_build_tools/gyp/test/scons_tools/site_scons/site_tools/this_tool.py deleted file mode 100644 index 10c8947..0000000 --- a/mozc_build_tools/gyp/test/scons_tools/site_scons/site_tools/this_tool.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# SCons "tool" module that simply sets a -D value. 
-def generate(env): - env['CPPDEFINES'] = ['THIS_TOOL'] - -def exists(env): - pass diff --git a/mozc_build_tools/gyp/test/scons_tools/tools.c b/mozc_build_tools/gyp/test/scons_tools/tools.c deleted file mode 100644 index 78dc0e3..0000000 --- a/mozc_build_tools/gyp/test/scons_tools/tools.c +++ /dev/null @@ -1,13 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -#include - -int main(int argc, char *argv[]) -{ -#ifdef THIS_TOOL - printf("Hello, world!\n"); -#endif - return 0; -} diff --git a/mozc_build_tools/gyp/test/scons_tools/tools.gyp b/mozc_build_tools/gyp/test/scons_tools/tools.gyp deleted file mode 100644 index 736ba3f..0000000 --- a/mozc_build_tools/gyp/test/scons_tools/tools.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'tools', - 'type': 'executable', - 'sources': [ - 'tools.c', - ], - }, - ], - 'scons_settings': { - 'tools': ['default', 'this_tool'], - }, -} diff --git a/mozc_build_tools/gyp/test/sibling/gyptest-all.py b/mozc_build_tools/gyp/test/sibling/gyptest-all.py deleted file mode 100644 index c04c2d4..0000000 --- a/mozc_build_tools/gyp/test/sibling/gyptest-all.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('build/all.gyp', chdir='src') - -test.build('build/all.gyp', test.ALL, chdir='src') - -chdir = 'src/build' - -# The top-level Makefile is in the directory where gyp was run. -# TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp -# file? What about when passing in multiple .gyp files? Would sub-project -# Makefiles (see http://codereview.chromium.org/340008 comments) solve this? -if test.format == 'make': - chdir = 'src' - -if test.format == 'xcode': - chdir = 'src/prog1' -test.run_built_executable('prog1', - chdir=chdir, - stdout="Hello from prog1.c\n") - -if test.format == 'xcode': - chdir = 'src/prog2' -test.run_built_executable('prog2', - chdir=chdir, - stdout="Hello from prog2.c\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/sibling/gyptest-relocate.py b/mozc_build_tools/gyp/test/sibling/gyptest-relocate.py deleted file mode 100644 index 176545f..0000000 --- a/mozc_build_tools/gyp/test/sibling/gyptest-relocate.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('build/all.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('build/all.gyp', test.ALL, chdir='relocate/src') - -chdir = 'relocate/src/build' - -# The top-level Makefile is in the directory where gyp was run. -# TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp -# file? What about when passing in multiple .gyp files? Would sub-project -# Makefiles (see http://codereview.chromium.org/340008 comments) solve this? 
-if test.format == 'make': - chdir = 'relocate/src' - -if test.format == 'xcode': - chdir = 'relocate/src/prog1' -test.run_built_executable('prog1', - chdir=chdir, - stdout="Hello from prog1.c\n") - -if test.format == 'xcode': - chdir = 'relocate/src/prog2' -test.run_built_executable('prog2', - chdir=chdir, - stdout="Hello from prog2.c\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/sibling/src/build/all.gyp b/mozc_build_tools/gyp/test/sibling/src/build/all.gyp deleted file mode 100644 index 6eafdf9..0000000 --- a/mozc_build_tools/gyp/test/sibling/src/build/all.gyp +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - # TODO(sgk): a target name of 'all' leads to a scons dependency cycle - 'target_name': 'All', - 'type': 'none', - 'dependencies': [ - '../prog1/prog1.gyp:*', - '../prog2/prog2.gyp:*', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/sibling/src/prog1/prog1.c b/mozc_build_tools/gyp/test/sibling/src/prog1/prog1.c deleted file mode 100644 index 161ae8a..0000000 --- a/mozc_build_tools/gyp/test/sibling/src/prog1/prog1.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from prog1.c\n"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/sibling/src/prog1/prog1.gyp b/mozc_build_tools/gyp/test/sibling/src/prog1/prog1.gyp deleted file mode 100644 index fbe38b9..0000000 --- a/mozc_build_tools/gyp/test/sibling/src/prog1/prog1.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'prog1', - 'type': 'executable', - 'sources': [ - 'prog1.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/sibling/src/prog2/prog2.c b/mozc_build_tools/gyp/test/sibling/src/prog2/prog2.c deleted file mode 100644 index 7635ae8..0000000 --- a/mozc_build_tools/gyp/test/sibling/src/prog2/prog2.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from prog2.c\n"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/sibling/src/prog2/prog2.gyp b/mozc_build_tools/gyp/test/sibling/src/prog2/prog2.gyp deleted file mode 100644 index 5934548..0000000 --- a/mozc_build_tools/gyp/test/sibling/src/prog2/prog2.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'prog2', - 'type': 'executable', - 'sources': [ - 'prog2.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/subdirectory/gyptest-SYMROOT-all.py b/mozc_build_tools/gyp/test/subdirectory/gyptest-SYMROOT-all.py deleted file mode 100644 index b750904..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/gyptest-SYMROOT-all.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a target and a subsidiary dependent target from a -.gyp file in a subdirectory, without specifying an explicit output build -directory, and using the generated solution or project file at the top -of the tree as the entry point. 
- -The configuration sets the Xcode SYMROOT variable and uses --depth= -to make Xcode behave like the other build tools--that is, put all -built targets in a single output build directory at the top of the tree. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src') - -test.relocate('src', 'relocate/src') - -# Suppress the test infrastructure's setting SYMROOT on the command line. -test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src') - -test.run_built_executable('prog1', - stdout="Hello from prog1.c\n", - chdir='relocate/src') -test.run_built_executable('prog2', - stdout="Hello from prog2.c\n", - chdir='relocate/src') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/subdirectory/gyptest-SYMROOT-default.py b/mozc_build_tools/gyp/test/subdirectory/gyptest-SYMROOT-default.py deleted file mode 100644 index c64ae7d..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/gyptest-SYMROOT-default.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a target and a subsidiary dependent target from a -.gyp file in a subdirectory, without specifying an explicit output build -directory, and using the generated solution or project file at the top -of the tree as the entry point. - -The configuration sets the Xcode SYMROOT variable and uses --depth= -to make Xcode behave like the other build tools--that is, put all -built targets in a single output build directory at the top of the tree. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src') - -test.relocate('src', 'relocate/src') - -# Suppress the test infrastructure's setting SYMROOT on the command line. -test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src') - -test.run_built_executable('prog1', - stdout="Hello from prog1.c\n", - chdir='relocate/src') - -test.run_built_executable('prog2', - stdout="Hello from prog2.c\n", - chdir='relocate/src') - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/subdirectory/gyptest-subdir-all.py b/mozc_build_tools/gyp/test/subdirectory/gyptest-subdir-all.py deleted file mode 100644 index fbaef32..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/gyptest-subdir-all.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a subsidiary dependent target from a .gyp file in a -subdirectory, without specifying an explicit output build directory, -and using the subdirectory's solution or project file as the entry point. 
-""" - -import TestGyp -import errno - -test = TestGyp.TestGyp() - -test.run_gyp('prog1.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -chdir = 'relocate/src/subdir' -target = test.ALL - -test.build('prog2.gyp', target, chdir=chdir) - -test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir) - -test.run_built_executable('prog2', - chdir=chdir, - stdout="Hello from prog2.c\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/subdirectory/gyptest-subdir-default.py b/mozc_build_tools/gyp/test/subdirectory/gyptest-subdir-default.py deleted file mode 100644 index 6372ea2..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/gyptest-subdir-default.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a subsidiary dependent target from a .gyp file in a -subdirectory, without specifying an explicit output build directory, -and using the subdirectory's solution or project file as the entry point. -""" - -import TestGyp -import errno - -test = TestGyp.TestGyp() - -test.run_gyp('prog1.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -chdir = 'relocate/src/subdir' - -test.build('prog2.gyp', chdir=chdir) - -test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir) - -test.run_built_executable('prog2', - chdir=chdir, - stdout="Hello from prog2.c\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/subdirectory/gyptest-subdir2-deep.py b/mozc_build_tools/gyp/test/subdirectory/gyptest-subdir2-deep.py deleted file mode 100644 index 4854898..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/gyptest-subdir2-deep.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a project rooted several layers under src_dir works. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('prog3.gyp', chdir='src/subdir/subdir2') - -test.relocate('src', 'relocate/src') - -test.build('prog3.gyp', test.ALL, chdir='relocate/src/subdir/subdir2') - -test.run_built_executable('prog3', - chdir='relocate/src/subdir/subdir2', - stdout="Hello from prog3.c\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/subdirectory/gyptest-top-all.py b/mozc_build_tools/gyp/test/subdirectory/gyptest-top-all.py deleted file mode 100644 index a29a41b..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/gyptest-top-all.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a target and a subsidiary dependent target from a -.gyp file in a subdirectory, without specifying an explicit output build -directory, and using the generated solution or project file at the top -of the tree as the entry point. - -There is a difference here in the default behavior of the underlying -build tools. Specifically, when building the entire "solution", Xcode -puts the output of each project relative to the .xcodeproj directory, -while Visual Studio (and our implementations of SCons and Make) put it -in a build directory relative to the "solution"--that is, the entry-point -from which you built the entire tree. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('prog1.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('prog1.gyp', test.ALL, chdir='relocate/src') - -test.run_built_executable('prog1', - stdout="Hello from prog1.c\n", - chdir='relocate/src') - -if test.format == 'xcode': - chdir = 'relocate/src/subdir' -else: - chdir = 'relocate/src' -test.run_built_executable('prog2', - chdir=chdir, - stdout="Hello from prog2.c\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/subdirectory/gyptest-top-default.py b/mozc_build_tools/gyp/test/subdirectory/gyptest-top-default.py deleted file mode 100644 index ac5f60d..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/gyptest-top-default.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a target and a subsidiary dependent target from a -.gyp file in a subdirectory, without specifying an explicit output build -directory, and using the generated solution or project file at the top -of the tree as the entry point. - -There is a difference here in the default behavior of the underlying -build tools. Specifically, when building the entire "solution", Xcode -puts the output of each project relative to the .xcodeproj directory, -while Visual Studio (and our implementations of SCons and Make) put it -in a build directory relative to the "solution"--that is, the entry-point -from which you built the entire tree. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('prog1.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('prog1.gyp', chdir='relocate/src') - -test.run_built_executable('prog1', - stdout="Hello from prog1.c\n", - chdir='relocate/src') - -if test.format == 'xcode': - chdir = 'relocate/src/subdir' -else: - chdir = 'relocate/src' -test.run_built_executable('prog2', - chdir=chdir, - stdout="Hello from prog2.c\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/subdirectory/src/prog1.c b/mozc_build_tools/gyp/test/subdirectory/src/prog1.c deleted file mode 100644 index 161ae8a..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/src/prog1.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from prog1.c\n"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/subdirectory/src/prog1.gyp b/mozc_build_tools/gyp/test/subdirectory/src/prog1.gyp deleted file mode 100644 index 2aa66ce..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/src/prog1.gyp +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'includes': [ - 'symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'prog1', - 'type': 'executable', - 'dependencies': [ - 'subdir/prog2.gyp:prog2', - ], - 'sources': [ - 'prog1.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/subdirectory/src/subdir/prog2.c b/mozc_build_tools/gyp/test/subdirectory/src/subdir/prog2.c deleted file mode 100644 index 7635ae8..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/src/subdir/prog2.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from prog2.c\n"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/subdirectory/src/subdir/prog2.gyp b/mozc_build_tools/gyp/test/subdirectory/src/subdir/prog2.gyp deleted file mode 100644 index c6cd35f..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/src/subdir/prog2.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'prog2', - 'type': 'executable', - 'sources': [ - 'prog2.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.c b/mozc_build_tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.c deleted file mode 100644 index 7cfb0fa..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from prog3.c\n"); - return 0; -} diff --git a/mozc_build_tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp b/mozc_build_tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp deleted file mode 100644 index b49fb59..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../../symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'prog3', - 'type': 'executable', - 'sources': [ - 'prog3.c', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/subdirectory/src/symroot.gypi b/mozc_build_tools/gyp/test/subdirectory/src/symroot.gypi deleted file mode 100644 index 5199164..0000000 --- a/mozc_build_tools/gyp/test/subdirectory/src/symroot.gypi +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'variables': { - 'set_symroot%': 0, - }, - 'conditions': [ - ['set_symroot == 1', { - 'xcode_settings': { - 'SYMROOT': '<(DEPTH)/build', - }, - }], - ], -} diff --git a/mozc_build_tools/gyp/test/toolsets/gyptest-toolsets.py b/mozc_build_tools/gyp/test/toolsets/gyptest-toolsets.py deleted file mode 100644 index 19737f8..0000000 --- a/mozc_build_tools/gyp/test/toolsets/gyptest-toolsets.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that toolsets are correctly applied -""" - -import TestGyp - -# Multiple toolsets are currently only supported by the make generator. 
-test = TestGyp.TestGyp(formats=['make']) - -test.run_gyp('toolsets.gyp') - -test.build('toolsets.gyp', test.ALL) - -test.run_built_executable('host-main', stdout="Host\n") -test.run_built_executable('target-main', stdout="Target\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/toolsets/main.cc b/mozc_build_tools/gyp/test/toolsets/main.cc deleted file mode 100644 index 0f353ae..0000000 --- a/mozc_build_tools/gyp/test/toolsets/main.cc +++ /dev/null @@ -1,11 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -#include - -const char *GetToolset(); - -int main(int argc, char *argv[]) { - printf("%s\n", GetToolset()); -} diff --git a/mozc_build_tools/gyp/test/toolsets/toolsets.cc b/mozc_build_tools/gyp/test/toolsets/toolsets.cc deleted file mode 100644 index a45fa02..0000000 --- a/mozc_build_tools/gyp/test/toolsets/toolsets.cc +++ /dev/null @@ -1,11 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -const char *GetToolset() { -#ifdef TARGET - return "Target"; -#else - return "Host"; -#endif -} diff --git a/mozc_build_tools/gyp/test/toolsets/toolsets.gyp b/mozc_build_tools/gyp/test/toolsets/toolsets.gyp deleted file mode 100644 index e41b928..0000000 --- a/mozc_build_tools/gyp/test/toolsets/toolsets.gyp +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'target_defaults': { - 'target_conditions': [ - ['_toolset=="target"', {'defines': ['TARGET']}] - ] - }, - 'targets': [ - { - 'target_name': 'toolsets', - 'type': 'static_library', - 'toolsets': ['target', 'host'], - 'sources': [ - 'toolsets.cc', - ], - }, - { - 'target_name': 'host-main', - 'type': 'executable', - 'toolsets': ['host'], - 'dependencies': ['toolsets'], - 'sources': [ - 'main.cc', - ], - }, - { - 'target_name': 'target-main', - 'type': 'executable', - 'dependencies': ['toolsets'], - 'sources': [ - 'main.cc', - ], - }, - ], -} diff --git a/mozc_build_tools/gyp/test/variables/commands/commands-repeated.gyp b/mozc_build_tools/gyp/test/variables/commands/commands-repeated.gyp deleted file mode 100644 index 822ae4f..0000000 --- a/mozc_build_tools/gyp/test/variables/commands/commands-repeated.gyp +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This is a simple test file to make sure that variable substitution -# happens correctly. Run "run_tests.py" using python to generate the -# output from this gyp file. 
- -{ - 'variables': { - 'pi': 'import math; print math.pi', - 'third_letters': "<(other_letters)HIJK", - 'letters_list': 'ABCD', - 'other_letters': '<(letters_list)EFG', - 'check_included': '<(included_variable)', - 'check_lists': [ - '<(included_variable)', - '<(third_letters)', - ], - 'check_int': 5, - 'check_str_int': '6', - 'check_list_int': [ - 7, - '8', - 9, - ], - 'not_int_1': ' 10', - 'not_int_2': '11 ', - 'not_int_3': '012', - 'not_int_4': '13.0', - 'not_int_5': '+14', - 'negative_int': '-15', - 'zero_int': '0', - }, - 'includes': [ - 'commands.gypi', - ], - 'targets': [ - { - 'target_name': 'foo', - 'type': 'none', - 'variables': { - 'var1': ' commands.gyp.stdout -python ../../../gyp --ignore-environment --debug variables --debug general --format gypd --depth . commands.gyp > commands.gyp.ignore-env.stdout -cp -f commands.gypd commands.gypd.golden -python ../../../gyp --debug variables --debug general --format gypd --depth . commands-repeated.gyp > commands-repeated.gyp.stdout -cp -f commands-repeated.gypd commands-repeated.gypd.golden diff --git a/mozc_build_tools/gyp/test/variables/filelist/filelist.gyp.stdout b/mozc_build_tools/gyp/test/variables/filelist/filelist.gyp.stdout deleted file mode 100644 index f131ee2..0000000 --- a/mozc_build_tools/gyp/test/variables/filelist/filelist.gyp.stdout +++ /dev/null @@ -1,173 +0,0 @@ -GENERAL: running with these options: -GENERAL: msvs_version: None -GENERAL: suffix: '' -GENERAL: includes: None -GENERAL: use_environment: True -GENERAL: depth: '.' -GENERAL: generator_flags: [] -GENERAL: generator_output: None -GENERAL: formats: ['gypd'] -GENERAL: debug: ['variables', 'general'] -GENERAL: circular_check: True -GENERAL: check: None -GENERAL: defines: None -GENERAL: cmdline_default_variables: {} -GENERAL: generator_flags: {} -VARIABLES: Expanding 'exclude' to 'exclude' -VARIABLES: Expanding 'Sch.*' to 'Sch.*' -VARIABLES: Expanding 'include' to 'include' -VARIABLES: Expanding '.*dt' to '.*dt' -VARIABLES: Expanding 'exclude' to 'exclude' -VARIABLES: Expanding 'Jer.*' to 'Jer.*' -VARIABLES: Expanding 'John' to 'John' -VARIABLES: Expanding 'Jacob' to 'Jacob' -VARIABLES: Expanding 'Astor' to 'Astor' -VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer' -VARIABLES: Expanding 'Jerome' to 'Jerome' -VARIABLES: Expanding 'Schmidt' to 'Schmidt' -VARIABLES: Expanding 'Schultz' to 'Schultz' -VARIABLES: Expanding 'Astor' to 'Astor' -VARIABLES: Expanding '.' to '.' -VARIABLES: Matches: {'content': 'names.txt <@(names', 'is_array': '', 'type': '<|', 'replace': '<|(names.txt <@(names)'} -VARIABLES: Matches: {'content': 'names', 'is_array': '', 'type': '<@', 'replace': '<@(names)'} -VARIABLES: Expanding 'names' to 'names' -VARIABLES: Expanding 'John' to 'John' -VARIABLES: Expanding 'Jacob' to 'Jacob' -VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer' -VARIABLES: Expanding 'Schmidt' to 'Schmidt' -VARIABLES: Found output 'names.txt John Jacob Jingleheimer Schmidt', recursing. -VARIABLES: Expanding 'names.txt John Jacob Jingleheimer Schmidt' to 'names.txt John Jacob Jingleheimer Schmidt' -VARIABLES: Expanding 'names.txt <@(names)' to 'names.txt John Jacob Jingleheimer Schmidt' -VARIABLES: Found output 'names.txt', recursing. 
-VARIABLES: Expanding 'names.txt' to 'names.txt' -VARIABLES: Expanding '<|(names.txt <@(names))' to 'names.txt' -VARIABLES: Expanding 'foo' to 'foo' -VARIABLES: Expanding 'target' to 'target' -VARIABLES: Expanding 'none' to 'none' -VARIABLES: Expanding 'test_action' to 'test_action' -VARIABLES: Expanding 'python' to 'python' -VARIABLES: Expanding 'dummy.py' to 'dummy.py' -VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'} -VARIABLES: Expanding 'names_listfile' to 'names_listfile' -VARIABLES: Found output 'names.txt', recursing. -VARIABLES: Expanding 'names.txt' to 'names.txt' -VARIABLES: Expanding '<(names_listfile)' to 'names.txt' -VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'} -VARIABLES: Expanding 'names_listfile' to 'names_listfile' -VARIABLES: Found output 'names.txt', recursing. -VARIABLES: Expanding 'names.txt' to 'names.txt' -VARIABLES: Expanding '<(names_listfile)' to 'names.txt' -VARIABLES: Matches: {'content': 'cat <(names_listfile', 'is_array': '', 'type': ' filelist.gyp.stdout -cp -f src/filelist.gypd filelist.gypd.golden diff --git a/mozc_build_tools/gyp/test/variants/gyptest-variants.py b/mozc_build_tools/gyp/test/variants/gyptest-variants.py deleted file mode 100644 index ce2455f..0000000 --- a/mozc_build_tools/gyp/test/variants/gyptest-variants.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verify handling of build variants. - -TODO: Right now, only the SCons generator supports this, so the -test case is SCons-specific. In particular, it relise on SCons' -ability to rebuild in response to changes on the command line. It -may be simpler to just drop this feature if the other generators -can't be made to behave the same way. -""" - -import TestGyp - -test = TestGyp.TestGyp(formats=['scons']) - -test.run_gyp('variants.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('variants.gyp', chdir='relocate/src') - -test.run_built_executable('variants', - chdir='relocate/src', - stdout="Hello, world!\n") - -test.sleep() -test.build('variants.gyp', 'VARIANT1=1', chdir='relocate/src') - -test.run_built_executable('variants', - chdir='relocate/src', - stdout="Hello from VARIANT1\n") - -test.sleep() -test.build('variants.gyp', 'VARIANT2=1', chdir='relocate/src') - -test.run_built_executable('variants', - chdir='relocate/src', - stdout="Hello from VARIANT2\n") - -test.pass_test() diff --git a/mozc_build_tools/gyp/test/variants/src/variants.c b/mozc_build_tools/gyp/test/variants/src/variants.c deleted file mode 100644 index 3018e40..0000000 --- a/mozc_build_tools/gyp/test/variants/src/variants.c +++ /dev/null @@ -1,13 +0,0 @@ -#include <stdio.h> - -int main(int argc, char *argv[]) -{ -#if defined(VARIANT1) - printf("Hello from VARIANT1\n"); -#elif defined(VARIANT2) - printf("Hello from VARIANT2\n"); -#else - printf("Hello, world!\n"); -#endif - return 0; -} diff --git a/mozc_build_tools/gyp/test/variants/src/variants.gyp b/mozc_build_tools/gyp/test/variants/src/variants.gyp deleted file mode 100644 index 0305ca7..0000000 --- a/mozc_build_tools/gyp/test/variants/src/variants.gyp +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file.
- -{ - 'targets': [ - { - 'target_name': 'variants', - 'type': 'executable', - 'sources': [ - 'variants.c', - ], - 'variants': { - 'variant1' : { - 'defines': [ - 'VARIANT1', - ], - }, - 'variant2' : { - 'defines': [ - 'VARIANT2', - ], - }, - }, - }, - ], -} diff --git a/mozc_build_tools/gyp/tools/README b/mozc_build_tools/gyp/tools/README deleted file mode 100644 index 712e4ef..0000000 --- a/mozc_build_tools/gyp/tools/README +++ /dev/null @@ -1,15 +0,0 @@ -pretty_vcproj: - Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2] - - They key/value pair are used to resolve vsprops name. - - For example, if I want to diff the base.vcproj project: - - pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > orignal.txt - pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt - - And you can use your favorite diff tool to see the changes. - - Note: In the case of base.vcproj, the original vcproj is one level up the generated one. - I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt - before you perform the diff. \ No newline at end of file diff --git a/mozc_build_tools/gyp/tools/pretty_gyp.py b/mozc_build_tools/gyp/tools/pretty_gyp.py deleted file mode 100644 index 04c7901..0000000 --- a/mozc_build_tools/gyp/tools/pretty_gyp.py +++ /dev/null @@ -1,142 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2009 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This file pretty-prints the contents of a GYP file. - -import sys -import re - -input = [] -if len(sys.argv) > 1: - input_file = open(sys.argv[1]) - input = input_file.read().splitlines() - input_file.close() -else: - input = sys.stdin.read().splitlines() - -# This is used to remove comments when we're counting braces. -comment_re = re.compile(r'\s*#.*') - -# This is used to remove quoted strings when we're counting braces. -# It takes into account quoted quotes, and makes sure that the quotes -# match. -# NOTE: It does not handle quotes that span more than one line, or -# cases where an escaped quote is preceeded by an escaped backslash. -quote_re_str = r'(?P[\'"])(.*?)(? 0: - after = True - - # This catches the special case of a closing brace having something - # other than just whitespace ahead of it -- we don't want to - # unindent that until after this line is printed so it stays with - # the previous indentation level. - if cnt < 0 and closing_prefix_re.match(stripline): - after = True - return (cnt, after) - -# This does the main work of indenting the input based on the brace counts. -def prettyprint_input(lines): - indent = 0 - basic_offset = 2 - last_line = "" - for line in lines: - if comment_re.match(line): - print line - else: - line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix. - if len(line) > 0: - (brace_diff, after) = count_braces(line) - if brace_diff != 0: - if after: - print " " * (basic_offset * indent) + line - indent += brace_diff - else: - indent += brace_diff - print " " * (basic_offset * indent) + line - else: - print " " * (basic_offset * indent) + line - else: - print "" - last_line = line - -# Split up the double braces. -lines = split_double_braces(input) - -# Indent and print the output. 
-prettyprint_input(lines) diff --git a/mozc_build_tools/gyp/tools/pretty_sln.py b/mozc_build_tools/gyp/tools/pretty_sln.py deleted file mode 100755 index 0741fff..0000000 --- a/mozc_build_tools/gyp/tools/pretty_sln.py +++ /dev/null @@ -1,167 +0,0 @@ -#!/usr/bin/python2.5 - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Prints the information in a sln file in a diffable way. - - It first outputs each projects in alphabetical order with their - dependencies. - - Then it outputs a possible build order. -""" - -__author__ = 'nsylvain (Nicolas Sylvain)' - -import os -import re -import sys -import pretty_vcproj - -def BuildProject(project, built, projects, deps): - # if all dependencies are done, we can build it, otherwise we try to build the - # dependency. - # This is not infinite-recursion proof. - for dep in deps[project]: - if dep not in built: - BuildProject(dep, built, projects, deps) - print project - built.append(project) - -def ParseSolution(solution_file): - # All projects, their clsid and paths. - projects = dict() - - # A list of dependencies associated with a project. - dependencies = dict() - - # Regular expressions that matches the SLN format. - # The first line of a project definition. - begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942' - '}"\) = "(.*)", "(.*)", "(.*)"$')) - # The last line of a project definition. - end_project = re.compile('^EndProject$') - # The first line of a dependency list. - begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$') - # The last line of a dependency list. - end_dep = re.compile('EndProjectSection$') - # A line describing a dependency. - dep_line = re.compile(' *({.*}) = ({.*})$') - - in_deps = False - solution = open(solution_file) - for line in solution: - results = begin_project.search(line) - if results: - # Hack to remove icu because the diff is too different. - if results.group(1).find('icu') != -1: - continue - # We remove "_gyp" from the names because it helps to diff them. - current_project = results.group(1).replace('_gyp', '') - projects[current_project] = [results.group(2).replace('_gyp', ''), - results.group(3), - results.group(2)] - dependencies[current_project] = [] - continue - - results = end_project.search(line) - if results: - current_project = None - continue - - results = begin_dep.search(line) - if results: - in_deps = True - continue - - results = end_dep.search(line) - if results: - in_deps = False - continue - - results = dep_line.search(line) - if results and in_deps and current_project: - dependencies[current_project].append(results.group(1)) - continue - - # Change all dependencies clsid to name instead. 
- for project in dependencies: - # For each dependencies in this project - new_dep_array = [] - for dep in dependencies[project]: - # Look for the project name matching this cldis - for project_info in projects: - if projects[project_info][1] == dep: - new_dep_array.append(project_info) - dependencies[project] = sorted(new_dep_array) - - return (projects, dependencies) - -def PrintDependencies(projects, deps): - print "---------------------------------------" - print "Dependencies for all projects" - print "---------------------------------------" - print "-- --" - - for (project, dep_list) in sorted(deps.items()): - print "Project : %s" % project - print "Path : %s" % projects[project][0] - if dep_list: - for dep in dep_list: - print " - %s" % dep - print "" - - print "-- --" - -def PrintBuildOrder(projects, deps): - print "---------------------------------------" - print "Build order " - print "---------------------------------------" - print "-- --" - - built = [] - for (project, dep_list) in sorted(deps.items()): - if project not in built: - BuildProject(project, built, projects, deps) - - print "-- --" - -def PrintVCProj(projects): - - for project in projects: - print "-------------------------------------" - print "-------------------------------------" - print project - print project - print project - print "-------------------------------------" - print "-------------------------------------" - - project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]), - projects[project][2])) - - pretty = pretty_vcproj - argv = [ '', - project_path, - '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]), - ] - argv.extend(sys.argv[3:]) - pretty.main(argv) - -def main(): - # check if we have exactly 1 parameter. - if len(sys.argv) < 2: - print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0] - return - - (projects, deps) = ParseSolution(sys.argv[1]) - PrintDependencies(projects, deps) - PrintBuildOrder(projects, deps) - - if '--recursive' in sys.argv: - PrintVCProj(projects) - -if __name__ == '__main__': - main() - diff --git a/mozc_build_tools/gyp/tools/pretty_vcproj.py b/mozc_build_tools/gyp/tools/pretty_vcproj.py deleted file mode 100755 index 292a39f..0000000 --- a/mozc_build_tools/gyp/tools/pretty_vcproj.py +++ /dev/null @@ -1,316 +0,0 @@ -#!/usr/bin/python2.5 - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Make the format of a vcproj really pretty. - - This script normalize and sort an xml. It also fetches all the properties - inside linked vsprops and include them explicitly in the vcproj. - - It outputs the resulting xml to stdout. -""" - -__author__ = 'nsylvain (Nicolas Sylvain)' - -import os -import sys - -from xml.dom.minidom import parse -from xml.dom.minidom import Node - -REPLACEMENTS = dict() -ARGUMENTS = None - -class CmpTuple: - """Compare function between 2 tuple.""" - def __call__(self, x, y): - (key1, value1) = x - (key2, value2) = y - return cmp(key1, key2) - -class CmpNode: - """Compare function between 2 xml nodes.""" - - def get_string(self, node): - node_string = "node" - node_string += node.nodeName - if node.nodeValue: - node_string += node.nodeValue - - if node.attributes: - # We first sort by name, if present. 
- node_string += node.getAttribute("Name") - - all_nodes = [] - for (name, value) in node.attributes.items(): - all_nodes.append((name, value)) - - all_nodes.sort(CmpTuple()) - for (name, value) in all_nodes: - node_string += name - node_string += value - - return node_string - - def __call__(self, x, y): - return cmp(self.get_string(x), self.get_string(y)) - -def PrettyPrintNode(node, indent=0): - if node.nodeType == Node.TEXT_NODE: - if node.data.strip(): - print '%s%s' % (' '*indent, node.data.strip()) - return - - if node.childNodes: - node.normalize() - # Get the number of attributes - attr_count = 0 - if node.attributes: - attr_count = node.attributes.length - - # Print the main tag - if attr_count == 0: - print '%s<%s>' % (' '*indent, node.nodeName) - else: - print '%s<%s' % (' '*indent, node.nodeName) - - all_attributes = [] - for (name, value) in node.attributes.items(): - all_attributes.append((name, value)) - all_attributes.sort(CmpTuple()) - for (name, value) in all_attributes: - print '%s %s="%s"' % (' '*indent, name, value) - print '%s>' % (' '*indent) - if node.nodeValue: - print '%s %s' % (' '*indent, node.nodeValue) - - for sub_node in node.childNodes: - PrettyPrintNode(sub_node, indent=indent+2) - print '%s</%s>' % (' '*indent, node.nodeName) - -def FlattenFilter(node): - """Returns a list of all the node and sub nodes.""" - node_list = [] - - if (node.attributes and - node.getAttribute('Name') == '_excluded_files'): - # We don't add the "_excluded_files" filter. - return [] - - for current in node.childNodes: - if current.nodeName == 'Filter': - node_list.extend(FlattenFilter(current)) - else: - node_list.append(current) - - return node_list - -def FixFilenames(filenames, current_directory): - new_list = [] - for filename in filenames: - if filename: - for key in REPLACEMENTS: - filename = filename.replace(key, REPLACEMENTS[key]) - os.chdir(current_directory) - filename = filename.strip('"\' ') - if filename.startswith('$'): - new_list.append(filename) - else: - new_list.append(os.path.abspath(filename)) - return new_list - -def AbsoluteNode(node): - # Make all the properties we know about in this node absolute. - if node.attributes: - for (name, value) in node.attributes.items(): - if name in ['InheritedPropertySheets', 'RelativePath', - 'AdditionalIncludeDirectories', - 'IntermediateDirectory', 'OutputDirectory', - 'AdditionalLibraryDirectories']: - # We want to fix up these paths - path_list = value.split(';') - new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1])) - node.setAttribute(name, ';'.join(new_list)) - if not value: - node.removeAttribute(name) - -def CleanupVcproj(node): - # For each sub node, we call recursively this function. - for sub_node in node.childNodes: - AbsoluteNode(sub_node) - CleanupVcproj(sub_node) - - # Normalize the node, and remove all extranous whitespaces. - for sub_node in node.childNodes: - if sub_node.nodeType == Node.TEXT_NODE: - sub_node.data = sub_node.data.replace("\r", "") - sub_node.data = sub_node.data.replace("\n", "") - sub_node.data = sub_node.data.rstrip() - - # Fix all the semicolon separated attributes to be sorted, and we also - # remove the dups.
- if node.attributes: - for (name, value) in node.attributes.items(): - sorted_list = sorted(value.split(';')) - unique_list = [] - [unique_list.append(i) for i in sorted_list if not unique_list.count(i)] - node.setAttribute(name, ';'.join(unique_list)) - if not value: - node.removeAttribute(name) - - if node.childNodes: - node.normalize() - - # For each node, take a copy, and remove it from the list. - node_array = [] - while node.childNodes and node.childNodes[0]: - # Take a copy of the node and remove it from the list. - current = node.childNodes[0] - node.removeChild(current) - - # If the child is a filter, we want to append all its children - # to this same list. - if current.nodeName == 'Filter': - node_array.extend(FlattenFilter(current)) - else: - node_array.append(current) - - - # Sort the list. - node_array.sort(CmpNode()) - - # Insert the nodes in the correct order. - for new_node in node_array: - # But don't append empty tool node. - if new_node.nodeName == 'Tool': - if new_node.attributes and new_node.attributes.length == 1: - # This one was empty. - continue - if new_node.nodeName == 'UserMacro': - continue - node.appendChild(new_node) - -def GetConfiguationNodes(vcproj): - #TODO(nsylvain): Find a better way to navigate the xml. - nodes = [] - for node in vcproj.childNodes: - if node.nodeName == "Configurations": - for sub_node in node.childNodes: - if sub_node.nodeName == "Configuration": - nodes.append(sub_node) - - return nodes - -def GetChildrenVsprops(filename): - dom = parse(filename) - if dom.documentElement.attributes: - vsprops = dom.documentElement.getAttribute('InheritedPropertySheets') - return FixFilenames(vsprops.split(';'), os.path.dirname(filename)) - return [] - -def SeekToNode(node1, child2): - # A text node does not have properties. - if child2.nodeType == Node.TEXT_NODE: - return None - - # Get the name of the current node. - current_name = child2.getAttribute("Name") - if not current_name: - # There is no name. We don't know how to merge. - return None - - # Look through all the nodes to find a match. - for sub_node in node1.childNodes: - if sub_node.nodeName == child2.nodeName: - name = sub_node.getAttribute("Name") - if name == current_name: - return sub_node - - # No match. We give up. - return None - -def MergeAttributes(node1, node2): - # No attributes to merge? - if not node2.attributes: - return - - for (name, value2) in node2.attributes.items(): - # Don't merge the 'Name' attribute. - if name == 'Name': - continue - value1 = node1.getAttribute(name) - if value1: - # The attribute exist in the main node. If it's equal, we leave it - # untouched, otherwise we concatenate it. - if value1 != value2: - node1.setAttribute(name, ';'.join([value1, value2])) - else: - # The attribute does nto exist in the main node. We append this one. - node1.setAttribute(name, value2) - - # If the attribute was a property sheet attributes, we remove it, since - # they are useless. - if name == 'InheritedPropertySheets': - node1.removeAttribute(name) - -def MergeProperties(node1, node2): - MergeAttributes(node1, node2) - for child2 in node2.childNodes: - child1 = SeekToNode(node1, child2) - if child1: - MergeProperties(child1, child2) - else: - node1.appendChild(child2.cloneNode(True)) - -def main(argv): - global REPLACEMENTS - global ARGUMENTS - ARGUMENTS = argv - """Main function of this vcproj prettifier.""" - - # check if we have exactly 1 parameter. 
- if len(argv) < 2: - print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' - '[key2=value2]' % argv[0]) - return - - # Parse the keys - for i in range(2, len(argv)): - (key, value) = argv[i].split('=') - REPLACEMENTS[key] = value - - # Open the vcproj and parse the xml. - dom = parse(argv[1]) - - # First thing we need to do is find the Configuration Node and merge them - # with the vsprops they include. - for configuration_node in GetConfiguationNodes(dom.documentElement): - # Get the property sheets associated with this configuration. - vsprops = configuration_node.getAttribute('InheritedPropertySheets') - - # Fix the filenames to be absolute. - vsprops_list = FixFilenames(vsprops.strip().split(';'), - os.path.dirname(argv[1])) - - # Extend the list of vsprops with all vsprops contained in the current - # vsprops. - for current_vsprops in vsprops_list: - vsprops_list.extend(GetChildrenVsprops(current_vsprops)) - - # Now that we have all the vsprops, we need to merge them. - for current_vsprops in vsprops_list: - MergeProperties(configuration_node, - parse(current_vsprops).documentElement) - - # Now that everything is merged, we need to cleanup the xml. - CleanupVcproj(dom.documentElement) - - # Finally, we use the prett xml function to print the vcproj back to the - # user. - #print dom.toprettyxml(newl="\n") - PrettyPrintNode(dom.documentElement) - -if __name__ == '__main__': - main(sys.argv) diff --git a/mozc_version_template.txt b/mozc_version_template.txt index 2715f34..2b8d389 100755 --- a/mozc_version_template.txt +++ b/mozc_version_template.txt @@ -1,4 +1,4 @@ MAJOR=0 MINOR=12 -BUILD=410 +BUILD=422 REVISION=102 diff --git a/third_party/gyp/AUTHORS b/third_party/gyp/AUTHORS deleted file mode 100644 index f0b6752..0000000 --- a/third_party/gyp/AUTHORS +++ /dev/null @@ -1,5 +0,0 @@ -# Names should be added to this file like so: -# Name or Organization - -Google Inc. -Steven Knight diff --git a/third_party/gyp/DEPS b/third_party/gyp/DEPS deleted file mode 100644 index 4a46eba..0000000 --- a/third_party/gyp/DEPS +++ /dev/null @@ -1,8 +0,0 @@ -# DEPS file for gclient use in buildbot execution of gyp tests. -# -# (You don't need to use gclient for normal GYP development work.) - -deps = { - "scons": - "svn://chrome-svn.corp.google.com/chrome/trunk/src/third_party/scons", -} diff --git a/third_party/gyp/LICENSE b/third_party/gyp/LICENSE deleted file mode 100644 index ab6b011..0000000 --- a/third_party/gyp/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 Google Inc. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/third_party/gyp/MANIFEST b/third_party/gyp/MANIFEST deleted file mode 100644 index 925ecc1..0000000 --- a/third_party/gyp/MANIFEST +++ /dev/null @@ -1,21 +0,0 @@ -setup.py -gyp -LICENSE -AUTHORS -pylib/gyp/MSVSNew.py -pylib/gyp/MSVSProject.py -pylib/gyp/MSVSToolFile.py -pylib/gyp/MSVSUserFile.py -pylib/gyp/MSVSVersion.py -pylib/gyp/SCons.py -pylib/gyp/__init__.py -pylib/gyp/common.py -pylib/gyp/input.py -pylib/gyp/xcodeproj_file.py -pylib/gyp/generator/__init__.py -pylib/gyp/generator/gypd.py -pylib/gyp/generator/gypsh.py -pylib/gyp/generator/make.py -pylib/gyp/generator/msvs.py -pylib/gyp/generator/scons.py -pylib/gyp/generator/xcode.py diff --git a/third_party/gyp/PRESUBMIT.py b/third_party/gyp/PRESUBMIT.py deleted file mode 100755 index 4c99288..0000000 --- a/third_party/gyp/PRESUBMIT.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2010, Google Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- - -EXCLUDED_PATHS = () - - -def CheckChangeOnUpload(input_api, output_api): - report = [] - black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS - sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list) - report.extend(input_api.canned_checks.CheckChangeSvnEolStyle( - input_api, output_api, sources)) - return report - - -def CheckChangeOnCommit(input_api, output_api): - report = [] - black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS - sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list) - report.extend(input_api.canned_checks.CheckChangeSvnEolStyle( - input_api, output_api, sources)) - report.extend(input_api.canned_checks.CheckTreeIsOpen( - input_api, output_api, - 'http://gyp-status.appspot.com/status', - 'http://gyp-status.appspot.com/current')) - return report diff --git a/third_party/gyp/codereview.settings b/third_party/gyp/codereview.settings deleted file mode 100644 index a04a244..0000000 --- a/third_party/gyp/codereview.settings +++ /dev/null @@ -1,10 +0,0 @@ -# This file is used by gcl to get repository specific information. -CODE_REVIEW_SERVER: codereview.chromium.org -CC_LIST: gyp-developer@googlegroups.com -VIEW_VC: http://code.google.com/p/gyp/source/detail?r= -TRY_ON_UPLOAD: True -TRYSERVER_PROJECT: gyp -TRYSERVER_PATCHLEVEL: 0 -TRYSERVER_ROOT: trunk -TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl - diff --git a/third_party/gyp/gyp b/third_party/gyp/gyp deleted file mode 100755 index d52e711..0000000 --- a/third_party/gyp/gyp +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -# TODO(mark): sys.path manipulation is some temporary testing stuff. -try: - import gyp -except ImportError, e: - import os.path - sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib')) - import gyp - -if __name__ == '__main__': - sys.exit(gyp.main(sys.argv[1:])) diff --git a/third_party/gyp/gyp.bat b/third_party/gyp/gyp.bat deleted file mode 100755 index 90fbc6d..0000000 --- a/third_party/gyp/gyp.bat +++ /dev/null @@ -1,5 +0,0 @@ -@rem Copyright (c) 2009 Google Inc. All rights reserved. -@rem Use of this source code is governed by a BSD-style license that can be -@rem found in the LICENSE file. - -@python "%~dp0/gyp" %* diff --git a/third_party/gyp/gyp_dummy.c b/third_party/gyp/gyp_dummy.c deleted file mode 100644 index fb55bbc..0000000 --- a/third_party/gyp/gyp_dummy.c +++ /dev/null @@ -1,7 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -int main() { - return 0; -} diff --git a/third_party/gyp/gyptest.py b/third_party/gyp/gyptest.py deleted file mode 100755 index d9677db..0000000 --- a/third_party/gyp/gyptest.py +++ /dev/null @@ -1,255 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -__doc__ = """ -gyptest.py -- test runner for GYP tests. -""" - -import os -import optparse -import subprocess -import sys - -class CommandRunner: - """ - Executor class for commands, including "commands" implemented by - Python functions. 
- """ - verbose = True - active = True - - def __init__(self, dictionary={}): - self.subst_dictionary(dictionary) - - def subst_dictionary(self, dictionary): - self._subst_dictionary = dictionary - - def subst(self, string, dictionary=None): - """ - Substitutes (via the format operator) the values in the specified - dictionary into the specified command. - - The command can be an (action, string) tuple. In all cases, we - perform substitution on strings and don't worry if something isn't - a string. (It's probably a Python function to be executed.) - """ - if dictionary is None: - dictionary = self._subst_dictionary - if dictionary: - try: - string = string % dictionary - except TypeError: - pass - return string - - def display(self, command, stdout=None, stderr=None): - if not self.verbose: - return - if type(command) == type(()): - func = command[0] - args = command[1:] - s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args))) - if type(command) == type([]): - # TODO: quote arguments containing spaces - # TODO: handle meta characters? - s = ' '.join(command) - else: - s = self.subst(command) - if not s.endswith('\n'): - s += '\n' - sys.stdout.write(s) - sys.stdout.flush() - - def execute(self, command, stdout=None, stderr=None): - """ - Executes a single command. - """ - if not self.active: - return 0 - if type(command) == type(''): - command = self.subst(command) - cmdargs = shlex.split(command) - if cmdargs[0] == 'cd': - command = (os.chdir,) + tuple(cmdargs[1:]) - if type(command) == type(()): - func = command[0] - args = command[1:] - return func(*args) - else: - if stdout is sys.stdout: - # Same as passing sys.stdout, except python2.4 doesn't fail on it. - subout = None - else: - # Open pipe for anything else so Popen works on python2.4. - subout = subprocess.PIPE - if stderr is sys.stderr: - # Same as passing sys.stderr, except python2.4 doesn't fail on it. - suberr = None - elif stderr is None: - # Merge with stdout if stderr isn't specified. - suberr = subprocess.STDOUT - else: - # Open pipe for anything else so Popen works on python2.4. - suberr = subprocess.PIPE - p = subprocess.Popen(command, - shell=(sys.platform == 'win32'), - stdout=subout, - stderr=suberr) - p.wait() - if stdout is None: - self.stdout = p.stdout.read() - elif stdout is not sys.stdout: - stdout.write(p.stdout.read()) - if stderr not in (None, sys.stderr): - stderr.write(p.stderr.read()) - return p.returncode - - def run(self, command, display=None, stdout=None, stderr=None): - """ - Runs a single command, displaying it first. 
- """ - if display is None: - display = command - self.display(display) - return self.execute(command, stdout, stderr) - - -class Unbuffered: - def __init__(self, fp): - self.fp = fp - def write(self, arg): - self.fp.write(arg) - self.fp.flush() - def __getattr__(self, attr): - return getattr(self.fp, attr) - -sys.stdout = Unbuffered(sys.stdout) -sys.stderr = Unbuffered(sys.stderr) - - -def find_all_gyptest_files(directory): - result = [] - for root, dirs, files in os.walk(directory): - if '.svn' in dirs: - dirs.remove('.svn') - result.extend([ os.path.join(root, f) for f in files - if f.startswith('gyptest') and f.endswith('.py') ]) - result.sort() - return result - - -def main(argv=None): - if argv is None: - argv = sys.argv - - usage = "gyptest.py [-ahlnq] [-f formats] [test ...]" - parser = optparse.OptionParser(usage=usage) - parser.add_option("-a", "--all", action="store_true", - help="run all tests") - parser.add_option("-C", "--chdir", action="store", default=None, - help="chdir to the specified directory") - parser.add_option("-f", "--format", action="store", default='', - help="run tests with the specified formats") - parser.add_option("-l", "--list", action="store_true", - help="list available tests and exit") - parser.add_option("-n", "--no-exec", action="store_true", - help="no execute, just print the command line") - parser.add_option("--passed", action="store_true", - help="report passed tests") - parser.add_option("--path", action="append", default=[], - help="additional $PATH directory") - parser.add_option("-q", "--quiet", action="store_true", - help="quiet, don't print test command lines") - opts, args = parser.parse_args(argv[1:]) - - if opts.chdir: - os.chdir(opts.chdir) - - if opts.path: - os.environ['PATH'] += ':' + ':'.join(opts.path) - - if not args: - if not opts.all: - sys.stderr.write('Specify -a to get all tests.\n') - return 1 - args = ['test'] - - tests = [] - for arg in args: - if os.path.isdir(arg): - tests.extend(find_all_gyptest_files(os.path.normpath(arg))) - else: - tests.append(arg) - - if opts.list: - for test in tests: - print test - sys.exit(0) - - CommandRunner.verbose = not opts.quiet - CommandRunner.active = not opts.no_exec - cr = CommandRunner() - - os.environ['PYTHONPATH'] = os.path.abspath('test/lib') - if not opts.quiet: - sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH']) - - passed = [] - failed = [] - no_result = [] - - if opts.format: - format_list = opts.format.split(',') - else: - # TODO: not duplicate this mapping from pylib/gyp/__init__.py - format_list = [ { - 'freebsd7': 'make', - 'freebsd8': 'make', - 'cygwin': 'msvs', - 'win32': 'msvs', - 'linux2': 'make', - 'darwin': 'xcode', - }[sys.platform] ] - - for format in format_list: - os.environ['TESTGYP_FORMAT'] = format - if not opts.quiet: - sys.stdout.write('TESTGYP_FORMAT=%s\n' % format) - - for test in tests: - status = cr.run([sys.executable, test], - stdout=sys.stdout, - stderr=sys.stderr) - if status == 2: - no_result.append(test) - elif status: - failed.append(test) - else: - passed.append(test) - - if not opts.quiet: - def report(description, tests): - if tests: - if len(tests) == 1: - sys.stdout.write("\n%s the following test:\n" % description) - else: - fmt = "\n%s the following %d tests:\n" - sys.stdout.write(fmt % (description, len(tests))) - sys.stdout.write("\t" + "\n\t".join(tests) + "\n") - - if opts.passed: - report("Passed", passed) - report("Failed", failed) - report("No result from", no_result) - - if failed: - return 1 - else: - return 0 - - -if 
__name__ == "__main__": - sys.exit(main()) diff --git a/third_party/gyp/pylib/gyp/MSVSNew.py b/third_party/gyp/pylib/gyp/MSVSNew.py deleted file mode 100644 index f18e5b6..0000000 --- a/third_party/gyp/pylib/gyp/MSVSNew.py +++ /dev/null @@ -1,331 +0,0 @@ -#!/usr/bin/python2.4 - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""New implementation of Visual Studio project generation for SCons.""" - -import common -import os -import random - -# hashlib is supplied as of Python 2.5 as the replacement interface for md5 -# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if -# available, avoiding a deprecation warning under 2.6. Import md5 otherwise, -# preserving 2.4 compatibility. -try: - import hashlib - _new_md5 = hashlib.md5 -except ImportError: - import md5 - _new_md5 = md5.new - - -# Initialize random number generator -random.seed() - -# GUIDs for project types -ENTRY_TYPE_GUIDS = { - 'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}', - 'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}', -} - -#------------------------------------------------------------------------------ -# Helper functions - - -def MakeGuid(name, seed='msvs_new'): - """Returns a GUID for the specified target name. - - Args: - name: Target name. - seed: Seed for MD5 hash. - Returns: - A GUID-line string calculated from the name and seed. - - This generates something which looks like a GUID, but depends only on the - name and seed. This means the same name/seed will always generate the same - GUID, so that projects and solutions which refer to each other can explicitly - determine the GUID to refer to explicitly. It also means that the GUID will - not change when the project for a target is rebuilt. - """ - # Calculate a MD5 signature for the seed and name. - d = _new_md5(str(seed) + str(name)).hexdigest().upper() - # Convert most of the signature to GUID form (discard the rest) - guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20] - + '-' + d[20:32] + '}') - return guid - -#------------------------------------------------------------------------------ - - -class MSVSFolder: - """Folder in a Visual Studio project or solution.""" - - def __init__(self, path, name = None, entries = None, - guid = None, items = None): - """Initializes the folder. - - Args: - path: Full path to the folder. - name: Name of the folder. - entries: List of folder entries to nest inside this folder. May contain - Folder or Project objects. May be None, if the folder is empty. - guid: GUID to use for folder, if not None. - items: List of solution items to include in the folder project. May be - None, if the folder does not directly contain items. - """ - if name: - self.name = name - else: - # Use last layer. - self.name = os.path.basename(path) - - self.path = path - self.guid = guid - - # Copy passed lists (or set to empty lists) - self.entries = list(entries or []) - self.items = list(items or []) - - self.entry_type_guid = ENTRY_TYPE_GUIDS['folder'] - - def get_guid(self): - if self.guid is None: - # Use consistent guids for folders (so things don't regenerate). 
- self.guid = MakeGuid(self.path, seed='msvs_folder') - return self.guid - - -#------------------------------------------------------------------------------ - - -class MSVSProject: - """Visual Studio project.""" - - def __init__(self, path, name = None, dependencies = None, guid = None, - config_platform_overrides = None): - """Initializes the project. - - Args: - path: Relative path to project file. - name: Name of project. If None, the name will be the same as the base - name of the project file. - dependencies: List of other Project objects this project is dependent - upon, if not None. - guid: GUID to use for project, if not None. - config_platform_overrides: optional dict of configuration platforms to - used in place of the default for this target. - """ - self.path = path - self.guid = guid - - if name: - self.name = name - else: - # Use project filename - self.name = os.path.splitext(os.path.basename(path))[0] - - # Copy passed lists (or set to empty lists) - self.dependencies = list(dependencies or []) - - self.entry_type_guid = ENTRY_TYPE_GUIDS['project'] - - if config_platform_overrides: - self.config_platform_overrides = config_platform_overrides - else: - self.config_platform_overrides = {} - - def get_guid(self): - if self.guid is None: - # Set GUID from path - # TODO(rspangler): This is fragile. - # 1. We can't just use the project filename sans path, since there could - # be multiple projects with the same base name (for example, - # foo/unittest.vcproj and bar/unittest.vcproj). - # 2. The path needs to be relative to $SOURCE_ROOT, so that the project - # GUID is the same whether it's included from base/base.sln or - # foo/bar/baz/baz.sln. - # 3. The GUID needs to be the same each time this builder is invoked, so - # that we don't need to rebuild the solution when the project changes. - # 4. We should be able to handle pre-built project files by reading the - # GUID from the files. - self.guid = MakeGuid(self.name) - return self.guid - -#------------------------------------------------------------------------------ - - -class MSVSSolution: - """Visual Studio solution.""" - - def __init__(self, path, version, entries=None, variants=None, - websiteProperties=True): - """Initializes the solution. - - Args: - path: Path to solution file. - version: Format version to emit. - entries: List of entries in solution. May contain Folder or Project - objects. May be None, if the folder is empty. - variants: List of build variant strings. If none, a default list will - be used. - websiteProperties: Flag to decide if the website properties section - is generated. - """ - self.path = path - self.websiteProperties = websiteProperties - self.version = version - - # Copy passed lists (or set to empty lists) - self.entries = list(entries or []) - - if variants: - # Copy passed list - self.variants = variants[:] - else: - # Use default - self.variants = ['Debug|Win32', 'Release|Win32'] - # TODO(rspangler): Need to be able to handle a mapping of solution config - # to project config. Should we be able to handle variants being a dict, - # or add a separate variant_map variable? If it's a dict, we can't - # guarantee the order of variants since dict keys aren't ordered. - - - # TODO(rspangler): Automatically write to disk for now; should delay until - # node-evaluation time. - self.Write() - - - def Write(self, writer=common.WriteOnDiff): - """Writes the solution file to disk. - - Raises: - IndexError: An entry appears multiple times. 
- """ - # Walk the entry tree and collect all the folders and projects. - all_entries = [] - entries_to_check = self.entries[:] - while entries_to_check: - # Pop from the beginning of the list to preserve the user's order. - e = entries_to_check.pop(0) - - # A project or folder can only appear once in the solution's folder tree. - # This also protects from cycles. - if e in all_entries: - #raise IndexError('Entry "%s" appears more than once in solution' % - # e.name) - continue - - all_entries.append(e) - - # If this is a folder, check its entries too. - if isinstance(e, MSVSFolder): - entries_to_check += e.entries - - # Sort by name then guid (so things are in order on vs2008). - def NameThenGuid(a, b): - if a.name < b.name: return -1 - if a.name > b.name: return 1 - if a.get_guid() < b.get_guid(): return -1 - if a.get_guid() > b.get_guid(): return 1 - return 0 - - all_entries = sorted(all_entries, NameThenGuid) - - # Open file and print header - f = writer(self.path) - f.write('Microsoft Visual Studio Solution File, ' - 'Format Version %s\r\n' % self.version.SolutionVersion()) - f.write('# %s\r\n' % self.version.Description()) - - # Project entries - for e in all_entries: - f.write('Project("%s") = "%s", "%s", "%s"\r\n' % ( - e.entry_type_guid, # Entry type GUID - e.name, # Folder name - e.path.replace('/', '\\'), # Folder name (again) - e.get_guid(), # Entry GUID - )) - - # TODO(rspangler): Need a way to configure this stuff - if self.websiteProperties: - f.write('\tProjectSection(WebsiteProperties) = preProject\r\n' - '\t\tDebug.AspNetCompiler.Debug = "True"\r\n' - '\t\tRelease.AspNetCompiler.Debug = "False"\r\n' - '\tEndProjectSection\r\n') - - if isinstance(e, MSVSFolder): - if e.items: - f.write('\tProjectSection(SolutionItems) = preProject\r\n') - for i in e.items: - f.write('\t\t%s = %s\r\n' % (i, i)) - f.write('\tEndProjectSection\r\n') - - if isinstance(e, MSVSProject): - if e.dependencies: - f.write('\tProjectSection(ProjectDependencies) = postProject\r\n') - for d in e.dependencies: - f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid())) - f.write('\tEndProjectSection\r\n') - - f.write('EndProject\r\n') - - # Global section - f.write('Global\r\n') - - # Configurations (variants) - f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n') - for v in self.variants: - f.write('\t\t%s = %s\r\n' % (v, v)) - f.write('\tEndGlobalSection\r\n') - - # Sort config guids for easier diffing of solution changes. - config_guids = [] - config_guids_overrides = {} - for e in all_entries: - if isinstance(e, MSVSProject): - config_guids.append(e.get_guid()) - config_guids_overrides[e.get_guid()] = e.config_platform_overrides - config_guids.sort() - - f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n') - for g in config_guids: - for v in self.variants: - nv = config_guids_overrides[g].get(v, v) - # Pick which project configuration to build for this solution - # configuration. - f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % ( - g, # Project GUID - v, # Solution build configuration - nv, # Project build config for that solution config - )) - - # Enable project in this solution configuration. 
- f.write('\t\t%s.%s.Build.0 = %s\r\n' % ( - g, # Project GUID - v, # Solution build configuration - nv, # Project build config for that solution config - )) - f.write('\tEndGlobalSection\r\n') - - # TODO(rspangler): Should be able to configure this stuff too (though I've - # never seen this be any different) - f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n') - f.write('\t\tHideSolutionNode = FALSE\r\n') - f.write('\tEndGlobalSection\r\n') - - # Folder mappings - # TODO(rspangler): Should omit this section if there are no folders - f.write('\tGlobalSection(NestedProjects) = preSolution\r\n') - for e in all_entries: - if not isinstance(e, MSVSFolder): - continue # Does not apply to projects, only folders - for subentry in e.entries: - f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid())) - f.write('\tEndGlobalSection\r\n') - - f.write('EndGlobal\r\n') - - f.close() diff --git a/third_party/gyp/pylib/gyp/MSVSProject.py b/third_party/gyp/pylib/gyp/MSVSProject.py deleted file mode 100644 index 18d98e7..0000000 --- a/third_party/gyp/pylib/gyp/MSVSProject.py +++ /dev/null @@ -1,244 +0,0 @@ -#!/usr/bin/python2.4 - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio project reader/writer.""" - -import common -import xml.dom -import xml.dom.minidom -import MSVSNew - -#------------------------------------------------------------------------------ - - -class Tool(object): - """Visual Studio tool.""" - - def __init__(self, name, attrs=None): - """Initializes the tool. - - Args: - name: Tool name. - attrs: Dict of tool attributes; may be None. - """ - self.name = name - self.attrs = attrs or {} - - def CreateElement(self, doc): - """Creates an element for the tool. - - Args: - doc: xml.dom.Document object to use for node creation. - - Returns: - A new xml.dom.Element for the tool. - """ - node = doc.createElement('Tool') - node.setAttribute('Name', self.name) - for k, v in self.attrs.items(): - node.setAttribute(k, v) - return node - - -class Filter(object): - """Visual Studio filter - that is, a virtual folder.""" - - def __init__(self, name, contents=None): - """Initializes the folder. - - Args: - name: Filter (folder) name. - contents: List of filenames and/or Filter objects contained. - """ - self.name = name - self.contents = list(contents or []) - - -#------------------------------------------------------------------------------ - - -class Writer(object): - """Visual Studio XML project writer.""" - - def __init__(self, project_path, version): - """Initializes the project. - - Args: - project_path: Path to the project file. - version: Format version to emit. - """ - self.project_path = project_path - self.doc = None - self.version = version - - def Create(self, name, guid=None, platforms=None): - """Creates the project document. - - Args: - name: Name of the project. - guid: GUID to use for project, if not None. - """ - self.name = name - self.guid = guid or MSVSNew.MakeGuid(self.project_path) - - # Default to Win32 for platforms. 
- if not platforms: - platforms = ['Win32'] - - # Create XML doc - xml_impl = xml.dom.getDOMImplementation() - self.doc = xml_impl.createDocument(None, 'VisualStudioProject', None) - - # Add attributes to root element - self.n_root = self.doc.documentElement - self.n_root.setAttribute('ProjectType', 'Visual C++') - self.n_root.setAttribute('Version', self.version.ProjectVersion()) - self.n_root.setAttribute('Name', self.name) - self.n_root.setAttribute('ProjectGUID', self.guid) - self.n_root.setAttribute('RootNamespace', self.name) - self.n_root.setAttribute('Keyword', 'Win32Proj') - - # Add platform list - n_platform = self.doc.createElement('Platforms') - self.n_root.appendChild(n_platform) - for platform in platforms: - n = self.doc.createElement('Platform') - n.setAttribute('Name', platform) - n_platform.appendChild(n) - - # Add tool files section - self.n_tool_files = self.doc.createElement('ToolFiles') - self.n_root.appendChild(self.n_tool_files) - - # Add configurations section - self.n_configs = self.doc.createElement('Configurations') - self.n_root.appendChild(self.n_configs) - - # Add empty References section - self.n_root.appendChild(self.doc.createElement('References')) - - # Add files section - self.n_files = self.doc.createElement('Files') - self.n_root.appendChild(self.n_files) - # Keep a dict keyed on filename to speed up access. - self.n_files_dict = dict() - - # Add empty Globals section - self.n_root.appendChild(self.doc.createElement('Globals')) - - def AddToolFile(self, path): - """Adds a tool file to the project. - - Args: - path: Relative path from project to tool file. - """ - n_tool = self.doc.createElement('ToolFile') - n_tool.setAttribute('RelativePath', path) - self.n_tool_files.appendChild(n_tool) - - def _AddConfigToNode(self, parent, config_type, config_name, attrs=None, - tools=None): - """Adds a configuration to the parent node. - - Args: - parent: Destination node. - config_type: Type of configuration node. - config_name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - """ - # Handle defaults - if not attrs: - attrs = {} - if not tools: - tools = [] - - # Add configuration node and its attributes - n_config = self.doc.createElement(config_type) - n_config.setAttribute('Name', config_name) - for k, v in attrs.items(): - n_config.setAttribute(k, v) - parent.appendChild(n_config) - - # Add tool nodes and their attributes - if tools: - for t in tools: - if isinstance(t, Tool): - n_config.appendChild(t.CreateElement(self.doc)) - else: - n_config.appendChild(Tool(t).CreateElement(self.doc)) - - def AddConfig(self, name, attrs=None, tools=None): - """Adds a configuration to the project. - - Args: - name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - """ - self._AddConfigToNode(self.n_configs, 'Configuration', name, attrs, tools) - - def _AddFilesToNode(self, parent, files): - """Adds files and/or filters to the parent node. - - Args: - parent: Destination node - files: A list of Filter objects and/or relative paths to files. - - Will call itself recursively, if the files list contains Filter objects. 
- """ - for f in files: - if isinstance(f, Filter): - node = self.doc.createElement('Filter') - node.setAttribute('Name', f.name) - self._AddFilesToNode(node, f.contents) - else: - node = self.doc.createElement('File') - node.setAttribute('RelativePath', f) - self.n_files_dict[f] = node - parent.appendChild(node) - - def AddFiles(self, files): - """Adds files to the project. - - Args: - files: A list of Filter objects and/or relative paths to files. - - This makes a copy of the file/filter tree at the time of this call. If you - later add files to a Filter object which was passed into a previous call - to AddFiles(), it will not be reflected in this project. - """ - self._AddFilesToNode(self.n_files, files) - # TODO(rspangler) This also doesn't handle adding files to an existing - # filter. That is, it doesn't merge the trees. - - def AddFileConfig(self, path, config, attrs=None, tools=None): - """Adds a configuration to a file. - - Args: - path: Relative path to the file. - config: Name of configuration to add. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - - Raises: - ValueError: Relative path does not match any file added via AddFiles(). - """ - # Find the file node with the right relative path - parent = self.n_files_dict.get(path) - if not parent: - raise ValueError('AddFileConfig: file "%s" not in project.' % path) - - # Add the config to the file node - self._AddConfigToNode(parent, 'FileConfiguration', config, attrs, tools) - - def Write(self, writer=common.WriteOnDiff): - """Writes the project file.""" - f = writer(self.project_path) - self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n') - f.close() - -#------------------------------------------------------------------------------ diff --git a/third_party/gyp/pylib/gyp/MSVSToolFile.py b/third_party/gyp/pylib/gyp/MSVSToolFile.py deleted file mode 100644 index bd8fd9e..0000000 --- a/third_party/gyp/pylib/gyp/MSVSToolFile.py +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/python2.4 - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio project reader/writer.""" - -import common -import xml.dom -import xml.dom.minidom - - -#------------------------------------------------------------------------------ - - -class Writer(object): - """Visual Studio XML tool file writer.""" - - def __init__(self, tool_file_path): - """Initializes the tool file. - - Args: - tool_file_path: Path to the tool file. - """ - self.tool_file_path = tool_file_path - self.doc = None - - def Create(self, name): - """Creates the tool file document. - - Args: - name: Name of the tool file. - """ - self.name = name - - # Create XML doc - xml_impl = xml.dom.getDOMImplementation() - self.doc = xml_impl.createDocument(None, 'VisualStudioToolFile', None) - - # Add attributes to root element - self.n_root = self.doc.documentElement - self.n_root.setAttribute('Version', '8.00') - self.n_root.setAttribute('Name', self.name) - - # Add rules section - self.n_rules = self.doc.createElement('Rules') - self.n_root.appendChild(self.n_rules) - - def AddCustomBuildRule(self, name, cmd, description, - additional_dependencies, - outputs, extensions): - """Adds a rule to the tool file. - - Args: - name: Name of the rule. - description: Description of the rule. - cmd: Command line of the rule. - additional_dependencies: other files which may trigger the rule. 
- outputs: outputs of the rule. - extensions: extensions handled by the rule. - """ - n_rule = self.doc.createElement('CustomBuildRule') - n_rule.setAttribute('Name', name) - n_rule.setAttribute('ExecutionDescription', description) - n_rule.setAttribute('CommandLine', cmd) - n_rule.setAttribute('Outputs', ';'.join(outputs)) - n_rule.setAttribute('FileExtensions', ';'.join(extensions)) - n_rule.setAttribute('AdditionalDependencies', - ';'.join(additional_dependencies)) - self.n_rules.appendChild(n_rule) - - def Write(self, writer=common.WriteOnDiff): - """Writes the tool file.""" - f = writer(self.tool_file_path) - self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n') - f.close() - -#------------------------------------------------------------------------------ diff --git a/third_party/gyp/pylib/gyp/MSVSUserFile.py b/third_party/gyp/pylib/gyp/MSVSUserFile.py deleted file mode 100644 index 21098e3..0000000 --- a/third_party/gyp/pylib/gyp/MSVSUserFile.py +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/python2.4 - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio user preferences file writer.""" - -import common -import os -import re -import socket # for gethostname -import xml.dom -import xml.dom.minidom - - -#------------------------------------------------------------------------------ - -def _FindCommandInPath(command): - """If there are no slashes in the command given, this function - searches the PATH env to find the given command, and converts it - to an absolute path. We have to do this because MSVS is looking - for an actual file to launch a debugger on, not just a command - line. Note that this happens at GYP time, so anything needing to - be built needs to have a full path.""" - if '/' in command or '\\' in command: - # If the command already has path elements (either relative or - # absolute), then assume it is constructed properly. - return command - else: - # Search through the path list and find an existing file that - # we can access. - paths = os.environ.get('PATH','').split(os.pathsep) - for path in paths: - item = os.path.join(path, command) - if os.path.isfile(item) and os.access(item, os.X_OK): - return item - return command - -def _QuoteWin32CommandLineArgs(args): - new_args = [] - for arg in args: - # Replace all double-quotes with double-double-quotes to escape - # them for cmd shell, and then quote the whole thing if there - # are any. - if arg.find('"') != -1: - arg = '""'.join(arg.split('"')) - arg = '"%s"' % arg - - # Otherwise, if there are any spaces, quote the whole arg. - elif re.search(r'[ \t\n]', arg): - arg = '"%s"' % arg - new_args.append(arg) - return new_args - -class Writer(object): - """Visual Studio XML user user file writer.""" - - def __init__(self, user_file_path, version): - """Initializes the user file. - - Args: - user_file_path: Path to the user file. - """ - self.user_file_path = user_file_path - self.version = version - self.doc = None - - def Create(self, name): - """Creates the user file document. - - Args: - name: Name of the user file. 
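The Win32 quoting helper above is easy to misread, so here is a small, self-contained restatement of its two rules with a worked example. The argument strings are arbitrary placeholders, and the function is a local re-implementation written for illustration, not part of the deleted module.

    # Local re-implementation of the rules in _QuoteWin32CommandLineArgs above:
    # embedded double-quotes are doubled and the argument wrapped in quotes;
    # otherwise any whitespace alone triggers wrapping.
    import re

    def quote_win32_args(args):
        out = []
        for arg in args:
            if '"' in arg:
                arg = '"%s"' % '""'.join(arg.split('"'))
            elif re.search(r'[ \t\n]', arg):
                arg = '"%s"' % arg
            out.append(arg)
        return out

    print(quote_win32_args(['debugger.exe', '--name="foo"', 'two words', 'plain']))
    # -> ['debugger.exe', '"--name=""foo"""', '"two words"', 'plain']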
- """ - self.name = name - - # Create XML doc - xml_impl = xml.dom.getDOMImplementation() - self.doc = xml_impl.createDocument(None, 'VisualStudioUserFile', None) - - # Add attributes to root element - self.n_root = self.doc.documentElement - self.n_root.setAttribute('Version', self.version.ProjectVersion()) - self.n_root.setAttribute('Name', self.name) - - # Add configurations section - self.n_configs = self.doc.createElement('Configurations') - self.n_root.appendChild(self.n_configs) - - def _AddConfigToNode(self, parent, config_type, config_name): - """Adds a configuration to the parent node. - - Args: - parent: Destination node. - config_type: Type of configuration node. - config_name: Configuration name. - """ - # Add configuration node and its attributes - n_config = self.doc.createElement(config_type) - n_config.setAttribute('Name', config_name) - parent.appendChild(n_config) - - def AddConfig(self, name): - """Adds a configuration to the project. - - Args: - name: Configuration name. - """ - self._AddConfigToNode(self.n_configs, 'Configuration', name) - - - def AddDebugSettings(self, config_name, command, environment = {}, - working_directory=""): - """Adds a DebugSettings node to the user file for a particular config. - - Args: - command: command line to run. First element in the list is the - executable. All elements of the command will be quoted if - necessary. - working_directory: other files which may trigger the rule. (optional) - """ - command = _QuoteWin32CommandLineArgs(command) - - n_cmd = self.doc.createElement('DebugSettings') - abs_command = _FindCommandInPath(command[0]) - n_cmd.setAttribute('Command', abs_command) - n_cmd.setAttribute('WorkingDirectory', working_directory) - n_cmd.setAttribute('CommandArguments', " ".join(command[1:])) - n_cmd.setAttribute('RemoteMachine', socket.gethostname()) - - if environment and isinstance(environment, dict): - n_cmd.setAttribute('Environment', - " ".join(['%s="%s"' % (key, val) - for (key,val) in environment.iteritems()])) - else: - n_cmd.setAttribute('Environment', '') - - n_cmd.setAttribute('EnvironmentMerge', 'true') - - # Currently these are all "dummy" values that we're just setting - # in the default manner that MSVS does it. We could use some of - # these to add additional capabilities, I suppose, but they might - # not have parity with other platforms then. - n_cmd.setAttribute('Attach', 'false') - n_cmd.setAttribute('DebuggerType', '3') # 'auto' debugger - n_cmd.setAttribute('Remote', '1') - n_cmd.setAttribute('RemoteCommand', '') - n_cmd.setAttribute('HttpUrl', '') - n_cmd.setAttribute('PDBPath', '') - n_cmd.setAttribute('SQLDebugging', '') - n_cmd.setAttribute('DebuggerFlavor', '0') - n_cmd.setAttribute('MPIRunCommand', '') - n_cmd.setAttribute('MPIRunArguments', '') - n_cmd.setAttribute('MPIRunWorkingDirectory', '') - n_cmd.setAttribute('ApplicationCommand', '') - n_cmd.setAttribute('ApplicationArguments', '') - n_cmd.setAttribute('ShimCommand', '') - n_cmd.setAttribute('MPIAcceptMode', '') - n_cmd.setAttribute('MPIAcceptFilter', '') - - # Find the config, and add it if it doesn't exist. - found = False - for config in self.n_configs.childNodes: - if config.getAttribute("Name") == config_name: - found = True - - if not found: - self.AddConfig(config_name) - - # Add the DebugSettings onto the appropriate config. 
- for config in self.n_configs.childNodes: - if config.getAttribute("Name") == config_name: - config.appendChild(n_cmd) - break - - def Write(self, writer=common.WriteOnDiff): - """Writes the user file.""" - f = writer(self.user_file_path) - self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n') - f.close() - -#------------------------------------------------------------------------------ diff --git a/third_party/gyp/pylib/gyp/MSVSVersion.py b/third_party/gyp/pylib/gyp/MSVSVersion.py deleted file mode 100755 index d2c17bb..0000000 --- a/third_party/gyp/pylib/gyp/MSVSVersion.py +++ /dev/null @@ -1,151 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Handle version information related to Visual Stuio.""" - -import os -import re -import subprocess -import sys - - -class VisualStudioVersion: - """Information regarding a version of Visual Studio.""" - - def __init__(self, short_name, description, - solution_version, project_version, flat_sln): - self.short_name = short_name - self.description = description - self.solution_version = solution_version - self.project_version = project_version - self.flat_sln = flat_sln - - def ShortName(self): - return self.short_name - - def Description(self): - """Get the full description of the version.""" - return self.description - - def SolutionVersion(self): - """Get the version number of the sln files.""" - return self.solution_version - - def ProjectVersion(self): - """Get the version number of the vcproj files.""" - return self.project_version - - def FlatSolution(self): - return self.flat_sln - - -def _RegistryGetValue(key, value): - """Use reg.exe to read a paricular key. - - While ideally we might use the win32 module, we would like gyp to be - python neutral, so for instance cygwin python lacks this module. - - Arguments: - key: The registry key to read from. - value: The particular value to read. - Return: - The contents there, or None for failure. - """ - # Skip if not on Windows. - if sys.platform not in ('win32', 'cygwin'): - return None - # Run reg.exe. - cmd = [os.path.join(os.environ.get('WINDIR', ''), 'System32', 'reg.exe'), - 'query', key, '/v', value] - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - text = p.communicate()[0] - # Require a successful return value. - if p.returncode: - return None - # Extract value. - match = re.search(r'REG_\w+[ ]+([^\r]+)\r\n', text) - if not match: - return None - return match.group(1) - - -def _CreateVersion(name): - versions = { - '2008': VisualStudioVersion('2008', - 'Visual Studio 2008', - solution_version='10.00', - project_version='9.00', - flat_sln=False), - '2008e': VisualStudioVersion('2008e', - 'Visual Studio 2008', - solution_version='10.00', - project_version='9.00', - flat_sln=True), - '2005': VisualStudioVersion('2005', - 'Visual Studio 2005', - solution_version='9.00', - project_version='8.00', - flat_sln=False), - '2005e': VisualStudioVersion('2005e', - 'Visual Studio 2005', - solution_version='9.00', - project_version='8.00', - flat_sln=True), - } - return versions[str(name)] - - -def _DetectVisualStudioVersions(): - """Collect the list of installed visual studio versions. - - Returns: - A list of visual studio versions installed in descending order of - usage preference. - Base this on the registry and a quick check if devenv.exe exists. - Only versions 8-9 are considered. 
- Possibilities are: - 2005 - Visual Studio 2005 (8) - 2008 - Visual Studio 2008 (9) - """ - version_to_year = {'8.0': '2005', '9.0': '2008'} - versions = [] - for version in ('9.0', '8.0'): - # Get the install dir for this version. - key = r'HKLM\Software\Microsoft\VisualStudio\%s' % version - path = _RegistryGetValue(key, 'InstallDir') - if not path: - continue - # Check for full. - if os.path.exists(os.path.join(path, 'devenv.exe')): - # Add this one. - versions.append(_CreateVersion(version_to_year[version])) - # Check for express. - elif os.path.exists(os.path.join(path, 'vcexpress.exe')): - # Add this one. - versions.append(_CreateVersion(version_to_year[version] + 'e')) - return versions - - -def SelectVisualStudioVersion(version='auto'): - """Select which version of Visual Studio projects to generate. - - Arguments: - version: Hook to allow caller to force a particular version (vs auto). - Returns: - An object representing a visual studio project format version. - """ - # In auto mode, check environment variable for override. - if version == 'auto': - version = os.environ.get('GYP_MSVS_VERSION', 'auto') - # In auto mode, pick the most preferred version present. - if version == 'auto': - versions = _DetectVisualStudioVersions() - if not versions: - # Default to 2005. - return _CreateVersion('2005') - return versions[0] - # Convert version string into a version object. - return _CreateVersion(version) diff --git a/third_party/gyp/pylib/gyp/SCons.py b/third_party/gyp/pylib/gyp/SCons.py deleted file mode 100644 index 9c57bcb..0000000 --- a/third_party/gyp/pylib/gyp/SCons.py +++ /dev/null @@ -1,200 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -SCons generator. - -This contains class definitions and supporting functions for generating -pieces of SCons files for the different types of GYP targets. -""" - -import os - - -def WriteList(fp, list, prefix='', - separator=',\n ', - preamble=None, - postamble=None): - fp.write(preamble or '') - fp.write((separator or ' ').join([prefix + l for l in list])) - fp.write(postamble or '') - - -class TargetBase(object): - """ - Base class for a SCons representation of a GYP target. - """ - is_ignored = False - target_prefix = '' - target_suffix = '' - def __init__(self, spec): - self.spec = spec - def full_product_name(self): - """ - Returns the full name of the product being built: - - * Uses 'product_name' if it's set, else prefix + 'target_name'. - * Prepends 'product_dir' if set. - * Appends SCons suffix variables for the target type (or - product_extension). - """ - suffix = self.target_suffix - product_extension = self.spec.get('product_extension') - if product_extension: - suffix = '.' + product_extension - prefix = self.spec.get('product_prefix', self.target_prefix) - name = self.spec['target_name'] - name = prefix + self.spec.get('product_name', name) + suffix - product_dir = self.spec.get('product_dir') - if product_dir: - name = os.path.join(product_dir, name) - else: - name = os.path.join(self.out_dir, name) - return name - - def write_input_files(self, fp): - """ - Writes the definition of the input files (sources). 
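To make the version table and the selection order above easier to follow, here is the same information restated as a small standalone sketch. The detection list passed in is hypothetical; only the ordering logic mirrors SelectVisualStudioVersion.

    # The deleted _CreateVersion table, restated as data:
    #   name     sln      vcproj   flat_sln
    #   2005     9.00     8.00     False
    #   2005e    9.00     8.00     True
    #   2008     10.00    9.00     False
    #   2008e    10.00    9.00     True
    def select_msvs_version(version='auto', detected=(), env=None):
        # Mirrors the order used by SelectVisualStudioVersion above: explicit
        # argument, then GYP_MSVS_VERSION, then the most preferred detected
        # install, then a 2005 fallback.
        env = env or {}
        if version == 'auto':
            version = env.get('GYP_MSVS_VERSION', 'auto')
        if version == 'auto':
            return detected[0] if detected else '2005'
        return version

    print(select_msvs_version(env={'GYP_MSVS_VERSION': '2008e'}))  # 2008e
    print(select_msvs_version(detected=['2008', '2005']))          # 2008
    print(select_msvs_version())                                   # 2005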
- """ - sources = self.spec.get('sources') - if not sources: - fp.write('\ninput_files = []\n') - return - preamble = '\ninput_files = [\n ' - postamble = ',\n]\n' - WriteList(fp, map(repr, sources), preamble=preamble, postamble=postamble) - - def builder_call(self): - """ - Returns the actual SCons builder call to build this target. - """ - name = self.full_product_name() - return 'env.%s(env.File(%r), input_files)' % (self.builder_name, name) - def write_target(self, fp, src_dir='', pre=''): - """ - Writes the lines necessary to build this target. - """ - fp.write('\n' + pre) - fp.write('_outputs = %s\n' % self.builder_call()) - fp.write('target_files.extend(_outputs)\n') - - -class NoneTarget(TargetBase): - """ - A GYP target type of 'none', implicitly or explicitly. - """ - def write_target(self, fp, pre=''): - fp.write('\ntarget_files.extend(input_files)\n') - - -class SettingsTarget(TargetBase): - """ - A GYP target type of 'settings'. - """ - is_ignored = True - - -compilable_sources_template = """ -_result = [] -for infile in input_files: - if env.compilable(infile): - if (type(infile) == type('') - and (infile.startswith(%(src_dir)r) - or not os.path.isabs(env.subst(infile)))): - # Force files below the build directory by replacing all '..' - # elements in the path with '__': - base, ext = os.path.splitext(os.path.normpath(infile)) - base = [d == '..' and '__' or d for d in base.split('/')] - base = os.path.join(*base) - object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base - if not infile.startswith(%(src_dir)r): - infile = %(src_dir)r + infile - infile = env.%(name)s(object, infile)[0] - else: - infile = env.%(name)s(infile)[0] - _result.append(infile) -input_files = _result -""" - -class CompilableSourcesTargetBase(TargetBase): - """ - An abstract base class for targets that compile their source files. - - We explicitly transform compilable files into object files, - even though SCons could infer that for us, because we want - to control where the object file ends up. (The implicit rules - in SCons always put the object file next to the source file.) - """ - intermediate_builder_name = None - def write_target(self, fp, src_dir='', pre=''): - if self.intermediate_builder_name is None: - raise NotImplementedError - if src_dir and not src_dir.endswith('/'): - src_dir += '/' - variables = { - 'src_dir': src_dir, - 'name': self.intermediate_builder_name, - } - fp.write(compilable_sources_template % variables) - super(CompilableSourcesTargetBase, self).write_target(fp) - - -class ProgramTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'executable'. - """ - builder_name = 'GypProgram' - intermediate_builder_name = 'StaticObject' - target_prefix = '${PROGPREFIX}' - target_suffix = '${PROGSUFFIX}' - out_dir = '${TOP_BUILDDIR}' - - -class StaticLibraryTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'static_library'. - """ - builder_name = 'GypStaticLibrary' - intermediate_builder_name = 'StaticObject' - target_prefix = '${LIBPREFIX}' - target_suffix = '${LIBSUFFIX}' - out_dir = '${LIB_DIR}' - - -class SharedLibraryTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'shared_library'. - """ - builder_name = 'GypSharedLibrary' - intermediate_builder_name = 'SharedObject' - target_prefix = '${SHLIBPREFIX}' - target_suffix = '${SHLIBSUFFIX}' - out_dir = '${LIB_DIR}' - - -class LoadableModuleTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'loadable_module'. 
- """ - builder_name = 'GypLoadableModule' - intermediate_builder_name = 'SharedObject' - target_prefix = '${SHLIBPREFIX}' - target_suffix = '${SHLIBSUFFIX}' - out_dir = '${TOP_BUILDDIR}' - - -TargetMap = { - None : NoneTarget, - 'none' : NoneTarget, - 'settings' : SettingsTarget, - 'executable' : ProgramTarget, - 'static_library' : StaticLibraryTarget, - 'shared_library' : SharedLibraryTarget, - 'loadable_module' : LoadableModuleTarget, -} - -def Target(spec): - return TargetMap[spec.get('type')](spec) diff --git a/third_party/gyp/pylib/gyp/__init__.py b/third_party/gyp/pylib/gyp/__init__.py deleted file mode 100644 index 4b088f6..0000000 --- a/third_party/gyp/pylib/gyp/__init__.py +++ /dev/null @@ -1,461 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import copy -import gyp.input -import optparse -import os.path -import re -import shlex -import sys - -# Default debug modes for GYP -debug = {} - -# List of "official" debug modes, but you can use anything you like. -DEBUG_GENERAL = 'general' -DEBUG_VARIABLES = 'variables' -DEBUG_INCLUDES = 'includes' - -def DebugOutput(mode, message): - if mode in gyp.debug.keys(): - print "%s: %s" % (mode.upper(), message) - -def FindBuildFiles(): - extension = '.gyp' - files = os.listdir(os.getcwd()) - build_files = [] - for file in files: - if file[-len(extension):] == extension: - build_files.append(file) - return build_files - - -def Load(build_files, format, default_variables={}, - includes=[], depth='.', params={}, check=False, circular_check=True): - """ - Loads one or more specified build files. - default_variables and includes will be copied before use. - Returns the generator for the specified format and the - data returned by loading the specified build files. - """ - default_variables = copy.copy(default_variables) - - # Default variables provided by this program and its modules should be - # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace, - # avoiding collisions with user and automatic variables. - default_variables['GENERATOR'] = format - - generator_name = 'gyp.generator.' + format - # These parameters are passed in order (as opposed to by key) - # because ActivePython cannot handle key parameters to __import__. - generator = __import__(generator_name, globals(), locals(), generator_name) - for (key, val) in generator.generator_default_variables.items(): - default_variables.setdefault(key, val) - - # Give the generator the opportunity to set additional variables based on - # the params it will receive in the output phase. - if getattr(generator, 'CalculateVariables', None): - generator.CalculateVariables(default_variables, params) - - # Fetch the generator specific info that gets fed to input, we use getattr - # so we can default things and the generators only have to provide what - # they need. 
- generator_input_info = { - 'generator_wants_absolute_build_file_paths': - getattr(generator, 'generator_wants_absolute_build_file_paths', False), - 'generator_handles_variants': - getattr(generator, 'generator_handles_variants', False), - 'non_configuration_keys': - getattr(generator, 'generator_additional_non_configuration_keys', []), - 'path_sections': - getattr(generator, 'generator_additional_path_sections', []), - 'extra_sources_for_rules': - getattr(generator, 'generator_extra_sources_for_rules', []), - 'generator_supports_multiple_toolsets': - getattr(generator, 'generator_supports_multiple_toolsets', False), - } - - # Process the input specific to this generator. - result = gyp.input.Load(build_files, default_variables, includes[:], - depth, generator_input_info, check, circular_check) - return [generator] + result - -def NameValueListToDict(name_value_list): - """ - Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary - of the pairs. If a string is simply NAME, then the value in the dictionary - is set to True. If VALUE can be converted to an integer, it is. - """ - result = { } - for item in name_value_list: - tokens = item.split('=', 1) - if len(tokens) == 2: - # If we can make it an int, use that, otherwise, use the string. - try: - token_value = int(tokens[1]) - except ValueError: - token_value = tokens[1] - # Set the variable to the supplied value. - result[tokens[0]] = token_value - else: - # No value supplied, treat it as a boolean and set it. - result[tokens[0]] = True - return result - -def ShlexEnv(env_name): - flags = os.environ.get(env_name, []) - if flags: - flags = shlex.split(flags) - return flags - -def FormatOpt(opt, value): - if opt.startswith('--'): - return '%s=%s' % (opt, value) - return opt + value - -def RegenerateAppendFlag(flag, values, predicate, env_name, options): - """Regenerate a list of command line flags, for an option of action='append'. - - The |env_name|, if given, is checked in the environment and used to generate - an initial list of options, then the options that were specified on the - command line (given in |values|) are appended. This matches the handling of - environment variables and command line flags where command line flags override - the environment, while not requiring the environment to be set when the flags - are used again. - """ - flags = [] - if options.use_environment and env_name: - for flag_value in ShlexEnv(env_name): - flags.append(FormatOpt(flag, predicate(flag_value))) - if values: - for flag_value in values: - flags.append(FormatOpt(flag, predicate(flag_value))) - return flags - -def RegenerateFlags(options): - """Given a parsed options object, and taking the environment variables into - account, returns a list of flags that should regenerate an equivalent options - object (even in the absence of the environment variables.) - - Any path options will be normalized relative to depth. - - The format flag is not included, as it is assumed the calling generator will - set that as appropriate. - """ - def FixPath(path): - path = gyp.common.FixIfRelativePath(path, options.depth) - if not path: - return os.path.curdir - return path - - def Noop(value): - return value - - # We always want to ignore the environment when regenerating, to avoid - # duplicate or changed flags in the environment at the time of regeneration. 
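NameValueListToDict() above feeds both -D defines and -G generator flags, so a worked example of its three cases (bare NAME, NAME=string, NAME=int) may help. The variable names used are placeholders.

    def name_value_list_to_dict(name_value_list):
        # Same parsing as NameValueListToDict above.
        result = {}
        for item in name_value_list:
            tokens = item.split('=', 1)
            if len(tokens) == 2:
                try:
                    result[tokens[0]] = int(tokens[1])   # numeric values become ints
                except ValueError:
                    result[tokens[0]] = tokens[1]
            else:
                result[tokens[0]] = True                  # bare NAME becomes True
        return result

    print(name_value_list_to_dict(['OS=win', 'werror=0', 'chromium_ios']))
    # -> {'OS': 'win', 'werror': 0, 'chromium_ios': True}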
- flags = ['--ignore-environment'] - for name, metadata in options._regeneration_metadata.iteritems(): - opt = metadata['opt'] - value = getattr(options, name) - value_predicate = metadata['type'] == 'path' and FixPath or Noop - action = metadata['action'] - env_name = metadata['env_name'] - if action == 'append': - flags.extend(RegenerateAppendFlag(opt, value, value_predicate, - env_name, options)) - elif action in ('store', None): # None is a synonym for 'store'. - if value: - flags.append(FormatOpt(opt, value_predicate(value))) - elif options.use_environment and env_name and os.environ.get(env_name): - flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name)))) - elif action in ('store_true', 'store_false'): - if ((action == 'store_true' and value) or - (action == 'store_false' and not value)): - flags.append(opt) - elif options.use_environment and env_name: - print >>sys.stderr, ('Warning: environment regeneration unimplemented ' - 'for %s flag %r env_name %r' % (action, opt, - env_name)) - else: - print >>sys.stderr, ('Warning: regeneration unimplemented for action %r ' - 'flag %r' % (action, opt)) - - return flags - -class RegeneratableOptionParser(optparse.OptionParser): - def __init__(self): - self.__regeneratable_options = {} - optparse.OptionParser.__init__(self) - - def add_option(self, *args, **kw): - """Add an option to the parser. - - This accepts the same arguments as OptionParser.add_option, plus the - following: - regenerate: can be set to False to prevent this option from being included - in regeneration. - env_name: name of environment variable that additional values for this - option come from. - type: adds type='path', to tell the regenerator that the values of - this option need to be made relative to options.depth - """ - env_name = kw.pop('env_name', None) - if 'dest' in kw and kw.pop('regenerate', True): - dest = kw['dest'] - - # The path type is needed for regenerating, for optparse we can just treat - # it as a string. - type = kw.get('type') - if type == 'path': - kw['type'] = 'string' - - self.__regeneratable_options[dest] = { - 'action': kw.get('action'), - 'type': type, - 'env_name': env_name, - 'opt': args[0], - } - - optparse.OptionParser.add_option(self, *args, **kw) - - def parse_args(self, *args): - values, args = optparse.OptionParser.parse_args(self, *args) - values._regeneration_metadata = self.__regeneratable_options - return values, args - -def main(args): - my_name = os.path.basename(sys.argv[0]) - - parser = RegeneratableOptionParser() - usage = 'usage: %s [options ...] [build_file ...]' - parser.set_usage(usage.replace('%s', '%prog')) - parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL', - env_name='GYP_DEFINES', - help='sets variable VAR to value VAL') - parser.add_option('-f', '--format', dest='formats', action='append', - env_name='GYP_GENERATORS', regenerate=False, - help='output formats to generate') - parser.add_option('--msvs-version', dest='msvs_version', - regenerate=False, - help='Deprecated; use -G msvs_version=MSVS_VERSION instead') - parser.add_option('-I', '--include', dest='includes', action='append', - metavar='INCLUDE', type='path', - help='files to include in all loaded .gyp files') - parser.add_option('--depth', dest='depth', metavar='PATH', type='path', - help='set DEPTH gyp variable to a relative path to PATH') - parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE', - action='append', default=[], help='turn on a debugging ' - 'mode for debugging GYP. 
Supported modes are "variables" ' - 'and "general"') - parser.add_option('-S', '--suffix', dest='suffix', default='', - help='suffix to add to generated files') - parser.add_option('-G', dest='generator_flags', action='append', default=[], - metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS', - help='sets generator flag FLAG to VAL') - parser.add_option('--generator-output', dest='generator_output', - action='store', default=None, metavar='DIR', type='path', - env_name='GYP_GENERATOR_OUTPUT', - help='puts generated build files under DIR') - parser.add_option('--ignore-environment', dest='use_environment', - action='store_false', default=True, regenerate=False, - help='do not read options from environment variables') - parser.add_option('--check', dest='check', action='store_true', - help='check format of gyp files') - parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store', - default=None, metavar='DIR', type='path', - help='directory to use as the root of the source tree') - # --no-circular-check disables the check for circular relationships between - # .gyp files. These relationships should not exist, but they've only been - # observed to be harmful with the Xcode generator. Chromium's .gyp files - # currently have some circular relationships on non-Mac platforms, so this - # option allows the strict behavior to be used on Macs and the lenient - # behavior to be used elsewhere. - # TODO(mark): Remove this option when http://crbug.com/35878 is fixed. - parser.add_option('--no-circular-check', dest='circular_check', - action='store_false', default=True, regenerate=False, - help="don't check for circular relationships between files") - - # We read a few things from ~/.gyp, so set up a var for that. - home_vars = ['HOME'] - if sys.platform in ('cygwin', 'win32'): - home_vars.append('USERPROFILE') - home = None - home_dot_gyp = None - for home_var in home_vars: - home = os.getenv(home_var) - if home != None: - home_dot_gyp = os.path.join(home, '.gyp') - if not os.path.exists(home_dot_gyp): - home_dot_gyp = None - else: - break - - # TODO(thomasvl): add support for ~/.gyp/defaults - - options, build_files_arg = parser.parse_args(args) - build_files = build_files_arg - - if not options.formats: - # If no format was given on the command line, then check the env variable. - generate_formats = [] - if options.use_environment: - generate_formats = os.environ.get('GYP_GENERATORS', []) - if generate_formats: - generate_formats = re.split('[\s,]', generate_formats) - if generate_formats: - options.formats = generate_formats - else: - # Nothing in the variable, default based on platform. - options.formats = [ {'darwin': 'xcode', - 'win32': 'msvs', - 'cygwin': 'msvs', - 'freebsd7': 'make', - 'freebsd8': 'make', - 'linux2': 'make', - 'openbsd4': 'make', - 'sunos5': 'make',}[sys.platform] ] - - if not options.generator_output and options.use_environment: - g_o = os.environ.get('GYP_GENERATOR_OUTPUT') - if g_o: - options.generator_output = g_o - - for mode in options.debug: - gyp.debug[mode] = 1 - - # Do an extra check to avoid work when we're not debugging. 
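The format-selection branch above (GYP_GENERATORS first, then a per-platform default) reads more clearly in isolation; this sketch reproduces it with the same platform table. Note that unknown platforms raise KeyError, exactly as the original would.

    import re
    import sys

    def pick_formats(env=None):
        # Mirrors the GYP_GENERATORS / platform-default selection above.
        env = env or {}
        formats = env.get('GYP_GENERATORS', '')
        if formats:
            return re.split(r'[\s,]', formats)
        defaults = {'darwin': 'xcode', 'win32': 'msvs', 'cygwin': 'msvs',
                    'freebsd7': 'make', 'freebsd8': 'make', 'linux2': 'make',
                    'openbsd4': 'make', 'sunos5': 'make'}
        return [defaults[sys.platform]]   # KeyError for platforms not in the table

    print(pick_formats({'GYP_GENERATORS': 'make,msvs'}))   # ['make', 'msvs']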
- if DEBUG_GENERAL in gyp.debug.keys(): - DebugOutput(DEBUG_GENERAL, 'running with these options:') - for option, value in sorted(options.__dict__.items()): - if option[0] == '_': - continue - if isinstance(value, basestring): - DebugOutput(DEBUG_GENERAL, " %s: '%s'" % (option, value)) - else: - DebugOutput(DEBUG_GENERAL, " %s: %s" % (option, str(value))) - - if not build_files: - build_files = FindBuildFiles() - if not build_files: - print >>sys.stderr, (usage + '\n\n%s: error: no build_file') % \ - (my_name, my_name) - return 1 - - # TODO(mark): Chromium-specific hack! - # For Chromium, the gyp "depth" variable should always be a relative path - # to Chromium's top-level "src" directory. If no depth variable was set - # on the command line, try to find a "src" directory by looking at the - # absolute path to each build file's directory. The first "src" component - # found will be treated as though it were the path used for --depth. - if not options.depth: - for build_file in build_files: - build_file_dir = os.path.abspath(os.path.dirname(build_file)) - build_file_dir_components = build_file_dir.split(os.path.sep) - components_len = len(build_file_dir_components) - for index in xrange(components_len - 1, -1, -1): - if build_file_dir_components[index] == 'src': - options.depth = os.path.sep.join(build_file_dir_components) - break - del build_file_dir_components[index] - - # If the inner loop found something, break without advancing to another - # build file. - if options.depth: - break - - if not options.depth: - raise Exception, \ - 'Could not automatically locate src directory. This is a ' + \ - 'temporary Chromium feature that will be removed. Use ' + \ - '--depth as a workaround.' - - # If toplevel-dir is not set, we assume that depth is the root of our source - # tree. - if not options.toplevel_dir: - options.toplevel_dir = options.depth - - # -D on the command line sets variable defaults - D isn't just for define, - # it's for default. Perhaps there should be a way to force (-F?) a - # variable's value so that it can't be overridden by anything else. - cmdline_default_variables = {} - defines = [] - if options.use_environment: - defines += ShlexEnv('GYP_DEFINES') - if options.defines: - defines += options.defines - cmdline_default_variables = NameValueListToDict(defines) - if DEBUG_GENERAL in gyp.debug.keys(): - DebugOutput(DEBUG_GENERAL, - "cmdline_default_variables: %s" % cmdline_default_variables) - - # Set up includes. - includes = [] - - # If ~/.gyp/include.gypi exists, it'll be forcibly included into every - # .gyp file that's loaded, before anything else is included. - if home_dot_gyp != None: - default_include = os.path.join(home_dot_gyp, 'include.gypi') - if os.path.exists(default_include): - includes.append(default_include) - - # Command-line --include files come after the default include. - if options.includes: - includes.extend(options.includes) - - # Generator flags should be prefixed with the target generator since they - # are global across all generator runs. - gen_flags = [] - if options.use_environment: - gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS') - if options.generator_flags: - gen_flags += options.generator_flags - generator_flags = NameValueListToDict(gen_flags) - if DEBUG_GENERAL in gyp.debug.keys(): - DebugOutput(DEBUG_GENERAL, "generator_flags: %s" % generator_flags) - - # TODO: Remove this and the option after we've gotten folks to move to the - # generator flag. 
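The Chromium-specific depth detection above is compact but fiddly; the standalone function below replays it on a made-up path to show that the deepest 'src' component wins.

    import os

    def detect_depth(build_file_dir):
        # Same backwards walk as the --depth auto-detection above.
        components = build_file_dir.split(os.path.sep)
        for index in range(len(components) - 1, -1, -1):
            if components[index] == 'src':
                return os.path.sep.join(components)
            del components[index]
        return None

    print(detect_depth('/home/dev/chromium/src/base'))
    # -> /home/dev/chromium/src   (placeholder path)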
- if options.msvs_version: - print >>sys.stderr, \ - 'DEPRECATED: Use generator flag (-G msvs_version=' + \ - options.msvs_version + ') instead of --msvs-version=' + \ - options.msvs_version - generator_flags['msvs_version'] = options.msvs_version - - # Generate all requested formats (use a set in case we got one format request - # twice) - for format in set(options.formats): - params = {'options': options, - 'build_files': build_files, - 'generator_flags': generator_flags, - 'cwd': os.getcwd(), - 'build_files_arg': build_files_arg, - 'gyp_binary': sys.argv[0], - 'home_dot_gyp': home_dot_gyp} - - # Start with the default variables from the command line. - [generator, flat_list, targets, data] = Load(build_files, format, - cmdline_default_variables, - includes, options.depth, - params, options.check, - options.circular_check) - - # TODO(mark): Pass |data| for now because the generator needs a list of - # build files that came in. In the future, maybe it should just accept - # a list, and not the whole data dict. - # NOTE: flat_list is the flattened dependency graph specifying the order - # that targets may be built. Build systems that operate serially or that - # need to have dependencies defined before dependents reference them should - # generate targets in the order specified in flat_list. - generator.GenerateOutput(flat_list, targets, data, params) - - # Done - return 0 - - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff --git a/third_party/gyp/pylib/gyp/common.py b/third_party/gyp/pylib/gyp/common.py deleted file mode 100644 index f2070bf..0000000 --- a/third_party/gyp/pylib/gyp/common.py +++ /dev/null @@ -1,343 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import errno -import filecmp -import os.path -import re -import tempfile -import sys - -def ExceptionAppend(e, msg): - """Append a message to the given exception's message.""" - if not e.args: - e.args = (msg,) - elif len(e.args) == 1: - e.args = (str(e.args[0]) + ' ' + msg,) - else: - e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:] - - -def ParseQualifiedTarget(target): - # Splits a qualified target into a build file, target name and toolset. - - # NOTE: rsplit is used to disambiguate the Windows drive letter separator. - target_split = target.rsplit(':', 1) - if len(target_split) == 2: - [build_file, target] = target_split - else: - build_file = None - - target_split = target.rsplit('#', 1) - if len(target_split) == 2: - [target, toolset] = target_split - else: - toolset = None - - return [build_file, target, toolset] - - -def ResolveTarget(build_file, target, toolset): - # This function resolves a target into a canonical form: - # - a fully defined build file, either absolute or relative to the current - # directory - # - a target name - # - a toolset - # - # build_file is the file relative to which 'target' is defined. - # target is the qualified target. - # toolset is the default toolset for that target. - [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target) - - if parsed_build_file: - if build_file: - # If a relative path, parsed_build_file is relative to the directory - # containing build_file. If build_file is not in the current directory, - # parsed_build_file is not a usable path as-is. Resolve it by - # interpreting it as relative to build_file. 
If parsed_build_file is - # absolute, it is usable as a path regardless of the current directory, - # and os.path.join will return it as-is. - build_file = os.path.normpath(os.path.join(os.path.dirname(build_file), - parsed_build_file)) - else: - build_file = parsed_build_file - - if parsed_toolset: - toolset = parsed_toolset - - return [build_file, target, toolset] - - -def BuildFile(fully_qualified_target): - # Extracts the build file from the fully qualified target. - return ParseQualifiedTarget(fully_qualified_target)[0] - - -def QualifiedTarget(build_file, target, toolset): - # "Qualified" means the file that a target was defined in and the target - # name, separated by a colon, suffixed by a # and the toolset name: - # /path/to/file.gyp:target_name#toolset - fully_qualified = build_file + ':' + target - if toolset: - fully_qualified = fully_qualified + '#' + toolset - return fully_qualified - - -def RelativePath(path, relative_to): - # Assuming both |path| and |relative_to| are relative to the current - # directory, returns a relative path that identifies path relative to - # relative_to. - - # Convert to absolute (and therefore normalized paths). - path = os.path.abspath(path) - relative_to = os.path.abspath(relative_to) - - # Split the paths into components. - path_split = path.split(os.path.sep) - relative_to_split = relative_to.split(os.path.sep) - - # Determine how much of the prefix the two paths share. - prefix_len = len(os.path.commonprefix([path_split, relative_to_split])) - - # Put enough ".." components to back up out of relative_to to the common - # prefix, and then append the part of path_split after the common prefix. - relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \ - path_split[prefix_len:] - - if len(relative_split) == 0: - # The paths were the same. - return '' - - # Turn it back into a string and we're done. - return os.path.join(*relative_split) - - -def FixIfRelativePath(path, relative_to): - # Like RelativePath but returns |path| unchanged if it is absolute. - if os.path.isabs(path): - return path - return RelativePath(path, relative_to) - - -def UnrelativePath(path, relative_to): - # Assuming that |relative_to| is relative to the current directory, and |path| - # is a path relative to the dirname of |relative_to|, returns a path that - # identifies |path| relative to the current directory. - rel_dir = os.path.dirname(relative_to) - return os.path.normpath(os.path.join(rel_dir, path)) - - -# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at -# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02 -# and the documentation for various shells. - -# _quote is a pattern that should match any argument that needs to be quoted -# with double-quotes by EncodePOSIXShellArgument. It matches the following -# characters appearing anywhere in an argument: -# \t, \n, space parameter separators -# # comments -# $ expansions (quoted to always expand within one argument) -# % called out by IEEE 1003.1 XCU.2.2 -# & job control -# ' quoting -# (, ) subshell execution -# *, ?, [ pathname expansion -# ; command delimiter -# <, >, | redirection -# = assignment -# {, } brace expansion (bash) -# ~ tilde expansion -# It also matches the empty string, because "" (or '') is the only way to -# represent an empty string literal argument to a POSIX shell. 
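The qualified-target round trip above (build_file:target#toolset) is central to how gyp names things; here is a faithful, runnable restatement of the parsing side, using a placeholder target string.

    def parse_qualified_target(target):
        # Same rsplit-based parsing as ParseQualifiedTarget above; rsplit keeps
        # Windows drive letters ('C:\...') out of the target name.
        build_file = None
        parts = target.rsplit(':', 1)
        if len(parts) == 2:
            build_file, target = parts
        toolset = None
        parts = target.rsplit('#', 1)
        if len(parts) == 2:
            target, toolset = parts
        return [build_file, target, toolset]

    print(parse_qualified_target('base/base.gyp:base_unittests#target'))
    # -> ['base/base.gyp', 'base_unittests', 'target']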
-# -# This does not match the characters in _escape, because those need to be -# backslash-escaped regardless of whether they appear in a double-quoted -# string. -_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$') - -# _escape is a pattern that should match any character that needs to be -# escaped with a backslash, whether or not the argument matched the _quote -# pattern. _escape is used with re.sub to backslash anything in _escape's -# first match group, hence the (parentheses) in the regular expression. -# -# _escape matches the following characters appearing anywhere in an argument: -# " to prevent POSIX shells from interpreting this character for quoting -# \ to prevent POSIX shells from interpreting this character for escaping -# ` to prevent POSIX shells from interpreting this character for command -# substitution -# Missing from this list is $, because the desired behavior of -# EncodePOSIXShellArgument is to permit parameter (variable) expansion. -# -# Also missing from this list is !, which bash will interpret as the history -# expansion character when history is enabled. bash does not enable history -# by default in non-interactive shells, so this is not thought to be a problem. -# ! was omitted from this list because bash interprets "\!" as a literal string -# including the backslash character (avoiding history expansion but retaining -# the backslash), which would not be correct for argument encoding. Handling -# this case properly would also be problematic because bash allows the history -# character to be changed with the histchars shell variable. Fortunately, -# as history is not enabled in non-interactive shells and -# EncodePOSIXShellArgument is only expected to encode for non-interactive -# shells, there is no room for error here by ignoring !. -_escape = re.compile(r'(["\\`])') - -def EncodePOSIXShellArgument(argument): - """Encodes |argument| suitably for consumption by POSIX shells. - - argument may be quoted and escaped as necessary to ensure that POSIX shells - treat the returned value as a literal representing the argument passed to - this function. Parameter (variable) expansions beginning with $ are allowed - to remain intact without escaping the $, to allow the argument to contain - references to variables to be expanded by the shell. - """ - - if not isinstance(argument, str): - argument = str(argument) - - if _quote.search(argument): - quote = '"' - else: - quote = '' - - encoded = quote + re.sub(_escape, r'\\\1', argument) + quote - - return encoded - - -def EncodePOSIXShellList(list): - """Encodes |list| suitably for consumption by POSIX shells. - - Returns EncodePOSIXShellArgument for each item in list, and joins them - together using the space character as an argument separator. - """ - - encoded_arguments = [] - for argument in list: - encoded_arguments.append(EncodePOSIXShellArgument(argument)) - return ' '.join(encoded_arguments) - - -def DeepDependencyTargets(target_dicts, roots): - """Returns the recursive list of target dependencies. - """ - dependencies = set() - for r in roots: - spec = target_dicts[r] - r_deps = list(set((spec.get('dependencies', []) + - spec.get('dependencies_original', [])))) - for d in r_deps: - if d not in roots: - dependencies.add(d) - for d in DeepDependencyTargets(target_dicts, r_deps): - if d not in roots: - dependencies.add(d) - return list(dependencies) - - -def BuildFileTargets(target_list, build_file): - """From a target_list, returns the subset from the specified build_file. 
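A few concrete inputs make the quoting rules of EncodePOSIXShellArgument above easier to verify. The snippet reuses the same two regular expressions so the outputs match the original; the argument strings themselves are arbitrary.

    import re

    _quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
    _escape = re.compile(r'(["\\`])')

    def encode_posix_shell_argument(argument):
        # Same behaviour as EncodePOSIXShellArgument above: quote when needed,
        # backslash-escape " \ ` and leave $ expandable.
        argument = str(argument)
        quote = '"' if _quote.search(argument) else ''
        return quote + _escape.sub(r'\\\1', argument) + quote

    print(encode_posix_shell_argument('two words'))      # "two words"
    print(encode_posix_shell_argument('$PREFIX/a"b'))    # "$PREFIX/a\"b"
    print(' '.join(encode_posix_shell_argument(a) for a in ['cp', 'a b', 'dest']))
    # cp "a b" dest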
- """ - return [p for p in target_list if BuildFile(p) == build_file] - - -def AllTargets(target_list, target_dicts, build_file): - """Returns all targets (direct and dependencies) for the specified build_file. - """ - bftargets = BuildFileTargets(target_list, build_file) - deptargets = DeepDependencyTargets(target_dicts, bftargets) - return bftargets + deptargets - - -def WriteOnDiff(filename): - """Write to a file only if the new contents differ. - - Arguments: - filename: name of the file to potentially write to. - Returns: - A file like object which will write to temporary file and only overwrite - the target if it differs (on close). - """ - - class Writer: - """Wrapper around file which only covers the target if it differs.""" - def __init__(self): - # Pick temporary file. - tmp_fd, self.tmp_path = tempfile.mkstemp( - suffix='.tmp', - prefix=os.path.split(filename)[1] + '.gyp.', - dir=os.path.split(filename)[0]) - try: - self.tmp_file = os.fdopen(tmp_fd, 'wb') - except Exception: - # Don't leave turds behind. - os.unlink(self.tmp_path) - raise - - def __getattr__(self, attrname): - # Delegate everything else to self.tmp_file - return getattr(self.tmp_file, attrname) - - def close(self): - try: - # Close tmp file. - self.tmp_file.close() - # Determine if different. - same = False - try: - same = filecmp.cmp(self.tmp_path, filename, False) - except OSError, e: - if e.errno != errno.ENOENT: - raise - - if same: - # The new file is identical to the old one, just get rid of the new - # one. - os.unlink(self.tmp_path) - else: - # The new file is different from the old one, or there is no old one. - # Rename the new file to the permanent name. - # - # tempfile.mkstemp uses an overly restrictive mode, resulting in a - # file that can only be read by the owner, regardless of the umask. - # There's no reason to not respect the umask here, which means that - # an extra hoop is required to fetch it and reset the new file's mode. - # - # No way to get the umask without setting a new one? Set a safe one - # and then set it back to the old value. - umask = os.umask(077) - os.umask(umask) - os.chmod(self.tmp_path, 0666 & ~umask) - if sys.platform == 'win32' and os.path.exists(filename): - # NOTE: on windows (but not cygwin) rename will not replace an - # existing file, so it must be preceded with a remove. Sadly there - # is no way to make the switch atomic. - os.remove(filename) - os.rename(self.tmp_path, filename) - except Exception: - # Don't leave turds behind. - os.unlink(self.tmp_path) - raise - - return Writer() - - -# From Alex Martelli, -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560 -# ASPN: Python Cookbook: Remove duplicates from a sequence -# First comment, dated 2001/10/13. -# (Also in the printed Python Cookbook.) - -def uniquer(seq, idfun=None): - if idfun is None: - def idfun(x): return x - seen = {} - result = [] - for item in seq: - marker = idfun(item) - if marker in seen: continue - seen[marker] = 1 - result.append(item) - return result diff --git a/third_party/gyp/pylib/gyp/generator/__init__.py b/third_party/gyp/pylib/gyp/generator/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/third_party/gyp/pylib/gyp/generator/gypd.py b/third_party/gyp/pylib/gyp/generator/gypd.py deleted file mode 100644 index 948f0b8..0000000 --- a/third_party/gyp/pylib/gyp/generator/gypd.py +++ /dev/null @@ -1,88 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""gypd output module - -This module produces gyp input as its output. Output files are given the -.gypd extension to avoid overwriting the .gyp files that they are generated -from. Internal references to .gyp files (such as those found in -"dependencies" sections) are not adjusted to point to .gypd files instead; -unlike other paths, which are relative to the .gyp or .gypd file, such paths -are relative to the directory from which gyp was run to create the .gypd file. - -This generator module is intended to be a sample and a debugging aid, hence -the "d" for "debug" in .gypd. It is useful to inspect the results of the -various merges, expansions, and conditional evaluations performed by gyp -and to see a representation of what would be fed to a generator module. - -It's not advisable to rename .gypd files produced by this module to .gyp, -because they will have all merges, expansions, and evaluations already -performed and the relevant constructs not present in the output; paths to -dependencies may be wrong; and various sections that do not belong in .gyp -files such as such as "included_files" and "*_excluded" will be present. -Output will also be stripped of comments. This is not intended to be a -general-purpose gyp pretty-printer; for that, you probably just want to -run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip -comments but won't do all of the other things done to this module's output. - -The specific formatting of the output generated by this module is subject -to change. -""" - - -import gyp.common -import errno -import os -import pprint - - -# These variables should just be spit back out as variable references. -_generator_identity_variables = [ - 'EXECUTABLE_PREFIX', - 'EXECUTABLE_SUFFIX', - 'INTERMEDIATE_DIR', - 'PRODUCT_DIR', - 'RULE_INPUT_ROOT', - 'RULE_INPUT_EXT', - 'RULE_INPUT_NAME', - 'RULE_INPUT_PATH', - 'SHARED_INTERMEDIATE_DIR', -] - -# gypd doesn't define a default value for OS like many other generator -# modules. Specify "-D OS=whatever" on the command line to provide a value. -generator_default_variables = { -} - -# gypd supports multiple toolsets -generator_supports_multiple_toolsets = True - -# TODO(mark): This always uses <, which isn't right. The input module should -# notify the generator to tell it which phase it is operating in, and this -# module should use < for the early phase and then switch to > for the late -# phase. Bonus points for carrying @ back into the output too. 
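The loop that follows simply turns each identity variable into an early-expansion reference; spelled out for a few entries, the resulting mapping looks like this (only a subset of the list above is shown).

    # What the generator_default_variables[v] = '<(%s)' % v loop below produces
    # for a subset of _generator_identity_variables:
    subset = ('PRODUCT_DIR', 'INTERMEDIATE_DIR', 'RULE_INPUT_PATH')
    mapping = {v: '<(%s)' % v for v in subset}
    print(mapping['PRODUCT_DIR'])   # <(PRODUCT_DIR)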
-for v in _generator_identity_variables: - generator_default_variables[v] = '<(%s)' % v - - -def GenerateOutput(target_list, target_dicts, data, params): - output_files = {} - for qualified_target in target_list: - [input_file, target] = \ - gyp.common.ParseQualifiedTarget(qualified_target)[0:2] - - if input_file[-4:] != '.gyp': - continue - input_file_stem = input_file[:-4] - output_file = input_file_stem + params['options'].suffix + '.gypd' - - if not output_file in output_files: - output_files[output_file] = input_file - - for output_file, input_file in output_files.iteritems(): - output = open(output_file, 'w') - pprint.pprint(data[input_file], output) - output.close() diff --git a/third_party/gyp/pylib/gyp/generator/gypsh.py b/third_party/gyp/pylib/gyp/generator/gypsh.py deleted file mode 100644 index f48b03f..0000000 --- a/third_party/gyp/pylib/gyp/generator/gypsh.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""gypsh output module - -gypsh is a GYP shell. It's not really a generator per se. All it does is -fire up an interactive Python session with a few local variables set to the -variables passed to the generator. Like gypd, it's intended as a debugging -aid, to facilitate the exploration of .gyp structures after being processed -by the input module. - -The expected usage is "gyp -f gypsh -D OS=desired_os". -""" - - -import code -import sys - - -# All of this stuff about generator variables was lovingly ripped from gypd.py. -# That module has a much better description of what's going on and why. -_generator_identity_variables = [ - 'EXECUTABLE_PREFIX', - 'EXECUTABLE_SUFFIX', - 'INTERMEDIATE_DIR', - 'PRODUCT_DIR', - 'RULE_INPUT_ROOT', - 'RULE_INPUT_EXT', - 'RULE_INPUT_NAME', - 'RULE_INPUT_PATH', - 'SHARED_INTERMEDIATE_DIR', -] - -generator_default_variables = { -} - -for v in _generator_identity_variables: - generator_default_variables[v] = '<(%s)' % v - - -def GenerateOutput(target_list, target_dicts, data, params): - locals = { - 'target_list': target_list, - 'target_dicts': target_dicts, - 'data': data, - } - - # Use a banner that looks like the stock Python one and like what - # code.interact uses by default, but tack on something to indicate what - # locals are available, and identify gypsh. - banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \ - (sys.version, sys.platform, repr(sorted(locals.keys()))) - - code.interact(banner, local=locals) diff --git a/third_party/gyp/pylib/gyp/generator/make.py b/third_party/gyp/pylib/gyp/generator/make.py deleted file mode 100644 index 1196317..0000000 --- a/third_party/gyp/pylib/gyp/generator/make.py +++ /dev/null @@ -1,1286 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Notes: -# -# This is all roughly based on the Makefile system used by the Linux -# kernel, but is a non-recursive make -- we put the entire dependency -# graph in front of make and let it figure it out. -# -# The code below generates a separate .mk file for each target, but -# all are sourced by the top-level Makefile. This means that all -# variables in .mk-files clobber one another. 
Be careful to use := -# where appropriate for immediate evaluation, and similarly to watch -# that you're not relying on a variable value to last beween different -# .mk files. -# -# TODOs: -# -# Global settings and utility functions are currently stuffed in the -# toplevel Makefile. It may make sense to generate some .mk files on -# the side to keep the the files readable. - -import gyp -import gyp.common -import os.path - -# Debugging-related imports -- remove me once we're solid. -import code -import pprint - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'OS': 'linux', - 'STATIC_LIB_PREFIX': 'lib', - 'SHARED_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'SHARED_LIB_SUFFIX': '.so', - 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/geni', - 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen', - 'PRODUCT_DIR': '$(builddir)', - 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)', - 'LIB_DIR': '$(obj).$(TOOLSET)', - 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python. - 'RULE_INPUT_PATH': '$(abspath $<)', - 'RULE_INPUT_EXT': '$(suffix $<)', - 'RULE_INPUT_NAME': '$(notdir $<)', - - # This appears unused --- ? - 'CONFIGURATION_NAME': '$(BUILDTYPE)', -} - -# Make supports multiple toolsets -generator_supports_multiple_toolsets = True - -def ensure_directory_exists(path): - dir = os.path.dirname(path) - if dir and not os.path.exists(dir): - os.makedirs(dir) - -# Header of toplevel Makefile. -# This should go into the build tree, but it's easier to keep it here for now. -SHARED_HEADER = ("""\ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= __default_configuration__ - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - -# C++ apps need to be linked with g++. Not sure what's appropriate. -LINK ?= $(CXX) - -CC.target ?= $(CC) -CFLAGS.target ?= $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) -RANLIB.target ?= ranlib - -CC.host ?= gcc -CFLAGS.host ?= -CXX.host ?= g++ -CXXFLAGS.host ?= -LINK.host ?= g++ -LDFLAGS.host ?= -AR.host ?= ar -RANLIB.host ?= ranlib - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$@.d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. 
-# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \\ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time.""" -r""" -define fixup_dep -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. -sed -e 's|\\||' -e 's| |\n|g' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef -""" -""" -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(CFLAGS.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(CXXFLAGS.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_alink = AR+RANLIB($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) rc $@ $(filter %.o,$^) && $(RANLIB.$(TOOLSET)) $@ - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = ln -f $< $@ 2>/dev/null || cp -af $< $@ - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(LDFLAGS.$(TOOLSET)) $(GYP_LDFLAGS) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) - -# Shared-object link (for generating .so). -# Set SONAME to the library filename so our binaries don't reference the local, -# absolute paths used on the link command-line. -# TODO: perhaps this can share with the LINK command above? -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(LDFLAGS.$(TOOLSET)) $(GYP_LDFLAGS) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) -""" -r""" -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). -exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' -""" -""" -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. 
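Steps (1) and (2) of the fixup_dep recipe above are pure text rewrites, so they can be illustrated without make or sed. The Python sketch below applies the same two transformations to a made-up .d file; the object path and dependency names are placeholders.

    # Illustration of the fixup_dep rewrite above on a raw gcc -MMD dep file.
    raw = "foobar.o: src/foobar.cc src/foobar.h \\\n include/base.h\n"
    target = "out/Debug/obj.target/foo/foobar.o"

    # (1) Re-point the rule at the real object path (ccache/distcc drop it).
    fixed = raw.replace("foobar.o:", target + ":", 1)

    # (2) Add an empty rule per prerequisite so a deleted header is treated as
    #     a phony target instead of breaking the build.
    deps = raw.replace("\\\n", " ").split(":", 1)[1].split()
    phony = "".join(dep + ":\n" for dep in deps)

    print(fixed + phony)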
-# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$@)),\\ - $(subst $(cmd_$@),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out $|,$?) - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do C/C++ dependency munging. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p $(dir $@) $(dir $(depfile)) - @$(cmd_$(1)) - @$(call exact_echo,$(call escape_vars,cmd_$@ := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) -) -endef - -# Declare "all" target first so it is the default, even though we don't have the -# deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -""") - -ROOT_HEADER_SUFFIX_RULES = ("""\ -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# Try building from generated source, too. -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -""") - -SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\ -# Suffix rules, putting all outputs into $(obj). 
-""") - -SHARED_HEADER_SUFFIX_RULES_SRCDIR = { - '.c': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -"""), - '.s': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc) -"""), - '.S': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc) -"""), - '.cpp': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), - '.cc': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), - '.cxx': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), -} - -SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\ -# Try building from generated source, too. -""") - -SHARED_HEADER_SUFFIX_RULES_OBJDIR1 = { - '.c': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -"""), - '.cc': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), - '.cpp': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), -} - -SHARED_HEADER_SUFFIX_RULES_OBJDIR2 = { - '.c': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -"""), - '.cc': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), - '.cpp': ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -"""), -} - -SHARED_HEADER_SUFFIX_RULES = ( - SHARED_HEADER_SUFFIX_RULES_COMMENT1 + - ''.join(SHARED_HEADER_SUFFIX_RULES_SRCDIR.values()) + - SHARED_HEADER_SUFFIX_RULES_COMMENT2 + - ''.join(SHARED_HEADER_SUFFIX_RULES_OBJDIR1.values()) + - ''.join(SHARED_HEADER_SUFFIX_RULES_OBJDIR2.values()) -) - -# This gets added to the very beginning of the Makefile. -SHARED_HEADER_SRCDIR = ("""\ -# The source directory tree. -srcdir := %s - -""") - -SHARED_HEADER_BUILDDIR_NAME = ("""\ -# The name of the builddir. -builddir_name ?= %s - -""") - -SHARED_FOOTER = """\ -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. First, only consider targets that already have been -# built, as unbuilt targets will be built regardless of dependency info: -all_deps := $(wildcard $(sort $(all_deps))) -# Of those, only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif -""" - -header = """\ -# This file is generated by gyp; do not edit. - -""" - - -def Compilable(filename): - """Return true if the file is compilable (should be in OBJS).""" - for res in (filename.endswith(e) for e - in ['.c', '.cc', '.cpp', '.cxx', '.s', '.S']): - if res: - return True - return False - - -def Target(filename): - """Translate a compilable filename to its .o target.""" - return os.path.splitext(filename)[0] + '.o' - - -def EscapeShellArgument(s): - """Quotes an argument so that it will be interpreted literally by a POSIX - shell. Taken from - http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python - """ - return "'" + s.replace("'", "'\\''") + "'" - - -def EscapeMakeVariableExpansion(s): - """Make has its own variable expansion syntax using $. 
We must escape it for - string to be interpreted literally.""" - return s.replace('$', '$$') - - -def EscapeCppDefine(s): - """Escapes a CPP define so that it will reach the compiler unaltered.""" - s = EscapeShellArgument(s) - s = EscapeMakeVariableExpansion(s) - return s - - -def QuoteIfNecessary(string): - """TODO: Should this ideally be replaced with one or more of the above - functions?""" - if '"' in string: - string = '"' + string.replace('"', '\\"') + '"' - return string - - -srcdir_prefix = '' -def Sourceify(path): - """Convert a path to its source directory form.""" - if '$(' in path: - return path - if os.path.isabs(path): - return path - return srcdir_prefix + path - - -# Map from qualified target to path to output. -target_outputs = {} -# Map from qualified target to a list of all linker dependencies, -# transitively expanded. -# Used in building shared-library-based executables. -target_link_deps = {} - - -class MakefileWriter: - """MakefileWriter packages up the writing of one target-specific foobar.mk. - - Its only real entry point is Write(), and is mostly used for namespacing. - """ - - def Write(self, qualified_target, base_path, output_filename, spec, configs, - part_of_all): - """The main entry point: writes a .mk file for a single target. - - Arguments: - qualified_target: target we're generating - base_path: path relative to source root we're building in, used to resolve - target-relative paths - output_filename: output .mk file name to write - spec, configs: gyp info - part_of_all: flag indicating this target is part of 'all' - """ - print 'Generating %s' % output_filename - - ensure_directory_exists(output_filename) - - self.fp = open(output_filename, 'w') - - self.fp.write(header) - - self.path = base_path - self.target = spec['target_name'] - self.type = spec['type'] - self.toolset = spec['toolset'] - - deps, link_deps = self.ComputeDeps(spec) - - # Some of the generation below can add extra output, sources, or - # link dependencies. All of the out params of the functions that - # follow use names like extra_foo. - extra_outputs = [] - extra_sources = [] - extra_link_deps = [] - - self.output = self.ComputeOutput(spec) - self._INSTALLABLE_TARGETS = ('executable', 'loadable_module', - 'shared_library') - if self.type in self._INSTALLABLE_TARGETS: - self.alias = os.path.basename(self.output) - else: - self.alias = self.output - - self.WriteLn("TOOLSET := " + self.toolset) - self.WriteLn("TARGET := " + self.target) - - # Actions must come first, since they can generate more OBJs for use below. - if 'actions' in spec: - self.WriteActions(spec['actions'], extra_sources, extra_outputs, - part_of_all) - - # Rules must be early like actions. 
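The escaping helpers above compose into a single pipeline for -D defines: single-quote the value for a POSIX shell, then double every '$' so make leaves it alone. A minimal, self-contained restatement with a concrete input and output (the define value is hypothetical):

def escape_shell_argument(s):
    # Quote for a POSIX shell: wrap in single quotes, escape embedded ones.
    return "'" + s.replace("'", "'\\''") + "'"

def escape_make_variable_expansion(s):
    # Double '$' so make does not expand it.
    return s.replace('$', '$$')

def escape_cpp_define(s):
    return escape_make_variable_expansion(escape_shell_argument(s))

if __name__ == '__main__':
    print(escape_cpp_define('GREETING="hi $USER"'))
    # prints: 'GREETING="hi $$USER"'  (shell-quoted, '$' doubled for make)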
- if 'rules' in spec: - self.WriteRules(spec['rules'], extra_sources, extra_outputs, part_of_all) - - if 'copies' in spec: - self.WriteCopies(spec['copies'], extra_outputs, part_of_all) - - all_sources = spec.get('sources', []) + extra_sources - if all_sources: - self.WriteSources(configs, deps, all_sources, - extra_outputs, extra_link_deps, part_of_all) - sources = filter(Compilable, all_sources) - if sources: - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1) - extensions = set([os.path.splitext(s)[1] for s in sources]) - for ext in extensions: - if ext in SHARED_HEADER_SUFFIX_RULES_SRCDIR: - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_SRCDIR[ext]) - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2) - for ext in extensions: - if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR1: - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR1[ext]) - for ext in extensions: - if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR2: - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR2[ext]) - self.WriteLn('# End of this set of suffix rules') - - - self.WriteTarget(spec, configs, deps, - extra_link_deps + link_deps, extra_outputs, part_of_all) - - # Update global list of target outputs, used in dependency tracking. - target_outputs[qualified_target] = self.alias - - # Update global list of link dependencies. - if self.type == 'static_library': - target_link_deps[qualified_target] = [self.output] - elif self.type == 'shared_library': - # Anyone that uses us transitively depend on all of our link - # dependencies. - target_link_deps[qualified_target] = [self.output] + link_deps - - self.fp.close() - - - def WriteSubMake(self, output_filename, makefile_path, targets, build_dir): - """Write a "sub-project" Makefile. - - This is a small, wrapper Makefile that calls the top-level Makefile to build - the targets from a single gyp file (i.e. a sub-project). - - Arguments: - output_filename: sub-project Makefile name to write - makefile_path: path to the top-level Makefile - targets: list of "all" targets for this sub-project - build_dir: build output directory, relative to the sub-project - """ - print 'Generating %s' % output_filename - - ensure_directory_exists(output_filename) - self.fp = open(output_filename, 'w') - self.fp.write(header) - # For consistency with other builders, put sub-project build output in the - # sub-project dir (see test/subdirectory/gyptest-subdir-all.py). - self.WriteLn('export builddir_name ?= %s' % - os.path.join(os.path.dirname(output_filename), build_dir)) - self.WriteLn('.PHONY: all') - self.WriteLn('all:') - if makefile_path: - makefile_path = ' -C ' + makefile_path - self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets))) - self.fp.close() - - - def WriteActions(self, actions, extra_sources, extra_outputs, part_of_all): - """Write Makefile code for any 'actions' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - actions (used to make other pieces dependent on these - actions) - part_of_all: flag indicating this target is part of 'all' - """ - for action in actions: - name = self.target + '_' + action['action_name'] - self.WriteLn('### Rules for action "%s":' % action['action_name']) - inputs = action['inputs'] - outputs = action['outputs'] - - # Build up a list of outputs. - # Collect the output dirs we'll need. 
- dirs = set() - for out in outputs: - dir = os.path.split(out)[0] - if dir: - dirs.add(dir) - if int(action.get('process_outputs_as_sources', False)): - extra_sources += outputs - - # Write the actual command. - command = gyp.common.EncodePOSIXShellList(action['action']) - if 'message' in action: - self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message'])) - else: - self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name)) - if len(dirs) > 0: - command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command - # Set LD_LIBRARY_PATH in case the action runs an executable from this - # build which links to shared libs from this build. - if self.path: - cd_action = 'cd %s; ' % Sourceify(self.path) - else: - cd_action = '' - # actions run on the host, so they should in theory only use host - # libraries, but until everything is made cross-compile safe, also use - # target libraries. - # TODO(piman): when everything is cross-compile safe, remove lib.target - self.WriteLn('cmd_%s = export LD_LIBRARY_PATH=$(builddir)/lib.host:' - '$(builddir)/lib.target:$$LD_LIBRARY_PATH; %s%s' - % (name, cd_action, command)) - self.WriteLn() - outputs = map(self.Absolutify, outputs) - # The makefile rules are all relative to the top dir, but the gyp actions - # are defined relative to their containing dir. This replaces the obj - # variable for the action rule with an absolute version so that the output - # goes in the right place. - # Only write the 'obj' and 'builddir' rules for the "primary" output (:1); - # it's superfluous for the "extra outputs", and this avoids accidentally - # writing duplicate dummy rules for those outputs. - self.WriteMakeRule(outputs[:1], ['obj := $(abs_obj)']) - self.WriteMakeRule(outputs[:1], ['builddir := $(abs_builddir)']) - self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)), - part_of_all=part_of_all, command=name) - - # Stuff the outputs in a variable so we can refer to them later. - outputs_variable = 'action_%s_outputs' % name - self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs))) - extra_outputs.append('$(%s)' % outputs_variable) - self.WriteLn() - - self.WriteLn() - - - def WriteRules(self, rules, extra_sources, extra_outputs, part_of_all): - """Write Makefile code for any 'rules' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - rules (used to make other pieces dependent on these rules) - part_of_all: flag indicating this target is part of 'all' - """ - for rule in rules: - name = self.target + '_' + rule['rule_name'] - count = 0 - self.WriteLn('### Generated for rule %s:' % name) - - all_outputs = [] - - for rule_source in rule['rule_sources']: - dirs = set() - rule_source_basename = os.path.basename(rule_source) - (rule_source_root, rule_source_ext) = \ - os.path.splitext(rule_source_basename) - - outputs = [self.ExpandInputRoot(out, rule_source_root) - for out in rule['outputs']] - for out in outputs: - dir = os.path.dirname(out) - if dir: - dirs.add(dir) - if int(rule.get('process_outputs_as_sources', False)): - extra_sources.append(out) - all_outputs += outputs - inputs = map(Sourceify, map(self.Absolutify, [rule_source] + - rule.get('inputs', []))) - actions = ['$(call do_cmd,%s_%d)' % (name, count)] - - if name == 'resources_grit': - # HACK: This is ugly. 
Grit intentionally doesn't touch the - # timestamp of its output file when the file doesn't change, - # which is fine in hash-based dependency systems like scons - # and forge, but not kosher in the make world. After some - # discussion, hacking around it here seems like the least - # amount of pain. - actions += ['@touch --no-create $@'] - - # Only write the 'obj' and 'builddir' rules for the "primary" output - # (:1); it's superfluous for the "extra outputs", and this avoids - # accidentally writing duplicate dummy rules for those outputs. - self.WriteMakeRule(outputs[:1], ['obj := $(abs_obj)']) - self.WriteMakeRule(outputs[:1], ['builddir := $(abs_builddir)']) - self.WriteMakeRule(outputs, inputs + ['FORCE_DO_CMD'], actions) - self.WriteLn('all_deps += %s' % ' '.join(outputs)) - - action = [self.ExpandInputRoot(ac, rule_source_root) - for ac in rule['action']] - mkdirs = '' - if len(dirs) > 0: - mkdirs = 'mkdir -p %s; ' % ' '.join(dirs) - if self.path: - cd_action = 'cd %s; ' % Sourceify(self.path) - else: - cd_action = '' - # Set LD_LIBRARY_PATH in case the rule runs an executable from this - # build which links to shared libs from this build. - # rules run on the host, so they should in theory only use host - # libraries, but until everything is made cross-compile safe, also use - # target libraries. - # TODO(piman): when everything is cross-compile safe, remove lib.target - self.WriteLn( - "cmd_%(name)s_%(count)d = export LD_LIBRARY_PATH=" - "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; " - "%(cd_action)s%(mkdirs)s%(action)s" % { - 'action': gyp.common.EncodePOSIXShellList(action), - 'cd_action': cd_action, - 'count': count, - 'mkdirs': mkdirs, - 'name': name, - }) - self.WriteLn( - 'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % { - 'count': count, - 'name': name, - }) - self.WriteLn() - count += 1 - - outputs_variable = 'rule_%s_outputs' % name - self.WriteList(all_outputs, outputs_variable) - extra_outputs.append('$(%s)' % outputs_variable) - - self.WriteLn('### Finished generating for rule: %s' % name) - self.WriteLn() - self.WriteLn('### Finished generating for all rules') - self.WriteLn('') - - - def WriteCopies(self, copies, extra_outputs, part_of_all): - """Write Makefile code for any 'copies' from the gyp input. - - extra_outputs: a list that will be filled in with any outputs of this action - (used to make other pieces dependent on this action) - part_of_all: flag indicating this target is part of 'all' - """ - self.WriteLn('### Generated for copy rule.') - - variable = self.target + '_copies' - outputs = [] - for copy in copies: - for path in copy['files']: - path = Sourceify(self.Absolutify(path)) - filename = os.path.split(path)[1] - output = Sourceify(self.Absolutify(os.path.join(copy['destination'], - filename))) - self.WriteDoCmd([output], [path], 'copy', part_of_all) - outputs.append(output) - self.WriteLn('%s = %s' % (variable, ' '.join(outputs))) - extra_outputs.append('$(%s)' % variable) - self.WriteLn() - - - def WriteSources(self, configs, deps, sources, - extra_outputs, extra_link_deps, - part_of_all): - """Write Makefile code for any 'sources' from the gyp input. - These are source files necessary to build the current target. - - configs, deps, sources: input from gyp. 
- extra_outputs: a list of extra outputs this action should be dependent on; - used to serialize action/rules before compilation - extra_link_deps: a list that will be filled in with any outputs of - compilation (to be used in link lines) - part_of_all: flag indicating this target is part of 'all' - """ - - # Write configuration-specific variables for CFLAGS, etc. - for configname in sorted(configs.keys()): - config = configs[configname] - self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D', - quoter=EscapeCppDefine) - self.WriteLn("# Flags passed to both C and C++ files."); - self.WriteList(config.get('cflags'), 'CFLAGS_%s' % configname) - self.WriteLn("# Flags passed to only C (and not C++) files."); - self.WriteList(config.get('cflags_c'), 'CFLAGS_C_%s' % configname) - self.WriteLn("# Flags passed to only C++ (and not C) files."); - self.WriteList(config.get('cflags_cc'), 'CFLAGS_CC_%s' % configname) - includes = config.get('include_dirs') - if includes: - includes = map(Sourceify, map(self.Absolutify, includes)) - self.WriteList(includes, 'INCS_%s' % configname, prefix='-I') - - sources = filter(Compilable, sources) - objs = map(self.Objectify, map(self.Absolutify, map(Target, sources))) - self.WriteList(objs, 'OBJS') - - self.WriteLn('# Add to the list of files we specially track ' - 'dependencies for.') - self.WriteLn('all_deps += $(OBJS)') - self.WriteLn() - - # Make sure our dependencies are built first. - if deps: - self.WriteMakeRule(['$(OBJS)'], deps, - comment = 'Make sure our dependencies are built ' - 'before any of us.', - order_only = True) - - # Make sure the actions and rules run first. - # If they generate any extra headers etc., the per-.o file dep tracking - # will catch the proper rebuilds, so order only is still ok here. - if extra_outputs: - self.WriteMakeRule(['$(OBJS)'], extra_outputs, - comment = 'Make sure our actions/rules run ' - 'before any of us.', - order_only = True) - - if objs: - extra_link_deps.append('$(OBJS)') - self.WriteLn("""\ -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual.""") - self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)") - self.WriteLn("$(OBJS): GYP_CFLAGS := $(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_C_$(BUILDTYPE)) " - "$(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))") - self.WriteLn("$(OBJS): GYP_CXXFLAGS := $(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_CC_$(BUILDTYPE)) " - "$(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))") - - self.WriteLn() - - - def ComputeOutput(self, spec): - """Return the 'output' (full output path) of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - '$(obj)/baz/libfoobar.so' - """ - output = None - target = spec['target_name'] - target_prefix = '' - target_ext = '' - path = os.path.join('$(obj).' 
+ self.toolset, self.path) - if self.type == 'static_library': - if target[:3] == 'lib': - target = target[3:] - target_prefix = 'lib' - target_ext = '.a' - elif self.type in ('loadable_module', 'shared_library'): - if target[:3] == 'lib': - target = target[3:] - target_prefix = 'lib' - target_ext = '.so' - elif self.type == 'none': - target = '%s.stamp' % target - elif self.type == 'settings': - return None - elif self.type == 'executable': - path = os.path.join('$(builddir)') - else: - print ("ERROR: What output file should be generated?", - "typ", self.type, "target", target) - - path = spec.get('product_dir', path) - target_prefix = spec.get('product_prefix', target_prefix) - target = spec.get('product_name', target) - product_ext = spec.get('product_extension') - if product_ext: - target_ext = '.' + product_ext - - return os.path.join(path, target_prefix + target + target_ext) - - - def ComputeDeps(self, spec): - """Compute the dependencies of a gyp spec. - - Returns a tuple (deps, link_deps), where each is a list of - filenames that will need to be put in front of make for either - building (deps) or linking (link_deps). - """ - deps = [] - link_deps = [] - if 'dependencies' in spec: - deps.extend([target_outputs[dep] for dep in spec['dependencies'] - if target_outputs[dep]]) - for dep in spec['dependencies']: - if dep in target_link_deps: - link_deps.extend(target_link_deps[dep]) - deps.extend(link_deps) - # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)? - # This hack makes it work: - # link_deps.extend(spec.get('libraries', [])) - return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) - - - def WriteTarget(self, spec, configs, deps, link_deps, extra_outputs, - part_of_all): - """Write Makefile code to produce the final target of the gyp spec. - - spec, configs: input from gyp. - deps, link_deps: dependency lists; see ComputeDeps() - extra_outputs: any extra outputs that our target should depend on - part_of_all: flag indicating this target is part of 'all' - """ - - self.WriteLn('### Rules for final target.') - - if extra_outputs: - self.WriteMakeRule([self.output], extra_outputs, - comment = 'Build our special outputs first.', - order_only = True) - self.WriteMakeRule(extra_outputs, deps, - comment=('Preserve order dependency of ' - 'special output on deps.'), - order_only = True, - multiple_output_trick = False) - - if self.type not in ('settings', 'none'): - for configname in sorted(configs.keys()): - config = configs[configname] - self.WriteList(config.get('ldflags'), 'LDFLAGS_%s' % configname) - libraries = spec.get('libraries') - if libraries: - # Remove duplicate entries - libraries = gyp.common.uniquer(libraries) - self.WriteList(libraries, 'LIBS') - self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' % self.output) - self.WriteLn('%s: LIBS := $(LIBS)' % self.output) - - if self.type == 'executable': - self.WriteDoCmd([self.output], link_deps, 'link', part_of_all) - elif self.type == 'static_library': - self.WriteDoCmd([self.output], link_deps, 'alink', part_of_all) - elif self.type in ('loadable_module', 'shared_library'): - self.WriteDoCmd([self.output], link_deps, 'solink', part_of_all) - elif self.type == 'none': - # Write a stamp line. - self.WriteDoCmd([self.output], deps, 'touch', part_of_all) - elif self.type == 'settings': - # Only used for passing flags around. - pass - else: - print "WARNING: no output for", self.type, target - - # Add an alias for each target (if there are any outputs). 
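The naming scheme ComputeOutput implements can be summarized in a few lines of standalone Python; this simplified sketch ignores the product_dir/product_prefix/product_name/product_extension overrides and is not code from the tree.

import os

def compute_output(target_type, target, path, toolset='target'):
    prefix, ext = '', ''
    base = os.path.join('$(obj).' + toolset, path)
    if target_type == 'static_library':
        prefix, ext = 'lib', '.a'
    elif target_type in ('loadable_module', 'shared_library'):
        prefix, ext = 'lib', '.so'
    elif target_type == 'executable':
        base = '$(builddir)'
    elif target_type == 'none':
        target += '.stamp'
    if prefix and target.startswith('lib'):
        target = target[3:]          # avoid names like "liblibfoo.a"
    return os.path.join(base, prefix + target + ext)

if __name__ == '__main__':
    # The docstring example: loadable module 'foobar' in directory 'baz'.
    print(compute_output('loadable_module', 'foobar', 'baz'))
    # -> $(obj).target/baz/libfoobar.so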
- # Installable target aliases are created below. - if ((self.output and self.output != self.target) and - (self.type not in self._INSTALLABLE_TARGETS)): - self.WriteMakeRule([self.target], [self.output], - comment='Add target alias', phony = True) - if part_of_all: - self.WriteMakeRule(['all'], [self.target], - comment = 'Add target alias to "all" target.', - phony = True) - - # Add special-case rules for our installable targets. - # 1) They need to install to the build dir or "product" dir. - # 2) They get shortcuts for building (e.g. "make chrome"). - # 3) They are part of "make all". - if self.type in self._INSTALLABLE_TARGETS: - if self.type in ('shared_library'): - file_desc = 'shared library' - # Install all shared libs into a common directory (per toolset) for - # convenient access with LD_LIBRARY_PATH. - binpath = '$(builddir)/lib.%s/%s' % (self.toolset, self.alias) - else: - file_desc = 'executable' - binpath = '$(builddir)/' + self.alias - installable_deps = [self.output] - # Point the target alias to the final binary output. - self.WriteMakeRule([self.target], [binpath], - comment='Add target alias', phony = True) - if binpath != self.output: - self.WriteDoCmd([binpath], [self.output], 'copy', - comment = 'Copy this to the %s output path.' % - file_desc, part_of_all=part_of_all) - installable_deps.append(binpath) - if self.output != self.alias and self.alias != self.target: - self.WriteMakeRule([self.alias], installable_deps, - comment = 'Short alias for building this %s.' % - file_desc, phony = True) - if part_of_all: - self.WriteMakeRule(['all'], [binpath], - comment = 'Add %s to "all" target.' % file_desc, - phony = True) - - - def WriteList(self, list, variable=None, prefix='', quoter=QuoteIfNecessary): - """Write a variable definition that is a list of values. - - E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out - foo = blaha blahb - but in a pretty-printed style. - """ - self.fp.write(variable + " := ") - if list: - list = [quoter(prefix + l) for l in list] - self.fp.write(" \\\n\t".join(list)) - self.fp.write("\n\n") - - - def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None): - """Write a Makefile rule that uses do_cmd. - - This makes the outputs dependent on the command line that was run, - as well as support the V= make command line flag. - """ - self.WriteMakeRule(outputs, inputs, - actions = ['$(call do_cmd,%s)' % command], - comment = comment, - force = True) - # Add our outputs to the list of targets we read depfiles from. - self.WriteLn('all_deps += %s' % ' '.join(outputs)) - - - def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, - order_only=False, force=False, phony=False, - multiple_output_trick=True): - """Write a Makefile rule, with some extra tricks. - - outputs: a list of outputs for the rule (note: this is not directly - supported by make; see comments below) - inputs: a list of inputs for the rule - actions: a list of shell commands to run for the rule - comment: a comment to put in the Makefile above the rule (also useful - for making this Python script's code self-documenting) - order_only: if true, makes the dependency order-only - force: if true, include FORCE_DO_CMD as an order-only dep - phony: if true, the rule does not actually generate the named output, the - output is just a name to run the rule - multiple_output_trick: if true (the default), perform tricks such as dummy - rules to avoid problems with multiple outputs. 
- """ - if comment: - self.WriteLn('# ' + comment) - if phony: - self.WriteLn('.PHONY: ' + ' '.join(outputs)) - # TODO(evanm): just make order_only a list of deps instead of these hacks. - if order_only: - order_insert = '| ' - else: - order_insert = '' - if force: - force_append = ' FORCE_DO_CMD' - else: - force_append = '' - if actions: - self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0]) - self.WriteLn('%s: %s%s%s' % (outputs[0], order_insert, ' '.join(inputs), - force_append)) - if actions: - for action in actions: - self.WriteLn('\t%s' % action) - if multiple_output_trick and len(outputs) > 1: - # If we have more than one output, a rule like - # foo bar: baz - # that for *each* output we must run the action, potentially - # in parallel. That is not what we're trying to write -- what - # we want is that we run the action once and it generates all - # the files. - # http://www.gnu.org/software/hello/manual/automake/Multiple-Outputs.html - # discusses this problem and has this solution: - # 1) Write the naive rule that would produce parallel runs of - # the action. - # 2) Make the outputs seralized on each other, so we won't start - # a parallel run until the first run finishes, at which point - # we'll have generated all the outputs and we're done. - self.WriteLn('%s: %s' % (' '.join(outputs[1:]), outputs[0])) - # Add a dummy command to the "extra outputs" rule, otherwise make seems to - # think these outputs haven't (couldn't have?) changed, and thus doesn't - # flag them as changed (i.e. include in '$?') when evaluating dependent - # rules, which in turn causes do_cmd() to skip running dependent commands. - self.WriteLn('%s: ;' % (' '.join(outputs[1:]))) - self.WriteLn() - - - def WriteLn(self, text=''): - self.fp.write(text + '\n') - - - def Objectify(self, path): - """Convert a path to its output directory form.""" - if '$(' in path: - path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset) - return path - return '$(obj).%s/$(TARGET)/%s' % (self.toolset, path) - - def Absolutify(self, path): - """Convert a subdirectory-relative path into a base-relative path. - Skips over paths that contain variables.""" - if '$(' in path: - return path - return os.path.normpath(os.path.join(self.path, path)) - - - def FixupArgPath(self, arg): - if '/' in arg or '.h.' in arg: - return self.Absolutify(arg) - return arg - - - def ExpandInputRoot(self, template, expansion): - if '%(INPUT_ROOT)s' not in template: - return template - path = template % { 'INPUT_ROOT': expansion } - if not os.path.dirname(path): - # If it's just the file name, turn it into a path so FixupArgPath() - # will know to Absolutify() it. - path = os.path.join('.', path) - return path - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params['options'] - generator_flags = params.get('generator_flags', {}) - builddir_name = generator_flags.get('output_dir', 'out') - - def CalculateMakefilePath(build_file, base_name): - """Determine where to write a Makefile for a given gyp file.""" - # Paths in gyp files are relative to the .gyp file, but we want - # paths relative to the source root for the master makefile. Grab - # the path of the .gyp file as the base to relativize against. - # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.depth) - # We write the file in the base_path directory. 
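The multiple-output workaround described in WriteMakeRule above boils down to a few extra lines of generated make: the real rule is written only for the first output, every additional output is made to depend on the first, and those extra outputs get an explicit empty command. A standalone sketch that just prints the emitted lines (target and input names are hypothetical):

def make_rule_lines(outputs, inputs, actions):
    lines = ['%s: %s' % (outputs[0], ' '.join(inputs))]
    lines += ['\t%s' % a for a in actions]
    if len(outputs) > 1:
        extras = ' '.join(outputs[1:])
        lines.append('%s: %s' % (extras, outputs[0]))  # serialize extras on the first output
        lines.append('%s: ;' % extras)                 # dummy command so $? stays accurate
    return lines

if __name__ == '__main__':
    for line in make_rule_lines(['gen/foo.h', 'gen/foo.cc'],
                                ['foo.in', 'FORCE_DO_CMD'],
                                ['$(call do_cmd,my_rule_0)']):
        print(line)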
- output_file = os.path.join(options.depth, base_path, base_name) - if options.generator_output: - output_file = os.path.join(options.generator_output, output_file) - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.toplevel_dir) - return base_path, output_file - - # TODO: search for the first non-'Default' target. This can go - # away when we add verification that all targets have the - # necessary configurations. - default_configuration = None - toolsets = set([target_dicts[target]['toolset'] for target in target_list]) - for target in target_list: - spec = target_dicts[target] - if spec['default_configuration'] != 'Default': - default_configuration = spec['default_configuration'] - break - if not default_configuration: - default_configuration = 'Default' - - srcdir = '.' - makefile_name = 'Makefile' + options.suffix - makefile_path = os.path.join(options.toplevel_dir, makefile_name) - if options.generator_output: - global srcdir_prefix - makefile_path = os.path.join(options.generator_output, makefile_path) - srcdir = gyp.common.RelativePath(srcdir, options.generator_output) - srcdir_prefix = '$(srcdir)/' - ensure_directory_exists(makefile_path) - root_makefile = open(makefile_path, 'w') - root_makefile.write(SHARED_HEADER_SRCDIR % srcdir) - root_makefile.write(SHARED_HEADER_BUILDDIR_NAME % builddir_name) - root_makefile.write(SHARED_HEADER.replace('__default_configuration__', - default_configuration)) - for toolset in toolsets: - root_makefile.write('TOOLSET := %s\n' % toolset) - root_makefile.write(ROOT_HEADER_SUFFIX_RULES) - - # Find the list of targets that derive from the gyp file(s) being built. - needed_targets = set() - for build_file in params['build_files']: - for target in gyp.common.AllTargets(target_list, target_dicts, build_file): - needed_targets.add(target) - - build_files = set() - include_list = set() - for qualified_target in target_list: - build_file, target, toolset = gyp.common.ParseQualifiedTarget( - qualified_target) - build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir)) - included_files = data[build_file]['included_files'] - for included_file in included_files: - # The included_files entries are relative to the dir of the build file - # that included them, so we have to undo that and then make them relative - # to the root dir. - relative_include_file = gyp.common.RelativePath( - gyp.common.UnrelativePath(included_file, build_file), - options.toplevel_dir) - abs_include_file = os.path.abspath(relative_include_file) - # If the include file is from the ~/.gyp dir, we should use absolute path - # so that relocating the src dir doesn't break the path. - if (params['home_dot_gyp'] and - abs_include_file.startswith(params['home_dot_gyp'])): - build_files.add(abs_include_file) - else: - build_files.add(relative_include_file) - - base_path, output_file = CalculateMakefilePath(build_file, - target + '.' + toolset + options.suffix + '.mk') - - spec = target_dicts[qualified_target] - configs = spec['configurations'] - - writer = MakefileWriter() - writer.Write(qualified_target, base_path, output_file, spec, configs, - part_of_all=qualified_target in needed_targets) - - # Our root_makefile lives at the source root. Compute the relative path - # from there to the output_file for including. - mkfile_rel_path = gyp.common.RelativePath(output_file, - os.path.dirname(makefile_path)) - include_list.add('include ' + mkfile_rel_path + '\n') - - # Write out per-gyp (sub-project) Makefiles. 
- depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd()) - for build_file in build_files: - # The paths in build_files were relativized above, so undo that before - # testing against the non-relativized items in target_list and before - # calculating the Makefile path. - build_file = os.path.join(depth_rel_path, build_file) - gyp_targets = [target_dicts[target]['target_name'] for target in target_list - if target.startswith(build_file) and - target in needed_targets] - # Only generate Makefiles for gyp files with targets. - if not gyp_targets: - continue - base_path, output_file = CalculateMakefilePath(build_file, - os.path.splitext(os.path.basename(build_file))[0] + '.Makefile') - makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path), - os.path.dirname(output_file)) - writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets, - builddir_name) - - - # Write out the sorted list of includes. - root_makefile.write('\n') - for include in sorted(include_list): - root_makefile.write(include) - root_makefile.write('\n') - - # Write the target to regenerate the Makefile. - if generator_flags.get('auto_regeneration', True): - build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir) - for filename in params['build_files_arg']] - gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'], - options.toplevel_dir) - if not gyp_binary.startswith(os.sep): - gyp_binary = os.path.join('.', gyp_binary) - root_makefile.write("%s: %s\n\t%s\n" % ( - makefile_name, - ' '.join(map(Sourceify, build_files)), - gyp.common.EncodePOSIXShellList( - [gyp_binary, '-fmake'] + - gyp.RegenerateFlags(options) + - build_files_args))) - - root_makefile.write(SHARED_FOOTER) - - root_makefile.close() diff --git a/third_party/gyp/pylib/gyp/generator/msvs.py b/third_party/gyp/pylib/gyp/generator/msvs.py deleted file mode 100644 index 9e108f8..0000000 --- a/third_party/gyp/pylib/gyp/generator/msvs.py +++ /dev/null @@ -1,1216 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import ntpath -import posixpath -import os -import re -import subprocess -import sys - -import gyp.MSVSNew as MSVSNew -import gyp.MSVSProject as MSVSProject -import gyp.MSVSToolFile as MSVSToolFile -import gyp.MSVSUserFile as MSVSUserFile -import gyp.MSVSVersion as MSVSVersion -import gyp.common - - -# Regular expression for validating Visual Studio GUIDs. If the GUID -# contains lowercase hex letters, MSVS will be fine. However, -# IncrediBuild BuildConsole will parse the solution file, but then -# silently skip building the target causing hard to track down errors. -# Note that this only happens with the BuildConsole, and does not occur -# if IncrediBuild is executed from inside Visual Studio. This regex -# validates that the string looks like a GUID with all uppercase hex -# letters. 
-VALID_MSVS_GUID_CHARS = re.compile('^[A-F0-9\-]+$') - - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '.exe', - 'STATIC_LIB_PREFIX': '', - 'SHARED_LIB_PREFIX': '', - 'STATIC_LIB_SUFFIX': '.lib', - 'SHARED_LIB_SUFFIX': '.dll', - 'INTERMEDIATE_DIR': '$(IntDir)', - 'SHARED_INTERMEDIATE_DIR': '$(OutDir)/obj/global_intermediate', - 'OS': 'win', - 'PRODUCT_DIR': '$(OutDir)', - 'LIB_DIR': '$(OutDir)/lib', - 'RULE_INPUT_ROOT': '$(InputName)', - 'RULE_INPUT_EXT': '$(InputExt)', - 'RULE_INPUT_NAME': '$(InputFileName)', - 'RULE_INPUT_PATH': '$(InputPath)', - 'CONFIGURATION_NAME': '$(ConfigurationName)', -} - - -# The msvs specific sections that hold paths -generator_additional_path_sections = [ - 'msvs_cygwin_dirs', - 'msvs_props', -] - -generator_additional_non_configuration_keys = [ - 'msvs_cygwin_dirs', - 'msvs_cygwin_shell', -] - -cached_username = None -cached_domain = None - -# TODO(gspencer): Switch the os.environ calls to be -# win32api.GetDomainName() and win32api.GetUserName() once the -# python version in depot_tools has been updated to work on Vista -# 64-bit. -def _GetDomainAndUserName(): - if sys.platform not in ('win32', 'cygwin'): - return ('DOMAIN', 'USERNAME') - global cached_username - global cached_domain - if not cached_domain or not cached_username: - domain = os.environ.get('USERDOMAIN') - username = os.environ.get('USERNAME') - if not domain or not username: - call = subprocess.Popen(['net', 'config', 'Workstation'], - stdout=subprocess.PIPE) - config = call.communicate()[0] - username_re = re.compile('^User name\s+(\S+)', re.MULTILINE) - username_match = username_re.search(config) - if username_match: - username = username_match.group(1) - domain_re = re.compile('^Logon domain\s+(\S+)', re.MULTILINE) - domain_match = domain_re.search(config) - if domain_match: - domain = domain_match.group(1) - cached_domain = domain - cached_username = username - return (cached_domain, cached_username) - -fixpath_prefix = None - -def _FixPath(path): - """Convert paths to a form that will make sense in a vcproj file. - - Arguments: - path: The path to convert, may contain / etc. - Returns: - The path with all slashes made into backslashes. - """ - if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$': - path = os.path.join(fixpath_prefix, path) - path = path.replace('/', '\\') - if len(path) > 0 and path[-1] == '\\': - path = path[:-1] - return path - - -def _SourceInFolders(sources, prefix=None, excluded=None): - """Converts a list split source file paths into a vcproj folder hierarchy. - - Arguments: - sources: A list of source file paths split. - prefix: A list of source file path layers meant to apply to each of sources. - Returns: - A hierarchy of filenames and MSVSProject.Filter objects that matches the - layout of the source tree. - For example: - _SourceInFolders([['a', 'bob1.c'], ['b', 'bob2.c']], prefix=['joe']) - --> - [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']), - MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])] - """ - if not prefix: prefix = [] - result = [] - excluded_result = [] - folders = dict() - # Gather files into the final result, excluded, or folders. - for s in sources: - if len(s) == 1: - filename = '\\'.join(prefix + s) - if filename in excluded: - excluded_result.append(filename) - else: - result.append(filename) - else: - if not folders.get(s[0]): - folders[s[0]] = [] - folders[s[0]].append(s[1:]) - # Add a folder for excluded files. 
- if excluded_result: - excluded_folder = MSVSProject.Filter('_excluded_files', - contents=excluded_result) - result.append(excluded_folder) - # Populate all the folders. - for f in folders: - contents = _SourceInFolders(folders[f], prefix=prefix + [f], - excluded=excluded) - contents = MSVSProject.Filter(f, contents=contents) - result.append(contents) - - return result - - -def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False): - if not value: return - # TODO(bradnelson): ugly hack, fix this more generally!!! - if 'Directories' in setting or 'Dependencies' in setting: - if type(value) == str: - value = value.replace('/', '\\') - else: - value = [i.replace('/', '\\') for i in value] - if not tools.get(tool_name): - tools[tool_name] = dict() - tool = tools[tool_name] - if tool.get(setting): - if only_if_unset: return - if type(tool[setting]) == list: - tool[setting] += value - else: - raise TypeError( - 'Appending "%s" to a non-list setting "%s" for tool "%s" is ' - 'not allowed, previous value: %s' % ( - value, setting, tool_name, str(tool[setting]))) - else: - tool[setting] = value - - -def _ConfigPlatform(config_data): - return config_data.get('msvs_configuration_platform', 'Win32') - - -def _ConfigBaseName(config_name, platform_name): - if config_name.endswith('_' + platform_name): - return config_name[0:-len(platform_name)-1] - else: - return config_name - - -def _ConfigFullName(config_name, config_data): - platform_name = _ConfigPlatform(config_data) - return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name) - - -def _PrepareActionRaw(spec, cmd, cygwin_shell, has_input_path, quote_cmd): - if cygwin_shell: - # Find path to cygwin. - cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0]) - # Prepare command. - direct_cmd = cmd - direct_cmd = [i.replace('$(IntDir)', - '`cygpath -m "${INTDIR}"`') for i in direct_cmd] - direct_cmd = [i.replace('$(OutDir)', - '`cygpath -m "${OUTDIR}"`') for i in direct_cmd] - if has_input_path: - direct_cmd = [i.replace('$(InputPath)', - '`cygpath -m "${INPUTPATH}"`') - for i in direct_cmd] - direct_cmd = ['"%s"' % i for i in direct_cmd] - direct_cmd = [i.replace('"', '\\"') for i in direct_cmd] - #direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd) - direct_cmd = ' '.join(direct_cmd) - # TODO(quote): regularize quoting path names throughout the module - cmd = ( - '"$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && ' - 'set CYGWIN=nontsec&& ') - if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0: - cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& ' - if direct_cmd.find('INTDIR') >= 0: - cmd += 'set INTDIR=$(IntDir)&& ' - if direct_cmd.find('OUTDIR') >= 0: - cmd += 'set OUTDIR=$(OutDir)&& ' - if has_input_path and direct_cmd.find('INPUTPATH') >= 0: - cmd += 'set INPUTPATH=$(InputPath) && ' - cmd += ( - 'bash -c "%(cmd)s"') - cmd = cmd % {'cygwin_dir': cygwin_dir, - 'cmd': direct_cmd} - return cmd - else: - # Convert cat --> type to mimic unix. - if cmd[0] == 'cat': - cmd = ['type'] + cmd[1:] - if quote_cmd: - # Support a mode for using cmd directly. - # Convert any paths to native form (first element is used directly). - # TODO(quote): regularize quoting path names throughout the module - direct_cmd = ([cmd[0].replace('/', '\\')] + - ['"%s"' % _FixPath(i) for i in cmd[1:]]) - else: - direct_cmd = ([cmd[0].replace('/', '\\')] + - [_FixPath(i) for i in cmd[1:]]) - # Collapse into a single command. 
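The folder grouping that _SourceInFolders performs can be reproduced with plain dictionaries, which may be easier to read than the MSVSProject.Filter objects; this standalone sketch mirrors the docstring example above and is not code from the tree.

def source_in_folders(sources, prefix=None):
    prefix = prefix or []
    result, folders = [], {}
    for s in sources:
        if len(s) == 1:
            result.append('\\'.join(prefix + s))         # file at this level
        else:
            folders.setdefault(s[0], []).append(s[1:])   # goes into a sub-folder
    for name, children in folders.items():
        result.append({name: source_in_folders(children, prefix + [name])})
    return result

if __name__ == '__main__':
    print(source_in_folders([['a', 'bob1.c'], ['b', 'bob2.c']], prefix=['joe']))
    # -> [{'a': ['joe\\a\\bob1.c']}, {'b': ['joe\\b\\bob2.c']}]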
- return ' '.join(direct_cmd) - -def _PrepareAction(spec, rule, has_input_path): - # Find path to cygwin. - cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0]) - - # Currently this weird argument munging is used to duplicate the way a - # python script would need to be run as part of the chrome tree. - # Eventually we should add some sort of rule_default option to set this - # per project. For now the behavior chrome needs is the default. - mcs = rule.get('msvs_cygwin_shell') - if mcs is None: - mcs = int(spec.get('msvs_cygwin_shell', 1)) - elif isinstance(mcs, str): - mcs = int(mcs) - quote_cmd = int(rule.get('msvs_quote_cmd', 1)) - return _PrepareActionRaw(spec, rule['action'], mcs, - has_input_path, quote_cmd) - - -def _PickPrimaryInput(inputs): - # Pick second input as the primary one, unless there's only one. - # TODO(bradnelson): this is a bit of a hack, - # find something more general. - if len(inputs) > 1: - return inputs[1] - else: - return inputs[0] - -def _SetRunAs(user_file, config_name, c_data, command, - environment={}, working_directory=""): - """Add a run_as rule to the user file. - - Arguments: - user_file: The MSVSUserFile to add the command to. - config_name: The name of the configuration to add it to - c_data: The dict of the configuration to add it to - command: The path to the command to execute. - args: An array of arguments to the command. (optional) - working_directory: Directory to run the command in. (optional) - """ - user_file.AddDebugSettings(_ConfigFullName(config_name, c_data), - command, environment, working_directory) - -def _AddCustomBuildTool(p, spec, inputs, outputs, description, cmd): - """Add a custom build tool to execute something. - - Arguments: - p: the target project - spec: the target project dict - inputs: list of inputs - outputs: list of outputs - description: description of the action - cmd: command line to execute - """ - inputs = [_FixPath(i) for i in inputs] - outputs = [_FixPath(i) for i in outputs] - tool = MSVSProject.Tool( - 'VCCustomBuildTool', { - 'Description': description, - 'AdditionalDependencies': ';'.join(inputs), - 'Outputs': ';'.join(outputs), - 'CommandLine': cmd, - }) - primary_input = _PickPrimaryInput(inputs) - # Add to the properties of primary input for each config. - for config_name, c_data in spec['configurations'].iteritems(): - p.AddFileConfig(primary_input, - _ConfigFullName(config_name, c_data), tools=[tool]) - - -def _RuleExpandPath(path, input_file): - """Given the input file to which a rule applied, string substitute a path. - - Arguments: - path: a path to string expand - input_file: the file to which the rule applied. - Returns: - The string substituted path. - """ - path = path.replace('$(InputName)', - os.path.splitext(os.path.split(input_file)[1])[0]) - path = path.replace('$(InputExt)', - os.path.splitext(os.path.split(input_file)[1])[1]) - path = path.replace('$(InputFileName)', os.path.split(input_file)[1]) - path = path.replace('$(InputPath)', input_file) - return path - - -def _FindRuleTriggerFiles(rule, sources): - """Find the list of files which a particular rule applies to. - - Arguments: - rule: the rule in question - sources: the set of all known source files for this project - Returns: - The list of sources that trigger a particular rule. - """ - rule_ext = rule['extension'] - return [s for s in sources if s.endswith('.' + rule_ext)] - - -def _RuleInputsAndOutputs(rule, trigger_file): - """Find the inputs and outputs generated by a rule. 
- - Arguments: - rule: the rule in question - sources: the set of all known source files for this project - Returns: - The pair of (inputs, outputs) involved in this rule. - """ - raw_inputs = rule.get('inputs', []) - raw_outputs = rule.get('outputs', []) - inputs = set() - outputs = set() - inputs.add(trigger_file) - for i in raw_inputs: - inputs.add(_RuleExpandPath(i, trigger_file)) - for o in raw_outputs: - outputs.add(_RuleExpandPath(o, trigger_file)) - return (inputs, outputs) - - -def _GenerateNativeRules(p, rules, output_dir, spec, options): - """Generate a native rules file. - - Arguments: - p: the target project - rules: the set of rules to include - output_dir: the directory in which the project/gyp resides - spec: the project dict - options: global generator options - """ - rules_filename = '%s%s.rules' % (spec['target_name'], - options.suffix) - rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename)) - rules_file.Create(spec['target_name']) - # Add each rule. - for r in rules: - rule_name = r['rule_name'] - rule_ext = r['extension'] - inputs = [_FixPath(i) for i in r.get('inputs', [])] - outputs = [_FixPath(i) for i in r.get('outputs', [])] - cmd = _PrepareAction(spec, r, has_input_path=True) - rules_file.AddCustomBuildRule(name=rule_name, - description=r.get('message', rule_name), - extensions=[rule_ext], - additional_dependencies=inputs, - outputs=outputs, - cmd=cmd) - # Write out rules file. - rules_file.Write() - - # Add rules file to project. - p.AddToolFile(rules_filename) - - -def _Cygwinify(path): - path = path.replace('$(OutDir)', '$(OutDirCygwin)') - path = path.replace('$(IntDir)', '$(IntDirCygwin)') - return path - - -def _GenerateExternalRules(p, rules, output_dir, spec, - sources, options, actions_to_add): - """Generate an external makefile to do a set of rules. - - Arguments: - p: the target project - rules: the list of rules to include - output_dir: path containing project and gyp files - spec: project specification data - sources: set of sources known - options: global generator options - """ - filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix) - file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename)) - # Find cygwin style versions of some paths. - file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n') - file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n') - # Gather stuff needed to emit all: target. - all_inputs = set() - all_outputs = set() - all_output_dirs = set() - first_outputs = [] - for rule in rules: - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - inputs, outputs = _RuleInputsAndOutputs(rule, tf) - all_inputs.update(set(inputs)) - all_outputs.update(set(outputs)) - # Only use one target from each rule as the dependency for - # 'all' so we don't try to build each rule multiple times. - first_outputs.append(list(outputs)[0]) - # Get the unique output directories for this rule. - output_dirs = [os.path.split(i)[0] for i in outputs] - for od in output_dirs: - all_output_dirs.add(od) - first_outputs_cyg = [_Cygwinify(i) for i in first_outputs] - # Write out all: target, including mkdir for each output directory. - file.write('all: %s\n' % ' '.join(first_outputs_cyg)) - for od in all_output_dirs: - file.write('\tmkdir -p %s\n' % od) - file.write('\n') - # Define how each output is generated. - for rule in rules: - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - # Get all the inputs and outputs for this rule for this trigger file. 
- inputs, outputs = _RuleInputsAndOutputs(rule, tf) - inputs = [_Cygwinify(i) for i in inputs] - outputs = [_Cygwinify(i) for i in outputs] - # Prepare the command line for this rule. - cmd = [_RuleExpandPath(c, tf) for c in rule['action']] - cmd = ['"%s"' % i for i in cmd] - cmd = ' '.join(cmd) - # Add it to the makefile. - file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs))) - file.write('\t%s\n\n' % cmd) - # Close up the file. - file.close() - - # Add makefile to list of sources. - sources.add(filename) - # Add a build action to call makefile. - cmd = ['make', - 'OutDir=$(OutDir)', - 'IntDir=$(IntDir)', - '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}', - '-f', filename] - cmd = _PrepareActionRaw(spec, cmd, True, False, True) - # TODO(bradnelson): this won't be needed if we have a better way to pick - # the primary input. - all_inputs = list(all_inputs) - all_inputs.insert(1, filename) - actions_to_add.append({ - 'inputs': [_FixPath(i) for i in all_inputs], - 'outputs': [_FixPath(i) for i in all_outputs], - 'description': 'Running %s' % cmd, - 'cmd': cmd, - }) - - -def _EscapeEnvironmentVariableExpansion(s): - """Escapes any % characters so that Windows-style environment variable - expansions will leave them alone. - See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile - to understand why we have to do this.""" - s = s.replace('%', '%%') - return s - - -quote_replacer_regex = re.compile(r'(\\*)"') -def _EscapeCommandLineArgument(s): - """Escapes a Windows command-line argument, so that the Win32 - CommandLineToArgv function will turn the escaped result back into the - original string. See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx - ("Parsing C++ Command-Line Arguments") to understand why we have to do - this.""" - def replace(match): - # For a literal quote, CommandLineToArgv requires an odd number of - # backslashes preceding it, and it produces half as many literal backslashes - # (rounded down). So we need to produce 2n+1 backslashes. - return 2 * match.group(1) + '\\"' - # Escape all quotes so that they are interpreted literally. - s = quote_replacer_regex.sub(replace, s) - # Now add unescaped quotes so that any whitespace is interpreted literally. - s = '"' + s + '"' - return s - - -delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)') -def _EscapeVCProjCommandLineArgListItem(s): - """The VCProj format stores string lists in a single string using commas and - semi-colons as separators, which must be quoted if they are to be - interpreted literally. However, command-line arguments may already have - quotes, and the VCProj parser is ignorant of the backslash escaping - convention used by CommandLineToArgv, so the command-line quotes and the - VCProj quotes may not be the same quotes. So to store a general - command-line argument in a VCProj list, we need to parse the existing - quoting according to VCProj's convention and quote any delimiters that are - not already quoted by that convention. The quotes that we add will also be - seen by CommandLineToArgv, so if backslashes precede them then we also have - to escape those backslashes according to the CommandLineToArgv - convention.""" - def replace(match): - # For a non-literal quote, CommandLineToArgv requires an even number of - # backslashes preceding it, and it produces half as many literal - # backslashes. So we need to produce 2n backslashes. 
- return 2 * match.group(1) + '"' + match.group(2) + '"' - list = s.split('"') - # The unquoted segments are at the even-numbered indices. - for i in range(0, len(list), 2): - list[i] = delimiters_replacer_regex.sub(replace, list[i]) - # Concatenate back into a single string - s = '"'.join(list) - if len(list) % 2 == 0: - # String ends while still quoted according to VCProj's convention. This - # means the delimiter and the next list item that follow this one in the - # .vcproj file will be misinterpreted as part of this item. There is nothing - # we can do about this. Adding an extra quote would correct the problem in - # the VCProj but cause the same problem on the final command-line. Moving - # the item to the end of the list does works, but that's only possible if - # there's only one such item. Let's just warn the user. - print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' + - 'quotes in ' + s) - return s - - -def _EscapeCppDefine(s): - """Escapes a CPP define so that it will reach the compiler unaltered.""" - s = _EscapeEnvironmentVariableExpansion(s) - s = _EscapeCommandLineArgument(s) - s = _EscapeVCProjCommandLineArgListItem(s) - return s - - -def _GenerateRules(p, output_dir, options, spec, - sources, excluded_sources, - actions_to_add): - """Generate all the rules for a particular project. - - Arguments: - output_dir: directory to emit rules to - options: global options passed to the generator - spec: the specification for this project - sources: the set of all known source files in this project - excluded_sources: the set of sources excluded from normal processing - actions_to_add: deferred list of actions to add in - """ - rules = spec.get('rules', []) - rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))] - rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))] - - # Handle rules that use a native rules file. - if rules_native: - _GenerateNativeRules(p, rules_native, output_dir, spec, options) - - # Handle external rules (non-native rules). - if rules_external: - _GenerateExternalRules(p, rules_external, output_dir, spec, - sources, options, actions_to_add) - - # Add outputs generated by each rule (if applicable). - for rule in rules: - # Done if not processing outputs as sources. - if int(rule.get('process_outputs_as_sources', False)): - # Add in the outputs from this rule. - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - inputs, outputs = _RuleInputsAndOutputs(rule, tf) - inputs.remove(tf) - sources.update(inputs) - excluded_sources.update(inputs) - sources.update(outputs) - - -def _GenerateProject(vcproj_filename, build_file, spec, options, version): - """Generates a vcproj file. - - Arguments: - vcproj_filename: Filename of the vcproj file to generate. - build_file: Filename of the .gyp file that the vcproj file comes from. - spec: The target dictionary containing the properties of the target. - """ - # Pluck out the default configuration. - default_config = spec['configurations'][spec['default_configuration']] - # Decide the guid of the project. - guid = default_config.get('msvs_guid') - if guid: - if VALID_MSVS_GUID_CHARS.match(guid) == None: - raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' % - (guid, VALID_MSVS_GUID_CHARS.pattern)) - guid = '{%s}' % guid - - # Skip emitting anything if told to with msvs_existing_vcproj option. 
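The CommandLineToArgv quoting rules spelled out above reduce to one regex substitution: each literal double quote must end up preceded by an odd number of backslashes, so n existing backslashes become 2n plus the escaping one, and the whole argument is then wrapped in quotes. A self-contained sketch of that step (the example strings are arbitrary):

import re

_quote_re = re.compile(r'(\\*)"')

def escape_command_line_argument(s):
    def replace(match):
        # Double the run of backslashes, then add the backslash-escaped quote.
        return 2 * match.group(1) + '\\"'
    return '"' + _quote_re.sub(replace, s) + '"'

if __name__ == '__main__':
    print(escape_command_line_argument('say "hi"'))
    # -> "say \"hi\""
    print(escape_command_line_argument('one\\" backslash'))
    # the single backslash before the quote becomes three (2*1 + 1)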
- if default_config.get('msvs_existing_vcproj'): - return guid - - #print 'Generating %s' % vcproj_filename - - vcproj_dir = os.path.dirname(vcproj_filename) - if vcproj_dir and not os.path.exists(vcproj_dir): - os.makedirs(vcproj_dir) - - # Gather list of unique platforms. - platforms = set() - for configuration in spec['configurations']: - platforms.add(_ConfigPlatform(spec['configurations'][configuration])) - platforms = list(platforms) - - p = MSVSProject.Writer(vcproj_filename, version=version) - p.Create(spec['target_name'], guid=guid, platforms=platforms) - - # Create the user file. - (domain, username) = _GetDomainAndUserName() - vcuser_filename = '.'.join([vcproj_filename, domain, username, 'user']) - user_file = MSVSUserFile.Writer(vcuser_filename, version=version) - user_file.Create(spec['target_name']) - - # Get directory project file is in. - gyp_dir = os.path.split(vcproj_filename)[0] - - # Pick target configuration type. - try: - config_type = { - 'executable': '1', # .exe - 'shared_library': '2', # .dll - 'loadable_module': '2', # .dll - 'static_library': '4', # .lib - 'none': '10', # Utility type - 'dummy_executable': '1', # .exe - }[spec['type']] - except KeyError, e: - if spec.get('type'): - raise Exception('Target type %s is not a valid target type for ' - 'target %s in %s.' % - (spec['type'], spec['target_name'], build_file)) - else: - raise Exception('Missing type field for target %s in %s.' % - (spec['target_name'], build_file)) - - for config_name, c in spec['configurations'].iteritems(): - # Process each configuration. - vsprops_dirs = c.get('msvs_props', []) - vsprops_dirs = [_FixPath(i) for i in vsprops_dirs] - - # Prepare the list of tools as a dictionary. - tools = dict() - - # Add in msvs_settings. - for tool in c.get('msvs_settings', {}): - settings = c['msvs_settings'][tool] - for setting in settings: - _ToolAppend(tools, tool, setting, settings[setting]) - - # Add in includes. - # TODO(bradnelson): include_dirs should really be flexible enough not to - # require this sort of thing. - include_dirs = ( - c.get('include_dirs', []) + - c.get('msvs_system_include_dirs', [])) - resource_include_dirs = c.get('resource_include_dirs', include_dirs) - include_dirs = [_FixPath(i) for i in include_dirs] - resource_include_dirs = [_FixPath(i) for i in resource_include_dirs] - _ToolAppend(tools, 'VCCLCompilerTool', - 'AdditionalIncludeDirectories', include_dirs) - _ToolAppend(tools, 'VCResourceCompilerTool', - 'AdditionalIncludeDirectories', resource_include_dirs) - - # Add in libraries. - libraries = spec.get('libraries', []) - # Strip out -l, as it is not used on windows (but is needed so we can pass - # in libraries that are assumed to be in the default library path). - libraries = [re.sub('^(\-l)', '', lib) for lib in libraries] - # Add them. - _ToolAppend(tools, 'VCLinkerTool', - 'AdditionalDependencies', libraries) - - # Select a name for the output file. 
- output_file_map = { - 'executable': ('VCLinkerTool', '$(OutDir)\\', '.exe'), - 'shared_library': ('VCLinkerTool', '$(OutDir)\\', '.dll'), - 'loadable_module': ('VCLinkerTool', '$(OutDir)\\', '.dll'), - 'static_library': ('VCLibrarianTool', '$(OutDir)\\lib\\', '.lib'), - 'dummy_executable': ('VCLinkerTool', '$(IntDir)\\', '.junk'), - } - output_file_props = output_file_map.get(spec['type']) - if output_file_props and int(spec.get('msvs_auto_output_file', 1)): - vc_tool, out_dir, suffix = output_file_props - out_dir = spec.get('product_dir', out_dir) - product_extension = spec.get('product_extension') - if product_extension: - suffix = '.' + product_extension - prefix = spec.get('product_prefix', '') - product_name = spec.get('product_name', '$(ProjectName)') - out_file = ntpath.join(out_dir, prefix + product_name + suffix) - _ToolAppend(tools, vc_tool, 'OutputFile', out_file, - only_if_unset=True) - - # Add defines. - defines = [] - for d in c.get('defines', []): - if type(d) == list: - fd = '='.join([str(dpart) for dpart in d]) - else: - fd = str(d) - fd = _EscapeCppDefine(fd) - defines.append(fd) - - _ToolAppend(tools, 'VCCLCompilerTool', - 'PreprocessorDefinitions', defines) - _ToolAppend(tools, 'VCResourceCompilerTool', - 'PreprocessorDefinitions', defines) - - # Change program database directory to prevent collisions. - _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName', - '$(IntDir)\\$(ProjectName)\\vc80.pdb') - - # Add disabled warnings. - disabled_warnings = [str(i) for i in c.get('msvs_disabled_warnings', [])] - _ToolAppend(tools, 'VCCLCompilerTool', - 'DisableSpecificWarnings', disabled_warnings) - - # Add Pre-build. - prebuild = c.get('msvs_prebuild') - _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild) - - # Add Post-build. - postbuild = c.get('msvs_postbuild') - _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild) - - # Turn on precompiled headers if appropriate. - header = c.get('msvs_precompiled_header') - if header: - header = os.path.split(header)[1] - _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2') - _ToolAppend(tools, 'VCCLCompilerTool', - 'PrecompiledHeaderThrough', header) - _ToolAppend(tools, 'VCCLCompilerTool', - 'ForcedIncludeFiles', header) - - # Loadable modules don't generate import libraries; - # tell dependent projects to not expect one. - if spec['type'] == 'loadable_module': - _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true') - - # Set the module definition file if any. - if spec['type'] in ['shared_library', 'loadable_module']: - def_files = [s for s in spec.get('sources', []) if s.endswith('.def')] - if len(def_files) == 1: - _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', - _FixPath(def_files[0])) - elif def_files: - raise ValueError('Multiple module definition files in one target, ' - 'target %s lists multiple .def files: %s' % ( - spec['target_name'], ' '.join(def_files))) - - # Convert tools to expected form. - tool_list = [] - for tool, settings in tools.iteritems(): - # Collapse settings with lists. - settings_fixed = {} - for setting, value in settings.iteritems(): - if type(value) == list: - if ((tool == 'VCLinkerTool' and - setting == 'AdditionalDependencies') or - setting == 'AdditionalOptions'): - settings_fixed[setting] = ' '.join(value) - else: - settings_fixed[setting] = ';'.join(value) - else: - settings_fixed[setting] = value - # Add in this tool. - tool_list.append(MSVSProject.Tool(tool, settings_fixed)) - - # Prepare configuration attributes. 
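A rough standalone sketch of the collapsing step in the tool-conversion loop above: list-valued settings are space-joined for AdditionalOptions and the linker's AdditionalDependencies, and semicolon-joined otherwise. The sample input in the trailing comment is invented for illustration:

  def collapse_tool_settings(tool, settings):
    # Mirrors the collapsing logic above: pick the join character per setting.
    collapsed = {}
    for setting, value in settings.items():
      if isinstance(value, list):
        if ((tool == 'VCLinkerTool' and setting == 'AdditionalDependencies') or
            setting == 'AdditionalOptions'):
          collapsed[setting] = ' '.join(value)
        else:
          collapsed[setting] = ';'.join(value)
      else:
        collapsed[setting] = value
    return collapsed

  # Hypothetical input, for illustration only:
  # collapse_tool_settings('VCCLCompilerTool',
  #                        {'AdditionalIncludeDirectories': ['a', 'b'],
  #                         'AdditionalOptions': ['/MP', '/bigobj']})
  # -> {'AdditionalIncludeDirectories': 'a;b', 'AdditionalOptions': '/MP /bigobj'}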
- prepared_attrs = {} - source_attrs = c.get('msvs_configuration_attributes', {}) - for a in source_attrs: - prepared_attrs[a] = source_attrs[a] - # Add props files. - if vsprops_dirs: - prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs) - # Set configuration type. - prepared_attrs['ConfigurationType'] = config_type - if not prepared_attrs.has_key('OutputDirectory'): - prepared_attrs['OutputDirectory'] = '$(SolutionDir)$(ConfigurationName)' - if not prepared_attrs.has_key('IntermediateDirectory'): - intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)' - prepared_attrs['IntermediateDirectory'] = intermediate - - # Add in this configuration. - p.AddConfig(_ConfigFullName(config_name, c), - attrs=prepared_attrs, tools=tool_list) - - # Prepare list of sources and excluded sources. - sources = set(spec.get('sources', [])) - excluded_sources = set() - # Add in the gyp file. - gyp_file = os.path.split(build_file)[1] - sources.add(gyp_file) - # Add in 'action' inputs and outputs. - for a in spec.get('actions', []): - inputs = a.get('inputs') - if not inputs: - # This is an action with no inputs. Make the primary input - # by the .gyp file itself so Visual Studio has a place to - # hang the custom build rule. - inputs = [gyp_file] - a['inputs'] = inputs - primary_input = _PickPrimaryInput(inputs) - inputs = set(inputs) - sources.update(inputs) - inputs.remove(primary_input) - excluded_sources.update(inputs) - if int(a.get('process_outputs_as_sources', False)): - outputs = set(a.get('outputs', [])) - sources.update(outputs) - # Add in 'copies' inputs and outputs. - for cpy in spec.get('copies', []): - files = set(cpy.get('files', [])) - sources.update(files) - - # Add rules. - actions_to_add = [] - _GenerateRules(p, gyp_dir, options, spec, - sources, excluded_sources, - actions_to_add) - - # Exclude excluded sources coming into the generator. - excluded_sources.update(set(spec.get('sources_excluded', []))) - # Add excluded sources into sources for good measure. - sources.update(excluded_sources) - # Convert to proper windows form. - # NOTE: sources goes from being a set to a list here. - # NOTE: excluded_sources goes from being a set to a list here. - sources = [_FixPath(i) for i in sources] - # Convert to proper windows form. - excluded_sources = [_FixPath(i) for i in excluded_sources] - - # If any non-native rules use 'idl' as an extension exclude idl files. - # Gather a list here to use later. - using_idl = False - for rule in spec.get('rules', []): - if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)): - using_idl = True - break - if using_idl: - excluded_idl = [i for i in sources if i.endswith('.idl')] - else: - excluded_idl = [] - - # List of precompiled header related keys. - precomp_keys = [ - 'msvs_precompiled_header', - 'msvs_precompiled_source', - ] - - # Gather a list of precompiled header related sources. - precompiled_related = [] - for config_name, c in spec['configurations'].iteritems(): - for k in precomp_keys: - f = c.get(k) - if f: - precompiled_related.append(_FixPath(f)) - - # Find the excluded ones, minus the precompiled header related ones. - fully_excluded = [i for i in excluded_sources if i not in precompiled_related] - - # Convert to folders and the right slashes. - sources = [i.split('\\') for i in sources] - sources = _SourceInFolders(sources, excluded=fully_excluded) - # Add in dummy file for type none. - if spec['type'] == 'dummy_executable': - # Pull in a dummy main so it can link successfully. 
- dummy_relpath = gyp.common.RelativePath( - options.depth + '\\tools\\gyp\\gyp_dummy.c', gyp_dir) - sources.append(dummy_relpath) - # Add in files. - p.AddFiles(sources) - - # Add deferred actions to add. - for a in actions_to_add: - _AddCustomBuildTool(p, spec, - inputs=a['inputs'], - outputs=a['outputs'], - description=a['description'], - cmd=a['cmd']) - - # Exclude excluded sources from being built. - for f in excluded_sources: - for config_name, c in spec['configurations'].iteritems(): - precomped = [_FixPath(c.get(i, '')) for i in precomp_keys] - # Don't do this for ones that are precompiled header related. - if f not in precomped: - p.AddFileConfig(f, _ConfigFullName(config_name, c), - {'ExcludedFromBuild': 'true'}) - - # If any non-native rules use 'idl' as an extension exclude idl files. - # Exclude them now. - for config_name, c in spec['configurations'].iteritems(): - for f in excluded_idl: - p.AddFileConfig(f, _ConfigFullName(config_name, c), - {'ExcludedFromBuild': 'true'}) - - # Add in tool files (rules). - tool_files = set() - for config_name, c in spec['configurations'].iteritems(): - for f in c.get('msvs_tool_files', []): - tool_files.add(f) - for f in tool_files: - p.AddToolFile(f) - - # Handle pre-compiled headers source stubs specially. - for config_name, c in spec['configurations'].iteritems(): - source = c.get('msvs_precompiled_source') - if source: - source = _FixPath(source) - # UsePrecompiledHeader=1 for if using precompiled headers. - tool = MSVSProject.Tool('VCCLCompilerTool', - {'UsePrecompiledHeader': '1'}) - p.AddFileConfig(source, _ConfigFullName(config_name, c), - {}, tools=[tool]) - - # Add actions. - actions = spec.get('actions', []) - for a in actions: - cmd = _PrepareAction(spec, a, has_input_path=False) - _AddCustomBuildTool(p, spec, - inputs=a.get('inputs', []), - outputs=a.get('outputs', []), - description=a.get('message', a['action_name']), - cmd=cmd) - - # Add run_as and test targets. - has_run_as = False - if spec.get('run_as') or int(spec.get('test', 0)): - has_run_as = True - run_as = spec.get('run_as', { - 'action' : ['$(TargetPath)', '--gtest_print_time'], - }) - working_directory = run_as.get('working_directory', '.') - action = run_as.get('action', []) - environment = run_as.get('environment', []) - for config_name, c_data in spec['configurations'].iteritems(): - _SetRunAs(user_file, config_name, c_data, - action, environment, working_directory) - - # Add copies. - for cpy in spec.get('copies', []): - for src in cpy.get('files', []): - dst = os.path.join(cpy['destination'], os.path.basename(src)) - # _AddCustomBuildTool() will call _FixPath() on the inputs and - # outputs, so do the same for our generated command line. - if src.endswith('/'): - src_bare = src[:-1] - base_dir = posixpath.split(src_bare)[0] - outer_dir = posixpath.split(src_bare)[1] - cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % ( - _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir) - _AddCustomBuildTool(p, spec, - inputs=[src], - outputs=['dummy_copies', dst], - description='Copying %s to %s' % (src, dst), - cmd=cmd) - else: - cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % ( - _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst)) - _AddCustomBuildTool(p, spec, - inputs=[src], outputs=[dst], - description='Copying %s to %s' % (src, dst), - cmd=cmd) - - # Write it out. - p.Write() - - # Write out the user file, but only if we need to. - if has_run_as: - user_file.Write() - - # Return the guid so we can refer to it elsewhere. 
- return p.guid - - -def _GetPathDict(root, path): - if path == '': - return root - parent, folder = os.path.split(path) - parent_dict = _GetPathDict(root, parent) - if folder not in parent_dict: - parent_dict[folder] = dict() - return parent_dict[folder] - - -def _DictsToFolders(base_path, bucket, flat): - # Convert to folders recursively. - children = [] - for folder, contents in bucket.iteritems(): - if type(contents) == dict: - folder_children = _DictsToFolders(os.path.join(base_path, folder), - contents, flat) - if flat: - children += folder_children - else: - folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder), - name='(' + folder + ')', - entries=folder_children) - children.append(folder_children) - else: - children.append(contents) - return children - - -def _CollapseSingles(parent, node): - # Recursively explorer the tree of dicts looking for projects which are - # the sole item in a folder which has the same name as the project. Bring - # such projects up one level. - if (type(node) == dict and - len(node) == 1 and - node.keys()[0] == parent + '.vcproj'): - return node[node.keys()[0]] - if type(node) != dict: - return node - for child in node.keys(): - node[child] = _CollapseSingles(child, node[child]) - return node - - -def _GatherSolutionFolders(project_objs, flat): - root = {} - # Convert into a tree of dicts on path. - for p in project_objs.keys(): - gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2] - gyp_dir = os.path.dirname(gyp_file) - path_dict = _GetPathDict(root, gyp_dir) - path_dict[target + '.vcproj'] = project_objs[p] - # Walk down from the top until we hit a folder that has more than one entry. - # In practice, this strips the top-level "src/" dir from the hierarchy in - # the solution. - while len(root) == 1 and type(root[root.keys()[0]]) == dict: - root = root[root.keys()[0]] - # Collapse singles. - root = _CollapseSingles('', root) - # Merge buckets until everything is a root entry. - return _DictsToFolders('', root, flat) - - -def _ProjectObject(sln, qualified_target, project_objs, projects): - # Done if this project has an object. - if project_objs.get(qualified_target): - return project_objs[qualified_target] - # Get dependencies for this project. - spec = projects[qualified_target]['spec'] - deps = spec.get('dependencies', []) - # Get objects for each dependency. - deps = [_ProjectObject(sln, d, project_objs, projects) for d in deps] - # Find relative path to vcproj from sln. - vcproj_rel_path = gyp.common.RelativePath( - projects[qualified_target]['vcproj_path'], os.path.split(sln)[0]) - vcproj_rel_path = _FixPath(vcproj_rel_path) - # Prepare a dict indicating which project configurations are used for which - # solution configurations for this target. - config_platform_overrides = {} - for config_name, c in spec['configurations'].iteritems(): - config_fullname = _ConfigFullName(config_name, c) - platform = c.get('msvs_target_platform', _ConfigPlatform(c)) - fixed_config_fullname = '%s|%s' % ( - _ConfigBaseName(config_name, _ConfigPlatform(c)), platform) - config_platform_overrides[config_fullname] = fixed_config_fullname - # Create object for this project. - obj = MSVSNew.MSVSProject( - vcproj_rel_path, - name=spec['target_name'], - guid=projects[qualified_target]['guid'], - dependencies=deps, - config_platform_overrides=config_platform_overrides) - # Store it to the list of objects. - project_objs[qualified_target] = obj - # Return project object. 
- return obj - - -def CalculateVariables(default_variables, params): - """Generated variables that require params to be known.""" - - generator_flags = params.get('generator_flags', {}) - - # Select project file format version (if unset, default to auto detecting). - msvs_version = \ - MSVSVersion.SelectVisualStudioVersion(generator_flags.get('msvs_version', - 'auto')) - # Stash msvs_version for later (so we don't have to probe the system twice). - params['msvs_version'] = msvs_version - - # Set a variable so conditions can be based on msvs_version. - default_variables['MSVS_VERSION'] = msvs_version.ShortName() - - # To determine processor word size on Windows, in addition to checking - # PROCESSOR_ARCHITECTURE (which reflects the word size of the current - # process), it is also necessary to check PROCESSOR_ARCITEW6432 (which - # contains the actual word size of the system when running thru WOW64). - if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or - os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0): - default_variables['MSVS_OS_BITS'] = 64 - else: - default_variables['MSVS_OS_BITS'] = 32 - - -def GenerateOutput(target_list, target_dicts, data, params): - """Generate .sln and .vcproj files. - - This is the entry point for this generator. - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - data: Dictionary containing per .gyp data. - """ - global fixpath_prefix - - options = params['options'] - generator_flags = params.get('generator_flags', {}) - - # Get the project file format version back out of where we stashed it in - # GeneratorCalculatedVariables. - msvs_version = params['msvs_version'] - - # Prepare the set of configurations. - configs = set() - for qualified_target in target_list: - build_file = gyp.common.BuildFile(qualified_target) - spec = target_dicts[qualified_target] - for config_name, c in spec['configurations'].iteritems(): - configs.add(_ConfigFullName(config_name, c)) - configs = list(configs) - - # Generate each project. - projects = {} - for qualified_target in target_list: - build_file = gyp.common.BuildFile(qualified_target) - spec = target_dicts[qualified_target] - if spec['toolset'] != 'target': - raise Exception( - 'Multiple toolsets not supported in msvs build (target %s)' % - qualified_target) - default_config = spec['configurations'][spec['default_configuration']] - vcproj_filename = default_config.get('msvs_existing_vcproj') - if not vcproj_filename: - vcproj_filename = spec['target_name'] + options.suffix + '.vcproj' - vcproj_path = os.path.join(os.path.split(build_file)[0], vcproj_filename) - if options.generator_output: - projectDirPath = os.path.dirname(os.path.abspath(vcproj_path)) - vcproj_path = os.path.join(options.generator_output, vcproj_path) - fixpath_prefix = gyp.common.RelativePath(projectDirPath, - os.path.dirname(vcproj_path)) - projects[qualified_target] = { - 'vcproj_path': vcproj_path, - 'guid': _GenerateProject(vcproj_path, build_file, - spec, options, version=msvs_version), - 'spec': spec, - } - - fixpath_prefix = None - - for build_file in data.keys(): - # Validate build_file extension - if build_file[-4:] != '.gyp': - continue - sln_path = build_file[:-4] + options.suffix + '.sln' - if options.generator_output: - sln_path = os.path.join(options.generator_output, sln_path) - #print 'Generating %s' % sln_path - # Get projects in the solution, and their dependents. 
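The solution-folder helpers above (_GetPathDict, _CollapseSingles, _GatherSolutionFolders) build a nested dict keyed on path components and then hoist projects that are alone in a folder of the same name. A simplified, self-contained sketch, with made-up project paths:

  import os

  def get_path_dict(root, path):
    # Walk/create nested dicts, one level per path component (as _GetPathDict does).
    if path == '':
      return root
    parent, folder = os.path.split(path)
    parent_dict = get_path_dict(root, parent)
    return parent_dict.setdefault(folder, {})

  def collapse_singles(parent, node):
    # Pull a project up one level when it is the only entry in a folder that
    # shares its name (mirrors _CollapseSingles).
    if (isinstance(node, dict) and len(node) == 1 and
        list(node.keys())[0] == parent + '.vcproj'):
      return node[list(node.keys())[0]]
    if not isinstance(node, dict):
      return node
    for child in list(node.keys()):
      node[child] = collapse_singles(child, node[child])
    return node

  root = {}
  get_path_dict(root, 'base')['base.vcproj'] = 'base project'
  get_path_dict(root, 'net/tools')['dump.vcproj'] = 'dump project'
  # root is now {'base': {'base.vcproj': ...}, 'net': {'tools': {'dump.vcproj': ...}}}
  root = collapse_singles('', root)
  # 'base' collapses to the project itself because it was the folder's only entry.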
- sln_projects = gyp.common.BuildFileTargets(target_list, build_file) - sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects) - # Convert projects to Project Objects. - project_objs = {} - for p in sln_projects: - _ProjectObject(sln_path, p, project_objs, projects) - # Create folder hierarchy. - root_entries = _GatherSolutionFolders( - project_objs, flat=msvs_version.FlatSolution()) - # Create solution. - sln = MSVSNew.MSVSSolution(sln_path, - entries=root_entries, - variants=configs, - websiteProperties=False, - version=msvs_version) - sln.Write() diff --git a/third_party/gyp/pylib/gyp/generator/scons.py b/third_party/gyp/pylib/gyp/generator/scons.py deleted file mode 100644 index c5338e9..0000000 --- a/third_party/gyp/pylib/gyp/generator/scons.py +++ /dev/null @@ -1,1047 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import gyp -import gyp.common -import gyp.SCons as SCons -import os.path -import pprint -import re - - -# TODO: remove when we delete the last WriteList() call in this module -WriteList = SCons.WriteList - - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': '${LIBPREFIX}', - 'SHARED_LIB_PREFIX': '${SHLIBPREFIX}', - 'STATIC_LIB_SUFFIX': '${LIBSUFFIX}', - 'SHARED_LIB_SUFFIX': '${SHLIBSUFFIX}', - 'INTERMEDIATE_DIR': '${INTERMEDIATE_DIR}', - 'SHARED_INTERMEDIATE_DIR': '${SHARED_INTERMEDIATE_DIR}', - 'OS': 'linux', - 'PRODUCT_DIR': '$TOP_BUILDDIR', - 'SHARED_LIB_DIR': '$LIB_DIR', - 'LIB_DIR': '$LIB_DIR', - 'RULE_INPUT_ROOT': '${SOURCE.filebase}', - 'RULE_INPUT_EXT': '${SOURCE.suffix}', - 'RULE_INPUT_NAME': '${SOURCE.file}', - 'RULE_INPUT_PATH': '${SOURCE.abspath}', - 'CONFIGURATION_NAME': '${CONFIG_NAME}', -} - -# Tell GYP how to process the input for us. -generator_handles_variants = True -generator_wants_absolute_build_file_paths = True - - -def FixPath(path, prefix): - if not os.path.isabs(path) and not path[0] == '$': - path = prefix + path - return path - - -header = """\ -# This file is generated; do not edit. -""" - - -_alias_template = """ -if GetOption('verbose'): - _action = Action([%(action)s]) -else: - _action = Action([%(action)s], %(message)s) -_outputs = env.Alias( - ['_%(target_name)s_action'], - %(inputs)s, - _action -) -env.AlwaysBuild(_outputs) -""" - -_run_as_template = """ -if GetOption('verbose'): - _action = Action([%(action)s]) -else: - _action = Action([%(action)s], %(message)s) -""" - -_run_as_template_suffix = """ -_run_as_target = env.Alias('run_%(target_name)s', target_files, _action) -env.Requires(_run_as_target, [ - Alias('%(target_name)s'), -]) -env.AlwaysBuild(_run_as_target) -""" - -_command_template = """ -if GetOption('verbose'): - _action = Action([%(action)s]) -else: - _action = Action([%(action)s], %(message)s) -_outputs = env.Command( - %(outputs)s, - %(inputs)s, - _action -) -""" - -# This is copied from the default SCons action, updated to handle symlinks. 
-_copy_action_template = """ -import shutil -import SCons.Action - -def _copy_files_or_dirs_or_symlinks(dest, src): - SCons.Node.FS.invalidate_node_memos(dest) - if SCons.Util.is_List(src) and os.path.isdir(dest): - for file in src: - shutil.copy2(file, dest) - return 0 - elif os.path.islink(src): - linkto = os.readlink(src) - os.symlink(linkto, dest) - return 0 - elif os.path.isfile(src): - return shutil.copy2(src, dest) - else: - return shutil.copytree(src, dest, 1) - -def _copy_files_or_dirs_or_symlinks_str(dest, src): - return 'Copying %s to %s ...' % (src, dest) - -GYPCopy = SCons.Action.ActionFactory(_copy_files_or_dirs_or_symlinks, - _copy_files_or_dirs_or_symlinks_str, - convert=str) -""" - -_rule_template = """ -%(name)s_additional_inputs = %(inputs)s -%(name)s_outputs = %(outputs)s -def %(name)s_emitter(target, source, env): - return (%(name)s_outputs, source + %(name)s_additional_inputs) -if GetOption('verbose'): - %(name)s_action = Action([%(action)s]) -else: - %(name)s_action = Action([%(action)s], %(message)s) -env['BUILDERS']['%(name)s'] = Builder(action=%(name)s_action, - emitter=%(name)s_emitter) - -_outputs = [] -_processed_input_files = [] -for infile in input_files: - if (type(infile) == type('') - and not os.path.isabs(infile) - and not infile[0] == '$'): - infile = %(src_dir)r + infile - if str(infile).endswith('.%(extension)s'): - _generated = env.%(name)s(infile) - env.Precious(_generated) - _outputs.append(_generated) - %(process_outputs_as_sources_line)s - else: - _processed_input_files.append(infile) -prerequisites.extend(_outputs) -input_files = _processed_input_files -""" - -_spawn_hack = """ -import re -import SCons.Platform.posix -needs_shell = re.compile('["\\'>= 2.5: - return os.sysconf('SC_NPROCESSORS_ONLN') - else: # Mac OS X with Python < 2.5: - return int(os.popen2("sysctl -n hw.ncpu")[1].read()) - # Windows: - if os.environ.has_key('NUMBER_OF_PROCESSORS'): - return max(int(os.environ.get('NUMBER_OF_PROCESSORS', '1')), 1) - return 1 # Default - -# Support PROGRESS= to show progress in different ways. -p = ARGUMENTS.get('PROGRESS') -if p == 'spinner': - Progress(['/\\r', '|\\r', '\\\\\\r', '-\\r'], - interval=5, - file=open('/dev/tty', 'w')) -elif p == 'name': - Progress('$TARGET\\r', overwrite=True, file=open('/dev/tty', 'w')) - -# Set the default -j value based on the number of processors. -SetOption('num_jobs', GetProcessorCount() + 1) - -# Have SCons use its cached dependency information. -SetOption('implicit_cache', 1) - -# Only re-calculate MD5 checksums if a timestamp has changed. -Decider('MD5-timestamp') - -# Since we set the -j value by default, suppress SCons warnings about being -# unable to support parallel build on versions of Python with no threading. -default_warnings = ['no-no-parallel-support'] -SetOption('warn', default_warnings + GetOption('warn')) - -AddOption('--mode', nargs=1, dest='conf_list', default=[], - action='append', help='Configuration to build.') - -AddOption('--verbose', dest='verbose', default=False, - action='store_true', help='Verbose command-line output.') - - -# -sconscript_file_map = %(sconscript_files)s - -class LoadTarget: - ''' - Class for deciding if a given target sconscript is to be included - based on a list of included target names, optionally prefixed with '-' - to exclude a target name. - ''' - def __init__(self, load): - ''' - Initialize a class with a list of names for possible loading. 
- - Arguments: - load: list of elements in the LOAD= specification - ''' - self.included = set([c for c in load if not c.startswith('-')]) - self.excluded = set([c[1:] for c in load if c.startswith('-')]) - - if not self.included: - self.included = set(['all']) - - def __call__(self, target): - ''' - Returns True if the specified target's sconscript file should be - loaded, based on the initialized included and excluded lists. - ''' - return (target in self.included or - ('all' in self.included and not target in self.excluded)) - -if 'LOAD' in ARGUMENTS: - load = ARGUMENTS['LOAD'].split(',') -else: - load = [] -load_target = LoadTarget(load) - -sconscript_files = [] -for target, sconscript in sconscript_file_map.iteritems(): - if load_target(target): - sconscript_files.append(sconscript) - - -target_alias_list= [] - -conf_list = GetOption('conf_list') -if conf_list: - # In case the same --mode= value was specified multiple times. - conf_list = list(set(conf_list)) -else: - conf_list = [%(default_configuration)r] - -sconsbuild_dir = Dir(%(sconsbuild_dir)s) - - -def FilterOut(self, **kw): - kw = SCons.Environment.copy_non_reserved_keywords(kw) - for key, val in kw.items(): - envval = self.get(key, None) - if envval is None: - # No existing variable in the environment, so nothing to delete. - continue - - for vremove in val: - # Use while not if, so we can handle duplicates. - while vremove in envval: - envval.remove(vremove) - - self[key] = envval - - # TODO(sgk): SCons.Environment.Append() has much more logic to deal - # with various types of values. We should handle all those cases in here - # too. (If variable is a dict, etc.) - - -non_compilable_suffixes = { - 'LINUX' : set([ - '.bdic', - '.css', - '.dat', - '.fragment', - '.gperf', - '.h', - '.hh', - '.hpp', - '.html', - '.hxx', - '.idl', - '.in', - '.in0', - '.in1', - '.js', - '.mk', - '.rc', - '.sigs', - '', - ]), - 'WINDOWS' : set([ - '.h', - '.hh', - '.hpp', - '.dat', - '.idl', - '.in', - '.in0', - '.in1', - ]), -} - -def compilable(env, file): - base, ext = os.path.splitext(str(file)) - if ext in non_compilable_suffixes[env['TARGET_PLATFORM']]: - return False - return True - -def compilable_files(env, sources): - return [x for x in sources if compilable(env, x)] - -def GypProgram(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.Program(target, source, *args, **kw) - if env.get('INCREMENTAL'): - env.Precious(result) - return result - -def GypTestProgram(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.Program(target, source, *args, **kw) - if env.get('INCREMENTAL'): - env.Precious(*result) - return result - -def GypLibrary(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.Library(target, source, *args, **kw) - return result - -def GypLoadableModule(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.LoadableModule(target, source, *args, **kw) - return result - -def GypStaticLibrary(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.StaticLibrary(target, source, *args, **kw) - return result - -def GypSharedLibrary(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.SharedLibrary(target, source, *args, **kw) - if env.get('INCREMENTAL'): - env.Precious(result) - return result - -def add_gyp_methods(env): - env.AddMethod(GypProgram) - env.AddMethod(GypTestProgram) - env.AddMethod(GypLibrary) - 
env.AddMethod(GypLoadableModule) - env.AddMethod(GypStaticLibrary) - env.AddMethod(GypSharedLibrary) - - env.AddMethod(FilterOut) - - env.AddMethod(compilable) - - -base_env = Environment( - tools = %(scons_tools)s, - INTERMEDIATE_DIR='$OBJ_DIR/${COMPONENT_NAME}/_${TARGET_NAME}_intermediate', - LIB_DIR='$TOP_BUILDDIR/lib', - OBJ_DIR='$TOP_BUILDDIR/obj', - SCONSBUILD_DIR=sconsbuild_dir.abspath, - SHARED_INTERMEDIATE_DIR='$OBJ_DIR/_global_intermediate', - SRC_DIR=Dir(%(src_dir)r), - TARGET_PLATFORM='LINUX', - TOP_BUILDDIR='$SCONSBUILD_DIR/$CONFIG_NAME', - LIBPATH=['$LIB_DIR'], -) - -if not GetOption('verbose'): - base_env.SetDefault( - ARCOMSTR='Creating library $TARGET', - ASCOMSTR='Assembling $TARGET', - CCCOMSTR='Compiling $TARGET', - CONCATSOURCECOMSTR='ConcatSource $TARGET', - CXXCOMSTR='Compiling $TARGET', - LDMODULECOMSTR='Building loadable module $TARGET', - LINKCOMSTR='Linking $TARGET', - MANIFESTCOMSTR='Updating manifest for $TARGET', - MIDLCOMSTR='Compiling IDL $TARGET', - PCHCOMSTR='Precompiling $TARGET', - RANLIBCOMSTR='Indexing $TARGET', - RCCOMSTR='Compiling resource $TARGET', - SHCCCOMSTR='Compiling $TARGET', - SHCXXCOMSTR='Compiling $TARGET', - SHLINKCOMSTR='Linking $TARGET', - SHMANIFESTCOMSTR='Updating manifest for $TARGET', - ) - -add_gyp_methods(base_env) - -for conf in conf_list: - env = base_env.Clone(CONFIG_NAME=conf) - SConsignFile(env.File('$TOP_BUILDDIR/.sconsign').abspath) - for sconscript in sconscript_files: - target_alias = env.SConscript(sconscript, exports=['env']) - if target_alias: - target_alias_list.extend(target_alias) - -Default(Alias('all', target_alias_list)) - -help_fmt = ''' -Usage: hammer [SCONS_OPTIONS] [VARIABLES] [TARGET] ... - -Local command-line build options: - --mode=CONFIG Configuration to build: - --mode=Debug [default] - --mode=Release - --verbose Print actual executed command lines. - -Supported command-line build variables: - LOAD=[module,...] 
Comma-separated list of components to load in the - dependency graph ('-' prefix excludes) - PROGRESS=type Display a progress indicator: - name: print each evaluated target name - spinner: print a spinner every 5 targets - -The following TARGET names can also be used as LOAD= module names: - -%%s -''' - -if GetOption('help'): - def columnar_text(items, width=78, indent=2, sep=2): - result = [] - colwidth = max(map(len, items)) + sep - cols = (width - indent) / colwidth - if cols < 1: - cols = 1 - rows = (len(items) + cols - 1) / cols - indent = '%%*s' %% (indent, '') - sep = indent - for row in xrange(0, rows): - result.append(sep) - for i in xrange(row, len(items), rows): - result.append('%%-*s' %% (colwidth, items[i])) - sep = '\\n' + indent - result.append('\\n') - return ''.join(result) - - load_list = set(sconscript_file_map.keys()) - target_aliases = set(map(str, target_alias_list)) - - common = load_list and target_aliases - load_only = load_list - common - target_only = target_aliases - common - help_text = [help_fmt %% columnar_text(sorted(list(common)))] - if target_only: - fmt = "The following are additional TARGET names:\\n\\n%%s\\n" - help_text.append(fmt %% columnar_text(sorted(list(target_only)))) - if load_only: - fmt = "The following are additional LOAD= module names:\\n\\n%%s\\n" - help_text.append(fmt %% columnar_text(sorted(list(load_only)))) - Help(''.join(help_text)) -""" - -# TEMPLATE END -############################################################################# - - -def GenerateSConscriptWrapper(build_file, build_file_data, name, - output_filename, sconscript_files, - default_configuration): - """ - Generates the "wrapper" SConscript file (analogous to the Visual Studio - solution) that calls all the individual target SConscript files. - """ - output_dir = os.path.dirname(output_filename) - src_dir = build_file_data['_DEPTH'] - src_dir_rel = gyp.common.RelativePath(src_dir, output_dir) - if not src_dir_rel: - src_dir_rel = '.' - scons_settings = build_file_data.get('scons_settings', {}) - sconsbuild_dir = scons_settings.get('sconsbuild_dir', '#') - scons_tools = scons_settings.get('tools', ['default']) - - sconscript_file_lines = ['dict('] - for target in sorted(sconscript_files.keys()): - sconscript = sconscript_files[target] - sconscript_file_lines.append(' %s = %r,' % (target, sconscript)) - sconscript_file_lines.append(')') - - fp = open(output_filename, 'w') - fp.write(header) - fp.write(_wrapper_template % { - 'default_configuration' : default_configuration, - 'name' : name, - 'scons_tools' : repr(scons_tools), - 'sconsbuild_dir' : repr(sconsbuild_dir), - 'sconscript_files' : '\n'.join(sconscript_file_lines), - 'src_dir' : src_dir_rel, - }) - fp.close() - - # Generate the SConstruct file that invokes the wrapper SConscript. - dir, fname = os.path.split(output_filename) - SConstruct = os.path.join(dir, 'SConstruct') - fp = open(SConstruct, 'w') - fp.write(header) - fp.write('SConscript(%s)\n' % repr(fname)) - fp.close() - - -def TargetFilename(target, build_file=None, output_suffix=''): - """Returns the .scons file name for the specified target. - """ - if build_file is None: - build_file, target = gyp.common.ParseQualifiedTarget(target)[:2] - output_file = os.path.join(os.path.dirname(build_file), - target + output_suffix + '.scons') - return output_file - - -def GenerateOutput(target_list, target_dicts, data, params): - """ - Generates all the output files for the specified targets. 
- """ - options = params['options'] - - if options.generator_output: - def output_path(filename): - return filename.replace(params['cwd'], options.generator_output) - else: - def output_path(filename): - return filename - - default_configuration = None - - for qualified_target in target_list: - spec = target_dicts[qualified_target] - if spec['toolset'] != 'target': - raise Exception( - 'Multiple toolsets not supported in scons build (target %s)' % - qualified_target) - scons_target = SCons.Target(spec) - if scons_target.is_ignored: - continue - - # TODO: assumes the default_configuration of the first target - # non-Default target is the correct default for all targets. - # Need a better model for handle variation between targets. - if (not default_configuration and - spec['default_configuration'] != 'Default'): - default_configuration = spec['default_configuration'] - - build_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[:2] - output_file = TargetFilename(target, build_file, options.suffix) - if options.generator_output: - output_file = output_path(output_file) - - if not spec.has_key('libraries'): - spec['libraries'] = [] - - # Add dependent static library targets to the 'libraries' value. - deps = spec.get('dependencies', []) - spec['scons_dependencies'] = [] - for d in deps: - td = target_dicts[d] - target_name = td['target_name'] - spec['scons_dependencies'].append("Alias('%s')" % target_name) - if td['type'] in ('static_library', 'shared_library'): - libname = td.get('product_name', target_name) - spec['libraries'].append('lib' + libname) - if td['type'] == 'loadable_module': - prereqs = spec.get('scons_prerequisites', []) - # TODO: parameterize with <(SHARED_LIBRARY_*) variables? - td_target = SCons.Target(td) - td_target.target_prefix = '${SHLIBPREFIX}' - td_target.target_suffix = '${SHLIBSUFFIX}' - - GenerateSConscript(output_file, spec, build_file, data[build_file]) - - if not default_configuration: - default_configuration = 'Default' - - for build_file in sorted(data.keys()): - path, ext = os.path.splitext(build_file) - if ext != '.gyp': - continue - output_dir, basename = os.path.split(path) - output_filename = path + '_main' + options.suffix + '.scons' - - all_targets = gyp.common.AllTargets(target_list, target_dicts, build_file) - sconscript_files = {} - for t in all_targets: - scons_target = SCons.Target(target_dicts[t]) - if scons_target.is_ignored: - continue - bf, target = gyp.common.ParseQualifiedTarget(t)[:2] - target_filename = TargetFilename(target, bf, options.suffix) - tpath = gyp.common.RelativePath(target_filename, output_dir) - sconscript_files[target] = tpath - - output_filename = output_path(output_filename) - if sconscript_files: - GenerateSConscriptWrapper(build_file, data[build_file], basename, - output_filename, sconscript_files, - default_configuration) diff --git a/third_party/gyp/pylib/gyp/generator/xcode.py b/third_party/gyp/pylib/gyp/generator/xcode.py deleted file mode 100644 index ff28ef2..0000000 --- a/third_party/gyp/pylib/gyp/generator/xcode.py +++ /dev/null @@ -1,1139 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import filecmp -import gyp.common -import gyp.xcodeproj_file -import errno -import os -import posixpath -import re -import shutil -import subprocess -import tempfile - - -# Project files generated by this module will use _intermediate_var as a -# custom Xcode setting whose value is a DerivedSources-like directory that's -# project-specific and configuration-specific. The normal choice, -# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive -# as it is likely that multiple targets within a single project file will want -# to access the same set of generated files. The other option, -# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific, -# it is not configuration-specific. INTERMEDIATE_DIR is defined as -# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION). -_intermediate_var = 'INTERMEDIATE_DIR' - -# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all -# targets that share the same BUILT_PRODUCTS_DIR. -_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR' - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': 'lib', - 'SHARED_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'SHARED_LIB_SUFFIX': '.dylib', - # INTERMEDIATE_DIR is a place for targets to build up intermediate products. - # It is specific to each build environment. It is only guaranteed to exist - # and be constant within the context of a project, corresponding to a single - # input file. Some build environments may allow their intermediate directory - # to be shared on a wider scale, but this is not guaranteed. - 'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var, - 'OS': 'mac', - 'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)', - 'LIB_DIR': '$(BUILT_PRODUCTS_DIR)', - 'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)', - 'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)', - 'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)', - 'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)', - 'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var, - 'CONFIGURATION_NAME': '$(CONFIGURATION)', -} - -# The Xcode-specific sections that hold paths. -generator_additional_path_sections = [ - 'mac_bundle_resources', - # 'mac_framework_dirs', input already handles _dirs endings. -] - -# The Xcode-specific keys that exist on targets and aren't moved down to -# configurations. -generator_additional_non_configuration_keys = [ - 'mac_bundle', - 'mac_bundle_resources', - 'xcode_create_dependents_test_runner', -] - -# We want to let any rules apply to files that are resources also. 
-generator_extra_sources_for_rules = [ - 'mac_bundle_resources', -] - - -def CreateXCConfigurationList(configuration_names): - xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []}) - for configuration_name in configuration_names: - xcbc = gyp.xcodeproj_file.XCBuildConfiguration({ - 'name': configuration_name}) - xccl.AppendProperty('buildConfigurations', xcbc) - xccl.SetProperty('defaultConfigurationName', configuration_names[0]) - return xccl - - -class XcodeProject(object): - def __init__(self, gyp_path, path, build_file_dict): - self.gyp_path = gyp_path - self.path = path - self.project = gyp.xcodeproj_file.PBXProject(path=path) - projectDirPath = gyp.common.RelativePath( - os.path.dirname(os.path.abspath(self.gyp_path)), - os.path.dirname(path) or '.') - self.project.SetProperty('projectDirPath', projectDirPath) - self.project_file = \ - gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project}) - self.build_file_dict = build_file_dict - - # TODO(mark): add destructor that cleans up self.path if created_dir is - # True and things didn't complete successfully. Or do something even - # better with "try"? - self.created_dir = False - try: - os.makedirs(self.path) - self.created_dir = True - except OSError, e: - if e.errno != errno.EEXIST: - raise - - def Finalize1(self, xcode_targets, serialize_all_tests): - # Collect a list of all of the build configuration names used by the - # various targets in the file. It is very heavily advised to keep each - # target in an entire project (even across multiple project files) using - # the same set of configuration names. - configurations = [] - for xct in self.project.GetProperty('targets'): - xccl = xct.GetProperty('buildConfigurationList') - xcbcs = xccl.GetProperty('buildConfigurations') - for xcbc in xcbcs: - name = xcbc.GetProperty('name') - if name not in configurations: - configurations.append(name) - - # Replace the XCConfigurationList attached to the PBXProject object with - # a new one specifying all of the configuration names used by the various - # targets. - try: - xccl = CreateXCConfigurationList(configurations) - self.project.SetProperty('buildConfigurationList', xccl) - except: - import sys - sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path) - raise - - # The need for this setting is explained above where _intermediate_var is - # defined. The comments below about wanting to avoid project-wide build - # settings apply here too, but this needs to be set on a project-wide basis - # so that files relative to the _intermediate_var setting can be displayed - # properly in the Xcode UI. - # - # Note that for configuration-relative files such as anything relative to - # _intermediate_var, for the purposes of UI tree view display, Xcode will - # only resolve the configuration name once, when the project file is - # opened. If the active build configuration is changed, the project file - # must be closed and reopened if it is desired for the tree view to update. - # This is filed as Apple radar 6588391. - xccl.SetBuildSetting(_intermediate_var, - '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)') - xccl.SetBuildSetting(_shared_intermediate_var, - '$(SYMROOT)/DerivedSources/$(CONFIGURATION)') - - # Set user-specified project-wide build settings. This is intended to be - # used very sparingly. Really, almost everything should go into - # target-specific build settings sections. 
The project-wide settings are - # only intended to be used in cases where Xcode attempts to resolve - # variable references in a project context as opposed to a target context, - # such as when resolving sourceTree references while building up the tree - # tree view for UI display. - for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems(): - xccl.SetBuildSetting(xck, xcv) - - # Sort the targets based on how they appeared in the input. - # TODO(mark): Like a lot of other things here, this assumes internal - # knowledge of PBXProject - in this case, of its "targets" property. - - # ordinary_targets are ordinary targets that are already in the project - # file. run_test_targets are the targets that run unittests and should be - # used for the Run All Tests target. support_targets are the action/rule - # targets used by GYP file targets, just kept for the assert check. - ordinary_targets = [] - run_test_targets = [] - support_targets = [] - - # targets is full list of targets in the project. - targets = [] - - # does the it define it's own "all"? - has_custom_all = False - - # targets_for_all is the list of ordinary_targets that should be listed - # in this project's "All" target. It includes each non_runtest_target - # that does not have suppress_wildcard set. - targets_for_all = [] - - for target in self.build_file_dict['targets']: - target_name = target['target_name'] - toolset = target['toolset'] - qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name, - toolset) - xcode_target = xcode_targets[qualified_target] - # Make sure that the target being added to the sorted list is already in - # the unsorted list. - assert xcode_target in self.project._properties['targets'] - targets.append(xcode_target) - ordinary_targets.append(xcode_target) - if xcode_target.support_target: - support_targets.append(xcode_target.support_target) - targets.append(xcode_target.support_target) - - if not int(target.get('suppress_wildcard', False)): - targets_for_all.append(xcode_target) - - if target_name.lower() == 'all': - has_custom_all = True; - - # If this target has a 'run_as' attribute, or is a test, add its - # target to the targets, and (if it's a test) add it the to the - # test targets. - is_test = int(target.get('test', 0)) - if target.get('run_as') or is_test: - # Make a target to run something. It should have one - # dependency, the parent xcode target. - xccl = CreateXCConfigurationList(configurations) - run_target = gyp.xcodeproj_file.PBXAggregateTarget({ - 'name': 'Run ' + target_name, - 'productName': xcode_target.GetProperty('productName'), - 'buildConfigurationList': xccl, - }, - parent=self.project) - run_target.AddDependency(xcode_target) - - # The test runner target has a build phase that executes the - # test, if this has the 'test' attribute. If the 'run_as' tag - # doesn't exist (meaning that this must be a test), then we - # define a default test command line. - command = target.get('run_as', { - 'action': ['${BUILT_PRODUCTS_DIR}/${PRODUCT_NAME}'] - }) - - script = '' - if command.get('working_directory'): - script = script + 'cd "%s"\n' % \ - gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - command.get('working_directory')) - - if command.get('environment'): - script = script + "\n".join( - ['export %s="%s"' % - (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val)) - for (key, val) in command.get('environment').iteritems()]) + "\n" - - # Some test end up using sockets, files on disk, etc. 
and can get - # confused if more then one test runs at a time. The generator - # flag 'xcode_serialize_all_test_runs' controls the forcing of all - # tests serially. It defaults to True. To get serial runs this - # little bit of python does the same as the linux flock utility to - # make sure only one runs at a time. - command_prefix = '' - if is_test and serialize_all_tests: - command_prefix = \ -"""python -c "import fcntl, subprocess, sys -file = open('$TMPDIR/GYP_serialize_test_runs', 'a') -fcntl.flock(file.fileno(), fcntl.LOCK_EX) -sys.exit(subprocess.call(sys.argv[1:]))" """ - - # If we were unable to exec for some reason, we want to exit - # with an error, and fixup variable references to be shell - # syntax instead of xcode syntax. - script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \ - gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - gyp.common.EncodePOSIXShellList(command.get('action'))) - - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - run_target.AppendProperty('buildPhases', ssbp) - - # Add the run target to the project file. - targets.append(run_target) - if is_test: - run_test_targets.append(run_target) - xcode_target.test_runner = run_target - - - # Make sure that the list of targets being replaced is the same length as - # the one replacing it, but allow for the added test runner targets. - assert len(self.project._properties['targets']) == \ - len(ordinary_targets) + len(support_targets) - - self.project._properties['targets'] = targets - - # Get rid of unnecessary levels of depth in groups like the Source group. - self.project.RootGroupsTakeOverOnlyChildren(True) - - # Sort the groups nicely. Do this after sorting the targets, because the - # Products group is sorted based on the order of the targets. - self.project.SortGroups() - - # Create an "All" target if there's more than one target in this project - # file and the project didn't define its own "All" target. Put a generated - # "All" target first so that people opening up the project for the first - # time will build everything by default. - if len(targets_for_all) > 1 and not has_custom_all: - xccl = CreateXCConfigurationList(configurations) - all_target = gyp.xcodeproj_file.PBXAggregateTarget( - { - 'buildConfigurationList': xccl, - 'name': 'All', - }, - parent=self.project) - - for target in targets_for_all: - all_target.AddDependency(target) - - # TODO(mark): This is evil because it relies on internal knowledge of - # PBXProject._properties. It's important to get the "All" target first, - # though. - self.project._properties['targets'].insert(0, all_target) - - # The same, but for run_test_targets. - if len(run_test_targets) > 1: - xccl = CreateXCConfigurationList(configurations) - run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget( - { - 'buildConfigurationList': xccl, - 'name': 'Run All Tests', - }, - parent=self.project) - for run_test_target in run_test_targets: - run_all_tests_target.AddDependency(run_test_target) - - # Insert after the "All" target, which must exist if there is more than - # one run_test_target. - self.project._properties['targets'].insert(1, run_all_tests_target) - - def Finalize2(self, xcode_targets, xcode_target_to_target_dict): - # Finalize2 needs to happen in a separate step because the process of - # updating references to other projects depends on the ordering of targets - # within remote project files. 
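The test-run serialization mentioned above (the xcode_serialize_all_test_runs behaviour) amounts to taking an exclusive flock on a shared file before running the test, exactly as the embedded python -c snippet does. A minimal sketch; the lock-file path and the sample command are assumptions for illustration:

  import fcntl
  import subprocess

  def run_serialized(argv, lock_path='/tmp/serialize_test_runs'):
    # Block until we hold an exclusive lock, then run the command; the lock is
    # released when the process exits and the file object is closed.
    lock_file = open(lock_path, 'a')
    fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX)
    return subprocess.call(argv)

  # Hypothetical usage:
  # sys.exit(run_serialized(['./my_test', '--gtest_print_time']))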
Finalize1 is responsible for sorting duty, - # and once all project files are sorted, Finalize2 can come in and update - # these references. - - # To support making a "test runner" target that will run all the tests - # that are direct dependents of any given target, we look for - # xcode_create_dependents_test_runner being set on an Aggregate target, - # and generate a second target that will run the tests runners found under - # the marked target. - for bf_tgt in self.build_file_dict['targets']: - if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)): - tgt_name = bf_tgt['target_name'] - toolset = bf_tgt['toolset'] - qualified_target = gyp.common.QualifiedTarget(self.gyp_path, - tgt_name, toolset) - xcode_target = xcode_targets[qualified_target] - if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget): - # Collect all the run test targets. - all_run_tests = [] - pbxtds = xcode_target.GetProperty('dependencies') - for pbxtd in pbxtds: - pbxcip = pbxtd.GetProperty('targetProxy') - dependency_xct = pbxcip.GetProperty('remoteGlobalIDString') - target_dict = xcode_target_to_target_dict[dependency_xct] - if target_dict and int(target_dict.get('test', 0)): - assert dependency_xct.test_runner - all_run_tests.append(dependency_xct.test_runner) - - # Directly depend on all the runners as they depend on the target - # that builds them. - if len(all_run_tests) > 0: - run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({ - 'name': 'Run %s Tests' % tgt_name, - 'productName': tgt_name, - }, - parent=self.project) - for run_test_target in all_run_tests: - run_all_target.AddDependency(run_test_target) - - # Insert the test runner after the related target. - idx = self.project._properties['targets'].index(xcode_target) - self.project._properties['targets'].insert(idx + 1, run_all_target) - - # Update all references to other projects, to make sure that the lists of - # remote products are complete. Otherwise, Xcode will fill them in when - # it opens the project file, which will result in unnecessary diffs. - # TODO(mark): This is evil because it relies on internal knowledge of - # PBXProject._other_pbxprojects. - for other_pbxproject in self.project._other_pbxprojects.keys(): - self.project.AddOrGetProjectReference(other_pbxproject) - - self.project.SortRemoteProductReferences() - - # Give everything an ID. - self.project_file.ComputeIDs() - - # Make sure that no two objects in the project file have the same ID. If - # multiple objects wind up with the same ID, upon loading the file, Xcode - # will only recognize one object (the last one in the file?) and the - # results are unpredictable. - self.project_file.EnsureNoIDCollisions() - - def Write(self): - # Write the project file to a temporary location first. Xcode watches for - # changes to the project file and presents a UI sheet offering to reload - # the project when it does change. However, in some cases, especially when - # multiple projects are open or when Xcode is busy, things don't work so - # seamlessly. Sometimes, Xcode is able to detect that a project file has - # changed but can't unload it because something else is referencing it. - # To mitigate this problem, and to avoid even having Xcode present the UI - # sheet when an open project is rewritten for inconsequential changes, the - # project file is written to a temporary file in the xcodeproj directory - # first. The new temporary file is then compared to the existing project - # file, if any. 
If they differ, the new file replaces the old; otherwise, - # the new project file is simply deleted. Xcode properly detects a file - # being renamed over an open project file as a change and so it remains - # able to present the "project file changed" sheet under this system. - # Writing to a temporary file first also avoids the possible problem of - # Xcode rereading an incomplete project file. - (output_fd, new_pbxproj_path) = \ - tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.', - dir=self.path) - - try: - output_file = os.fdopen(output_fd, 'wb') - - self.project_file.Print(output_file) - output_file.close() - - pbxproj_path = os.path.join(self.path, 'project.pbxproj') - - same = False - try: - same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False) - except OSError, e: - if e.errno != errno.ENOENT: - raise - - if same: - # The new file is identical to the old one, just get rid of the new - # one. - os.unlink(new_pbxproj_path) - else: - # The new file is different from the old one, or there is no old one. - # Rename the new file to the permanent name. - # - # tempfile.mkstemp uses an overly restrictive mode, resulting in a - # file that can only be read by the owner, regardless of the umask. - # There's no reason to not respect the umask here, which means that - # an extra hoop is required to fetch it and reset the new file's mode. - # - # No way to get the umask without setting a new one? Set a safe one - # and then set it back to the old value. - umask = os.umask(077) - os.umask(umask) - - os.chmod(new_pbxproj_path, 0666 & ~umask) - os.rename(new_pbxproj_path, pbxproj_path) - - except Exception: - # Don't leave turds behind. In fact, if this code was responsible for - # creating the xcodeproj directory, get rid of that too. - os.unlink(new_pbxproj_path) - if self.created_dir: - shutil.rmtree(self.path, True) - raise - - -cached_xcode_version = None -def InstalledXcodeVersion(): - """Fetches the installed version of Xcode, returns empty string if it is - unable to figure it out.""" - - global cached_xcode_version - if not cached_xcode_version is None: - return cached_xcode_version - - # Default to an empty string - cached_xcode_version = '' - - # Collect the xcodebuild's version information. - try: - import subprocess - cmd = ['/usr/bin/xcodebuild', '-version'] - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) - xcodebuild_version_info = proc.communicate()[0] - # Any error, return empty string - if proc.returncode: - xcodebuild_version_info = '' - except OSError: - # We failed to launch the tool - xcodebuild_version_info = '' - - # Pull out the Xcode version itself. - match_line = re.search('^Xcode (.*)$', xcodebuild_version_info, re.MULTILINE) - if match_line: - cached_xcode_version = match_line.group(1) - # Done! - return cached_xcode_version - - -def AddSourceToTarget(source, pbxp, xct): - # TODO(mark): Perhaps this can be made a little bit fancier. - source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's'] - basename = posixpath.basename(source) - (root, ext) = posixpath.splitext(basename) - if ext != '': - ext = ext[1:].lower() - - if ext in source_extensions: - xct.SourcesPhase().AddFile(source) - else: - # Files that aren't added to a sources build phase can still go into - # the project file, just not as part of a build phase. - pbxp.AddOrGetFileInRootGroup(source) - - -def AddResourceToTarget(resource, pbxp, xct): - # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call - # where it's used. 
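The write-to-temp, compare, then rename sequence that Write() performs above generalizes to any generated file; a rough sketch assuming a plain destination path, with the umask handling reduced to its essentials:

  import filecmp
  import os
  import tempfile

  def write_if_changed(path, data):
    # Write to a temporary file in the same directory, then either discard it
    # (contents identical) or rename it over the old file (contents changed).
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or '.')
    with os.fdopen(fd, 'wb') as f:
      f.write(data)
    try:
      same = filecmp.cmp(path, tmp_path, False)
    except OSError:
      same = False  # No existing file to compare against.
    if same:
      os.unlink(tmp_path)
    else:
      # mkstemp creates the file mode 0600; loosen it to respect the umask.
      umask = os.umask(0)
      os.umask(umask)
      os.chmod(tmp_path, 0o666 & ~umask)
      os.rename(tmp_path, path)

  # Hypothetical usage:
  # write_if_changed('project.pbxproj', b'...contents...')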
- xct.ResourcesPhase().AddFile(resource) - - -_xcode_variable_re = re.compile('(\$\((.*?)\))') -def ExpandXcodeVariables(string, expansions): - """Expands Xcode-style $(VARIABLES) in string per the expansions dict. - - In some rare cases, it is appropriate to expand Xcode variables when a - project file is generated. For any substring $(VAR) in string, if VAR is a - key in the expansions dict, $(VAR) will be replaced with expansions[VAR]. - Any $(VAR) substring in string for which VAR is not a key in the expansions - dict will remain in the returned string. - """ - - matches = _xcode_variable_re.findall(string) - if matches == None: - return string - - matches.reverse() - for match in matches: - (to_replace, variable) = match - if not variable in expansions: - continue - - replacement = expansions[variable] - string = re.sub(re.escape(to_replace), replacement, string) - - return string - - -def EscapeXCodeArgument(s): - """We must escape the arguments that we give to XCode so that it knows not to - split on spaces and to respect backslash and quote literals.""" - s = s.replace('\\', '\\\\') - s = s.replace('"', '\\"') - return '"' + s + '"' - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params['options'] - generator_flags = params.get('generator_flags', {}) - parallel_builds = generator_flags.get('xcode_parallel_builds', True) - serialize_all_tests = \ - generator_flags.get('xcode_serialize_all_test_runs', True) - xcode_projects = {} - for build_file, build_file_dict in data.iteritems(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != '.gyp': - continue - xcodeproj_path = build_file_root + options.suffix + '.xcodeproj' - if options.generator_output: - xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) - xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict) - xcode_projects[build_file] = xcp - pbxp = xcp.project - - if parallel_builds: - pbxp.SetProperty('attributes', - {'BuildIndependentTargetsInParallel': 'YES'}) - - main_group = pbxp.GetProperty('mainGroup') - build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'}) - main_group.AppendChild(build_group) - for included_file in build_file_dict['included_files']: - build_group.AddOrGetFileByPath(included_file, False) - - xcode_targets = {} - xcode_target_to_target_dict = {} - for qualified_target in target_list: - [build_file, target_name, toolset] = \ - gyp.common.ParseQualifiedTarget(qualified_target) - - spec = target_dicts[qualified_target] - if spec['toolset'] != 'target': - raise Exception( - 'Multiple toolsets not supported in xcode build (target %s)' % - qualified_target) - configuration_names = [spec['default_configuration']] - for configuration_name in sorted(spec['configurations'].keys()): - if configuration_name not in configuration_names: - configuration_names.append(configuration_name) - xcp = xcode_projects[build_file] - pbxp = xcp.project - - # Set up the configurations for the target according to the list of names - # supplied. - xccl = CreateXCConfigurationList(configuration_names) - - # Create an XCTarget subclass object for the target. We use the type - # with "+bundle" appended if the target has "mac_bundle" set. 
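As a rough illustration of the $(VAR) expansion contract documented above (known variables are substituted, unknown ones are left in place), here is a simplified stand-in with the same observable behaviour; it is not the generator's implementation, which works match-by-match over the compiled regex:

import re

_var_re = re.compile(r'\$\((.*?)\)')

def expand_xcode_style(s, expansions):
    # Replace $(VAR) with expansions['VAR'] when VAR is known; unknown
    # variables are left untouched, mirroring ExpandXcodeVariables above.
    def _repl(match):
        name = match.group(1)
        return str(expansions.get(name, match.group(0)))
    return _var_re.sub(_repl, s)

# expand_xcode_style('$(INPUT_FILE_BASE).cc', {'INPUT_FILE_BASE': 'two'})
#   -> 'two.cc'
# expand_xcode_style('$(UNKNOWN).cc', {'INPUT_FILE_BASE': 'two'})
#   -> '$(UNKNOWN).cc'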
- _types = { - 'executable': 'com.apple.product-type.tool', - 'loadable_module': 'com.apple.product-type.library.dynamic', - 'shared_library': 'com.apple.product-type.library.dynamic', - 'static_library': 'com.apple.product-type.library.static', - 'executable+bundle': 'com.apple.product-type.application', - 'loadable_module+bundle': 'com.apple.product-type.bundle', - 'shared_library+bundle': 'com.apple.product-type.framework', - } - - target_properties = { - 'buildConfigurationList': xccl, - 'name': target_name, - } - - type = spec['type'] - is_bundle = int(spec.get('mac_bundle', 0)) - if type != 'none': - type_bundle_key = type - if is_bundle: - type_bundle_key += '+bundle' - xctarget_type = gyp.xcodeproj_file.PBXNativeTarget - try: - target_properties['productType'] = _types[type_bundle_key] - except KeyError, e: - gyp.common.ExceptionAppend(e, "-- unknown product type while " - "writing target %s" % target_name) - raise - else: - xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget - - target_product_name = spec.get('product_name') - if target_product_name is not None: - target_properties['productName'] = target_product_name - - xct = xctarget_type(target_properties, parent=pbxp, - force_outdir=spec.get('product_dir'), - force_prefix=spec.get('product_prefix'), - force_extension=spec.get('product_extension')) - pbxp.AppendProperty('targets', xct) - xcode_targets[qualified_target] = xct - xcode_target_to_target_dict[xct] = spec - - # Xcode does not have a distinct type for loadable_modules that are pure - # BSD targets (ie-unbundled). It uses the same setup as a shared_library - # but the mach-o type is explictly set in the settings. So before we do - # anything else, for this one case, we stuff in that one setting. This - # would allow the other data in the spec to change it if need be. - if type == 'loadable_module' and not is_bundle: - xccl.SetBuildSetting('MACH_O_TYPE', 'mh_bundle') - - spec_actions = spec.get('actions', []) - spec_rules = spec.get('rules', []) - - # Xcode has some "issues" with checking dependencies for the "Compile - # sources" step with any source files/headers generated by actions/rules. - # To work around this, if a target is building anything directly (not - # type "none"), then a second target as used to run the GYP actions/rules - # and is made a dependency of this target. This way the work is done - # before the dependency checks for what should be recompiled. - support_xct = None - if type != 'none' and (spec_actions or spec_rules): - support_xccl = CreateXCConfigurationList(configuration_names); - support_target_properties = { - 'buildConfigurationList': support_xccl, - 'name': target_name + ' Support', - } - if target_product_name: - support_target_properties['productName'] = \ - target_product_name + ' Support' - support_xct = \ - gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties, - parent=pbxp) - pbxp.AppendProperty('targets', support_xct) - xct.AddDependency(support_xct) - # Hang the support target off the main target so it can be tested/found - # by the generator during Finalize. - xct.support_target = support_xct - - prebuild_index = 0 - - # Add custom shell script phases for "actions" sections. - for action in spec_actions: - # There's no need to write anything into the script to ensure that the - # output directories already exist, because Xcode will look at the - # declared outputs and automatically ensure that they exist for us. - - # Do we have a message to print when this action runs? 
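The product-type selection above amounts to a key lookup against the _types table; a condensed sketch of that decision (product_type_for is an invented helper name):

def product_type_for(spec_type, is_bundle, types):
    # 'none' targets become PBXAggregateTargets and carry no productType.
    if spec_type == 'none':
        return None
    key = spec_type + '+bundle' if is_bundle else spec_type
    return types[key]  # a KeyError here means an unsupported target type

# product_type_for('executable', True, _types)
#   -> 'com.apple.product-type.application'
# product_type_for('shared_library', False, _types)
#   -> 'com.apple.product-type.library.dynamic'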
- message = action.get('message') - if message: - message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message) - else: - message = '' - - # Turn the list into a string that can be passed to a shell. - action_string = gyp.common.EncodePOSIXShellList(action['action']) - - # Convert Xcode-type variable references to sh-compatible environment - # variable references. - message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message) - action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - action_string) - - script = '' - # Include the optional message - if message_sh: - script += message_sh + '\n' - # Be sure the script runs in exec, and that if exec fails, the script - # exits signalling an error. - script += 'exec ' + action_string_sh + '\nexit 1\n' - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'inputPaths': action['inputs'], - 'name': 'Action "' + action['action_name'] + '"', - 'outputPaths': action['outputs'], - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - - if support_xct: - support_xct.AppendProperty('buildPhases', ssbp) - else: - # TODO(mark): this assumes too much knowledge of the internals of - # xcodeproj_file; some of these smarts should move into xcodeproj_file - # itself. - xct._properties['buildPhases'].insert(prebuild_index, ssbp) - prebuild_index = prebuild_index + 1 - - # TODO(mark): Should verify that at most one of these is specified. - if int(action.get('process_outputs_as_sources', False)): - for output in action['outputs']: - AddSourceToTarget(output, pbxp, xct) - - if int(action.get('process_outputs_as_mac_bundle_resources', False)): - for output in action['outputs']: - AddResourceToTarget(output, pbxp, xct) - - # tgt_mac_bundle_resources holds the list of bundle resources so - # the rule processing can check against it. - if is_bundle: - tgt_mac_bundle_resources = spec.get('mac_bundle_resources', []) - else: - tgt_mac_bundle_resources = [] - - # Add custom shell script phases driving "make" for "rules" sections. - # - # Xcode's built-in rule support is almost powerful enough to use directly, - # but there are a few significant deficiencies that render them unusable. - # There are workarounds for some of its inadequacies, but in aggregate, - # the workarounds added complexity to the generator, and some workarounds - # actually require input files to be crafted more carefully than I'd like. - # Consequently, until Xcode rules are made more capable, "rules" input - # sections will be handled in Xcode output by shell script build phases - # performed prior to the compilation phase. - # - # The following problems with Xcode rules were found. The numbers are - # Apple radar IDs. I hope that these shortcomings are addressed, I really - # liked having the rules handled directly in Xcode during the period that - # I was prototyping this. - # - # 6588600 Xcode compiles custom script rule outputs too soon, compilation - # fails. This occurs when rule outputs from distinct inputs are - # interdependent. The only workaround is to put rules and their - # inputs in a separate target from the one that compiles the rule - # outputs. This requires input file cooperation and it means that - # process_outputs_as_sources is unusable. - # 6584932 Need to declare that custom rule outputs should be excluded from - # compilation. A possible workaround is to lie to Xcode about a - # rule's output, giving it a dummy file it doesn't know how to - # compile. The rule action script would need to touch the dummy. 
- # 6584839 I need a way to declare additional inputs to a custom rule. - # A possible workaround is a shell script phase prior to - # compilation that touches a rule's primary input files if any - # would-be additional inputs are newer than the output. Modifying - # the source tree - even just modification times - feels dirty. - # 6564240 Xcode "custom script" build rules always dump all environment - # variables. This is a low-prioroty problem and is not a - # show-stopper. - rules_by_ext = {} - for rule in spec_rules: - rules_by_ext[rule['extension']] = rule - - # First, some definitions: - # - # A "rule source" is a file that was listed in a target's "sources" - # list and will have a rule applied to it on the basis of matching the - # rule's "extensions" attribute. Rule sources are direct inputs to - # rules. - # - # Rule definitions may specify additional inputs in their "inputs" - # attribute. These additional inputs are used for dependency tracking - # purposes. - # - # A "concrete output" is a rule output with input-dependent variables - # resolved. For example, given a rule with: - # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'], - # if the target's "sources" list contained "one.ext" and "two.ext", - # the "concrete output" for rule input "two.ext" would be "two.cc". If - # a rule specifies multiple outputs, each input file that the rule is - # applied to will have the same number of concrete outputs. - # - # If any concrete outputs are outdated or missing relative to their - # corresponding rule_source or to any specified additional input, the - # rule action must be performed to generate the concrete outputs. - - # concrete_outputs_by_rule_source will have an item at the same index - # as the rule['rule_sources'] that it corresponds to. Each item is a - # list of all of the concrete outputs for the rule_source. - concrete_outputs_by_rule_source = [] - - # concrete_outputs_all is a flat list of all concrete outputs that this - # rule is able to produce, given the known set of input files - # (rule_sources) that apply to it. - concrete_outputs_all = [] - - # messages & actions are keyed by the same indices as rule['rule_sources'] - # and concrete_outputs_by_rule_source. They contain the message and - # action to perform after resolving input-dependent variables. The - # message is optional, in which case None is stored for each rule source. - messages = [] - actions = [] - - for rule_source in rule.get('rule_sources', []): - rule_source_basename = posixpath.basename(rule_source) - (rule_source_root, rule_source_ext) = \ - posixpath.splitext(rule_source_basename) - - # These are the same variable names that Xcode uses for its own native - # rule support. Because Xcode's rule engine is not being used, they - # need to be expanded as they are written to the makefile. - rule_input_dict = { - 'INPUT_FILE_BASE': rule_source_root, - 'INPUT_FILE_SUFFIX': rule_source_ext, - 'INPUT_FILE_NAME': rule_source_basename, - 'INPUT_FILE_PATH': rule_source, - } - - concrete_outputs_for_this_rule_source = [] - for output in rule.get('outputs', []): - # Fortunately, Xcode and make both use $(VAR) format for their - # variables, so the expansion is the only transformation necessary. - # Any remaning $(VAR)-type variables in the string can be given - # directly to make, which will pick up the correct settings from - # what Xcode puts into the environment. 
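To make the "concrete output" terminology above tangible, here is how the per-source variable dictionary drives expansion (the file names are invented for illustration):

import posixpath

rule_source = 'parser/grammar.ext'              # hypothetical rule source
basename = posixpath.basename(rule_source)      # 'grammar.ext'
(root, ext) = posixpath.splitext(basename)      # ('grammar', '.ext')

rule_input_dict = {
    'INPUT_FILE_BASE':   root,
    'INPUT_FILE_SUFFIX': ext,
    'INPUT_FILE_NAME':   basename,
    'INPUT_FILE_PATH':   rule_source,
}

# An 'outputs' template of '$(INPUT_FILE_BASE).cc' therefore expands to the
# concrete output 'grammar.cc' for this particular rule source; a rule with
# two output templates yields two concrete outputs per source, and so on.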
- concrete_output = ExpandXcodeVariables(output, rule_input_dict) - concrete_outputs_for_this_rule_source.append(concrete_output) - - # Add all concrete outputs to the project. - pbxp.AddOrGetFileInRootGroup(concrete_output) - - concrete_outputs_by_rule_source.append( \ - concrete_outputs_for_this_rule_source) - concrete_outputs_all.extend(concrete_outputs_for_this_rule_source) - - # TODO(mark): Should verify that at most one of these is specified. - if int(rule.get('process_outputs_as_sources', False)): - for output in concrete_outputs_for_this_rule_source: - AddSourceToTarget(output, pbxp, xct) - - # If the file came from the mac_bundle_resources list or if the rule - # is marked to process outputs as bundle resource, do so. - was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources - if was_mac_bundle_resource or \ - int(rule.get('process_outputs_as_mac_bundle_resources', False)): - for output in concrete_outputs_for_this_rule_source: - AddResourceToTarget(output, pbxp, xct) - - # Do we have a message to print when this rule runs? - message = rule.get('message') - if message: - message = gyp.common.EncodePOSIXShellArgument(message) - message = '@echo note: ' + ExpandXcodeVariables(message, - rule_input_dict) - messages.append(message) - - # Turn the list into a string that can be passed to a shell. - action_string = gyp.common.EncodePOSIXShellList(rule['action']) - - action = ExpandXcodeVariables(action_string, rule_input_dict) - actions.append(action) - - if len(concrete_outputs_all) > 0: - # TODO(mark): There's a possibilty for collision here. Consider - # target "t" rule "A_r" and target "t_A" rule "r". - makefile_name = '%s_%s.make' % (target_name, rule['rule_name']) - makefile_path = os.path.join(xcode_projects[build_file].path, - makefile_name) - # TODO(mark): try/close? Write to a temporary file and swap it only - # if it's got changes? - makefile = open(makefile_path, 'wb') - - # make will build the first target in the makefile by default. By - # convention, it's called "all". List all (or at least one) - # concrete output for each rule source as a prerequisite of the "all" - # target. - makefile.write('all: \\\n') - for concrete_output_index in \ - xrange(0, len(concrete_outputs_by_rule_source)): - # Only list the first (index [0]) concrete output of each input - # in the "all" target. Otherwise, a parallel make (-j > 1) would - # attempt to process each input multiple times simultaneously. - # Otherwise, "all" could just contain the entire list of - # concrete_outputs_all. - concrete_output = \ - concrete_outputs_by_rule_source[concrete_output_index][0] - if concrete_output_index == len(concrete_outputs_by_rule_source) - 1: - eol = '' - else: - eol = ' \\' - makefile.write(' %s%s\n' % (concrete_output, eol)) - - for (rule_source, concrete_outputs, message, action) in \ - zip(rule['rule_sources'], concrete_outputs_by_rule_source, - messages, actions): - makefile.write('\n') - - # Add a rule that declares it can build each concrete output of a - # rule source. Collect the names of the directories that are - # required. 
- concrete_output_dirs = [] - for concrete_output_index in xrange(0, len(concrete_outputs)): - concrete_output = concrete_outputs[concrete_output_index] - if concrete_output_index == 0: - bol = '' - else: - bol = ' ' - makefile.write('%s%s \\\n' % (bol, concrete_output)) - - concrete_output_dir = posixpath.dirname(concrete_output) - if (concrete_output_dir and - concrete_output_dir not in concrete_output_dirs): - concrete_output_dirs.append(concrete_output_dir) - - makefile.write(' : \\\n') - - # The prerequisites for this rule are the rule source itself and - # the set of additional rule inputs, if any. - prerequisites = [rule_source] - prerequisites.extend(rule.get('inputs', [])) - for prerequisite_index in xrange(0, len(prerequisites)): - prerequisite = prerequisites[prerequisite_index] - if prerequisite_index == len(prerequisites) - 1: - eol = '' - else: - eol = ' \\' - makefile.write(' %s%s\n' % (prerequisite, eol)) - - # Make sure that output directories exist before executing the rule - # action. - # TODO(mark): quote the list of concrete_output_dirs. - if len(concrete_output_dirs) > 0: - makefile.write('\tmkdir -p %s\n' % ' '.join(concrete_output_dirs)) - - # The rule message and action have already had the necessary variable - # substitutions performed. - if message: - makefile.write('\t%s\n' % message) - makefile.write('\t%s\n' % action) - - makefile.close() - - # It might be nice to ensure that needed output directories exist - # here rather than in each target in the Makefile, but that wouldn't - # work if there ever was a concrete output that had an input-dependent - # variable anywhere other than in the leaf position. - - # Don't declare any inputPaths or outputPaths. If they're present, - # Xcode will provide a slight optimization by only running the script - # phase if any output is missing or outdated relative to any input. - # Unfortunately, it will also assume that all outputs are touched by - # the script, and if the outputs serve as files in a compilation - # phase, they will be unconditionally rebuilt. Since make might not - # rebuild everything that could be declared here as an output, this - # extra compilation activity is unnecessary. With inputPaths and - # outputPaths not supplied, make will always be called, but it knows - # enough to not do anything when everything is up-to-date. - - # To help speed things up, pass -j COUNT to make so it does some work - # in parallel. Don't use ncpus because Xcode will build ncpus targets - # in parallel and if each target happens to have a rules step, there - # would be ncpus^2 things going. With a machine that has 2 quad-core - # Xeons, a build can quickly run out of processes based on - # scheduling/other tasks, and randomly failing builds are no good. - script = \ -"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)" -if [ "${JOB_COUNT}" -gt 4 ]; then - JOB_COUNT=4 -fi -exec "${DEVELOPER_BIN_DIR}/make" -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}" -exit 1 -""" % makefile_name - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'name': 'Rule "' + rule['rule_name'] + '"', - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - - if support_xct: - support_xct.AppendProperty('buildPhases', ssbp) - else: - # TODO(mark): this assumes too much knowledge of the internals of - # xcodeproj_file; some of these smarts should move into xcodeproj_file - # itself. - xct._properties['buildPhases'].insert(prebuild_index, ssbp) - prebuild_index = prebuild_index + 1 - - # Extra rule inputs also go into the project file. 
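Putting the makefile-writing steps above together, the generated <target>_<rule_name>.make for two hypothetical rule sources would read roughly as follows; the paths, the extra input, and the action are invented, and only the shape matters (recipe lines are tab-indented in the real file):

all: \
    gen/one.cc \
    gen/two.cc

gen/one.cc \
 gen/one.h \
    : \
    one.ext \
    tools/codegen.py
        mkdir -p gen
        @echo note: Generating from one.ext
        python tools/codegen.py one.ext

(with an analogous stanza for two.ext)

Only the first concrete output of each source appears under "all", for the parallel-make reason given above, and because no inputPaths or outputPaths are declared on the wrapping shell script phase, make itself decides whether anything is actually out of date.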
Concrete outputs were - # already added when they were computed. - for group in ['inputs', 'inputs_excluded']: - for item in rule.get(group, []): - pbxp.AddOrGetFileInRootGroup(item) - - # Add "sources". - for source in spec.get('sources', []): - (source_root, source_extension) = posixpath.splitext(source) - if source_extension[1:] not in rules_by_ext: - # AddSourceToTarget will add the file to a root group if it's not - # already there. - AddSourceToTarget(source, pbxp, xct) - else: - pbxp.AddOrGetFileInRootGroup(source) - - # Add "mac_bundle_resources" if it's a bundle of any type. - if is_bundle: - for resource in tgt_mac_bundle_resources: - (resource_root, resource_extension) = posixpath.splitext(resource) - if resource_extension[1:] not in rules_by_ext: - AddResourceToTarget(resource, pbxp, xct) - else: - pbxp.AddOrGetFileInRootGroup(resource) - - # Add "copies". - for copy_group in spec.get('copies', []): - pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({ - 'name': 'Copy to ' + copy_group['destination'] - }, - parent=xct) - dest = copy_group['destination'] - if dest[0] not in ('/', '$'): - # Relative paths are relative to $(SRCROOT). - dest = '$(SRCROOT)/' + dest - pbxcp.SetDestination(dest) - - # TODO(mark): The usual comment about this knowing too much about - # gyp.xcodeproj_file internals applies. - xct._properties['buildPhases'].insert(prebuild_index, pbxcp) - - for file in copy_group['files']: - pbxcp.AddFile(file) - - # Excluded files can also go into the project file. - for key in ['sources', 'mac_bundle_resources']: - excluded_key = key + '_excluded' - for item in spec.get(excluded_key, []): - pbxp.AddOrGetFileInRootGroup(item) - - # So can "inputs" and "outputs" sections of "actions" groups. - for action in spec.get('actions', []): - groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded'] - for group in groups: - for item in action.get(group, []): - # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not - # sources. - if not item.startswith('$(BUILT_PRODUCTS_DIR)/'): - pbxp.AddOrGetFileInRootGroup(item) - - for postbuild in spec.get('postbuilds', []): - action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action']) - script = 'exec ' + action_string_sh + '\nexit 1\n' - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"', - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - xct.AppendProperty('buildPhases', ssbp) - - # Add dependencies before libraries, because adding a dependency may imply - # adding a library. It's preferable to keep dependencies listed first - # during a link phase so that they can override symbols that would - # otherwise be provided by libraries, which will usually include system - # libraries. On some systems, ld is finicky and even requires the - # libraries to be ordered in such a way that unresolved symbols in - # earlier-listed libraries may only be resolved by later-listed libraries. - # The Mac linker doesn't work that way, but other platforms do, and so - # their linker invocations need to be constructed in this way. There's - # no compelling reason for Xcode's linker invocations to differ. - - if 'dependencies' in spec: - for dependency in spec['dependencies']: - xct.AddDependency(xcode_targets[dependency]) - # The support project also gets the dependencies (in case they are - # needed for the actions/rules to work). 
- if support_xct: - support_xct.AddDependency(xcode_targets[dependency]) - - if 'libraries' in spec: - for library in spec['libraries']: - xct.FrameworksPhase().AddFile(library) - # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary. - # I wish Xcode handled this automatically. - # TODO(mark): this logic isn't right. There are certain directories - # that are always searched, we should check to see if the library is - # in one of those directories, and if not, we should do the - # AppendBuildSetting thing. - if not posixpath.isabs(library) and not library.startswith('$'): - # TODO(mark): Need to check to see if library_dir is already in - # LIBRARY_SEARCH_PATHS. - library_dir = posixpath.dirname(library) - xct.AppendBuildSetting('LIBRARY_SEARCH_PATHS', library_dir) - - for configuration_name in configuration_names: - configuration = spec['configurations'][configuration_name] - xcbc = xct.ConfigurationNamed(configuration_name) - for include_dir in configuration.get('mac_framework_dirs', []): - xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir) - for include_dir in configuration.get('include_dirs', []): - xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir) - if 'defines' in configuration: - for define in configuration['defines']: - set_define = EscapeXCodeArgument(define) - xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define) - if 'xcode_settings' in configuration: - for xck, xcv in configuration['xcode_settings'].iteritems(): - xcbc.SetBuildSetting(xck, xcv) - - build_files = [] - for build_file, build_file_dict in data.iteritems(): - if build_file.endswith('.gyp'): - build_files.append(build_file) - - for build_file in build_files: - xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests) - - for build_file in build_files: - xcode_projects[build_file].Finalize2(xcode_targets, - xcode_target_to_target_dict) - - for build_file in build_files: - xcode_projects[build_file].Write() diff --git a/third_party/gyp/pylib/gyp/input.py b/third_party/gyp/pylib/gyp/input.py deleted file mode 100644 index b7a1ad2..0000000 --- a/third_party/gyp/pylib/gyp/input.py +++ /dev/null @@ -1,2220 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from compiler.ast import Const -from compiler.ast import Dict -from compiler.ast import Discard -from compiler.ast import List -from compiler.ast import Module -from compiler.ast import Node -from compiler.ast import Stmt -import compiler -import copy -import gyp.common -import optparse -import os.path -import re -import shlex -import subprocess -import sys - - -# A list of types that are treated as linkable. -linkable_types = ['executable', 'shared_library', 'loadable_module'] - -# A list of sections that contain links to other targets. -dependency_sections = ['dependencies', 'export_dependent_settings'] - -# base_path_sections is a list of sections defined by GYP that contain -# pathnames. The generators can provide more keys, the two lists are merged -# into path_sections, but you should call IsPathSection instead of using either -# list directly. -base_path_sections = [ - 'destination', - 'files', - 'include_dirs', - 'inputs', - 'libraries', - 'outputs', - 'sources', -] -path_sections = [] - - -def IsPathSection(section): - # If section ends in one of these characters, it's applied to a section - # without the trailing characters. 
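To summarise the per-configuration loop above, a hypothetical gyp configuration fragment and the build settings it produces (values are invented):

configuration = {
    'mac_framework_dirs': ['$(SDKROOT)/System/Library/Frameworks'],
    'include_dirs': ['include', 'third_party/foo'],
    'defines': ['NDEBUG', 'VERSION="1.0"'],
    'xcode_settings': {'GCC_ENABLE_CPP_RTTI': 'NO'},
}

# After the loop, the named XCBuildConfiguration carries roughly:
#   FRAMEWORK_SEARCH_PATHS       += ['$(SDKROOT)/System/Library/Frameworks']
#   HEADER_SEARCH_PATHS          += ['include', 'third_party/foo']
#   GCC_PREPROCESSOR_DEFINITIONS += ['"NDEBUG"', '"VERSION=\"1.0\""']
#       (each define passes through EscapeXCodeArgument, hence the quoting)
#   GCC_ENABLE_CPP_RTTI           = 'NO'   (xcode_settings are set verbatim)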
'/' is notably absent from this list, - # because there's no way for a regular expression to be treated as a path. - while section[-1:] in ('=', '+', '?', '!'): - section = section[0:-1] - - if section in path_sections or \ - section.endswith('_dir') or section.endswith('_dirs') or \ - section.endswith('_file') or section.endswith('_files') or \ - section.endswith('_path') or section.endswith('_paths'): - return True - return False - - -# base_non_configuraiton_keys is a list of key names that belong in the target -# itself and should not be propagated into its configurations. It is merged -# with a list that can come from the generator to -# create non_configuration_keys. -base_non_configuration_keys = [ - # Sections that must exist inside targets and not configurations. - 'actions', - 'configurations', - 'copies', - 'default_configuration', - 'dependencies', - 'dependencies_original', - 'link_languages', - 'libraries', - 'postbuilds', - 'product_dir', - 'product_extension', - 'product_name', - 'product_prefix', - 'rules', - 'run_as', - 'sources', - 'suppress_wildcard', - 'target_name', - 'test', - 'toolset', - 'toolsets', - 'type', - 'variants', - - # Sections that can be found inside targets or configurations, but that - # should not be propagated from targets into their configurations. - 'variables', -] -non_configuration_keys = [] - -# Keys that do not belong inside a configuration dictionary. -invalid_configuration_keys = [ - 'actions', - 'all_dependent_settings', - 'configurations', - 'dependencies', - 'direct_dependent_settings', - 'libraries', - 'link_settings', - 'sources', - 'target_name', - 'type', -] - -# Controls how the generator want the build file paths. -absolute_build_file_paths = False - -# Controls whether or not the generator supports multiple toolsets. -multiple_toolsets = False - - -def GetIncludedBuildFiles(build_file_path, aux_data, included=None): - """Return a list of all build files included into build_file_path. - - The returned list will contain build_file_path as well as all other files - that it included, either directly or indirectly. Note that the list may - contain files that were included into a conditional section that evaluated - to false and was not merged into build_file_path's dict. - - aux_data is a dict containing a key for each build file or included build - file. Those keys provide access to dicts whose "included" keys contain - lists of all other files included by the build file. - - included should be left at its default None value by external callers. It - is used for recursion. - - The returned list will not contain any duplicate entries. Each build file - in the list will be relative to the current directory. - """ - - if included == None: - included = [] - - if build_file_path in included: - return included - - included.append(build_file_path) - - for included_build_file in aux_data[build_file_path].get('included', []): - GetIncludedBuildFiles(included_build_file, aux_data, included) - - return included - - -def CheckedEval(file_contents): - """Return the eval of a gyp file. - - The gyp file is restricted to dictionaries and lists only, and - repeated keys are not allowed. - - Note that this is slower than eval() is. 
- """ - - ast = compiler.parse(file_contents) - assert isinstance(ast, Module) - c1 = ast.getChildren() - assert c1[0] is None - assert isinstance(c1[1], Stmt) - c2 = c1[1].getChildren() - assert isinstance(c2[0], Discard) - c3 = c2[0].getChildren() - assert len(c3) == 1 - return CheckNode(c3[0], []) - - -def CheckNode(node, keypath): - if isinstance(node, Dict): - c = node.getChildren() - dict = {} - for n in range(0, len(c), 2): - assert isinstance(c[n], Const) - key = c[n].getChildren()[0] - if key in dict: - raise KeyError, "Key '" + key + "' repeated at level " + \ - repr(len(keypath) + 1) + " with key path '" + \ - '.'.join(keypath) + "'" - kp = list(keypath) # Make a copy of the list for descending this node. - kp.append(key) - dict[key] = CheckNode(c[n + 1], kp) - return dict - elif isinstance(node, List): - c = node.getChildren() - children = [] - for index, child in enumerate(c): - kp = list(keypath) # Copy list. - kp.append(repr(index)) - children.append(CheckNode(child, kp)) - return children - elif isinstance(node, Const): - return node.getChildren()[0] - else: - raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \ - "': " + repr(node) - - -def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes, - is_target, check): - if build_file_path in data: - return data[build_file_path] - - if os.path.exists(build_file_path): - build_file_contents = open(build_file_path).read() - else: - raise Exception("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) - - build_file_data = None - try: - if check: - build_file_data = CheckedEval(build_file_contents) - else: - build_file_data = eval(build_file_contents, {'__builtins__': None}, - None) - except SyntaxError, e: - e.filename = build_file_path - raise - except Exception, e: - gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) - raise - - data[build_file_path] = build_file_data - aux_data[build_file_path] = {} - - # Scan for includes and merge them in. - try: - if is_target: - LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, - aux_data, variables, includes, check) - else: - LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, - aux_data, variables, None, check) - except Exception, e: - gyp.common.ExceptionAppend(e, - 'while reading includes of ' + build_file_path) - raise - - return build_file_data - - -def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, - variables, includes, check): - includes_list = [] - if includes != None: - includes_list.extend(includes) - if 'includes' in subdict: - for include in subdict['includes']: - # "include" is specified relative to subdict_path, so compute the real - # path to include by appending the provided "include" to the directory - # in which subdict_path resides. - relative_include = \ - os.path.normpath(os.path.join(os.path.dirname(subdict_path), include)) - includes_list.append(relative_include) - # Unhook the includes list, it's no longer needed. - del subdict['includes'] - - # Merge in the included files. - for include in includes_list: - if not 'included' in aux_data[subdict_path]: - aux_data[subdict_path]['included'] = [] - aux_data[subdict_path]['included'].append(include) - - gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'" % include) - - MergeDicts(subdict, - LoadOneBuildFile(include, data, aux_data, variables, None, - False, check), - subdict_path, include) - - # Recurse into subdictionaries. 
- for k, v in subdict.iteritems(): - if v.__class__ == dict: - LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables, - None, check) - elif v.__class__ == list: - LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables, - check) - - -# This recurses into lists so that it can look for dicts. -def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, - variables, check): - for item in sublist: - if item.__class__ == dict: - LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data, - variables, None, check) - elif item.__class__ == list: - LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, - variables, check) - -# Processes toolsets in all the targets. This recurses into condition entries -# since they can contain toolsets as well. -def ProcessToolsetsInDict(data): - if 'targets' in data: - target_list = data['targets'] - new_target_list = [] - for target in target_list: - global multiple_toolsets - if multiple_toolsets: - toolsets = target.get('toolsets', ['target']) - else: - toolsets = ['target'] - if len(toolsets) > 0: - # Optimization: only do copies if more than one toolset is specified. - for build in toolsets[1:]: - new_target = copy.deepcopy(target) - new_target['toolset'] = build - new_target_list.append(new_target) - target['toolset'] = toolsets[0] - new_target_list.append(target) - data['targets'] = new_target_list - if 'conditions' in data: - for condition in data['conditions']: - if isinstance(condition, list): - for condition_dict in condition[1:]: - ProcessToolsetsInDict(condition_dict) - - -# TODO(mark): I don't love this name. It just means that it's going to load -# a build file that contains targets and is expected to provide a targets dict -# that contains the targets... -def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, - depth, check): - global absolute_build_file_paths - - # If depth is set, predefine the DEPTH variable to be a relative path from - # this build file's directory to the directory identified by depth. - if depth: - # TODO(dglazkov) The backslash/forward-slash replacement at the end is a - # temporary measure. This should really be addressed by keeping all paths - # in POSIX until actual project generation. - d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path)) - if d == '': - variables['DEPTH'] = '.' - else: - variables['DEPTH'] = d.replace('\\', '/') - - # If the generator needs absolue paths, then do so. - if absolute_build_file_paths: - build_file_path = os.path.abspath(build_file_path) - - if build_file_path in data['target_build_files']: - # Already loaded. - return - data['target_build_files'].add(build_file_path) - - gyp.DebugOutput(gyp.DEBUG_INCLUDES, - "Loading Target Build File '%s'" % build_file_path) - - build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables, - includes, True, check) - - # Store DEPTH for later use in generators. - build_file_data['_DEPTH'] = depth - - # Set up the included_files key indicating which .gyp files contributed to - # this target dict. - if 'included_files' in build_file_data: - raise KeyError, build_file_path + ' must not contain included_files key' - - included = GetIncludedBuildFiles(build_file_path, aux_data) - build_file_data['included_files'] = [] - for included_file in included: - # included_file is relative to the current directory, but it needs to - # be made relative to build_file_path's directory. 
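Schematically, the toolset expansion performed by ProcessToolsetsInDict turns one target entry into one entry per toolset when multiple_toolsets is enabled (the target name here is invented):

build_file_data = {
    'targets': [
        {'target_name': 'codegen', 'toolsets': ['host', 'target']},
    ],
}

# With multiple_toolsets enabled, ProcessToolsetsInDict rewrites 'targets' to
# two entries: a deep copy carrying 'toolset': 'target', followed by the
# original entry carrying 'toolset': 'host' (the first name in 'toolsets').
# With multiple_toolsets disabled, every target simply gets 'toolset': 'target'.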
- included_relative = \ - gyp.common.RelativePath(included_file, - os.path.dirname(build_file_path)) - build_file_data['included_files'].append(included_relative) - - ProcessToolsetsInDict(build_file_data) - - # Apply "pre"/"early" variable expansions and condition evaluations. - ProcessVariablesAndConditionsInDict(build_file_data, False, variables, - build_file_path) - - # Look at each project's target_defaults dict, and merge settings into - # targets. - if 'target_defaults' in build_file_data: - index = 0 - if 'targets' in build_file_data: - while index < len(build_file_data['targets']): - # This procedure needs to give the impression that target_defaults is - # used as defaults, and the individual targets inherit from that. - # The individual targets need to be merged into the defaults. Make - # a deep copy of the defaults for each target, merge the target dict - # as found in the input file into that copy, and then hook up the - # copy with the target-specific data merged into it as the replacement - # target dict. - old_target_dict = build_file_data['targets'][index] - new_target_dict = copy.deepcopy(build_file_data['target_defaults']) - MergeDicts(new_target_dict, old_target_dict, - build_file_path, build_file_path) - build_file_data['targets'][index] = new_target_dict - index = index + 1 - else: - raise Exception, \ - "Unable to find targets in build file %s" % build_file_path - - # No longer needed. - del build_file_data['target_defaults'] - - # Look for dependencies. This means that dependency resolution occurs - # after "pre" conditionals and variable expansion, but before "post" - - # in other words, you can't put a "dependencies" section inside a "post" - # conditional within a target. - - if 'targets' in build_file_data: - for target_dict in build_file_data['targets']: - if 'dependencies' not in target_dict: - continue - for dependency in target_dict['dependencies']: - other_build_file = \ - gyp.common.ResolveTarget(build_file_path, dependency, None)[0] - try: - LoadTargetBuildFile(other_build_file, data, aux_data, variables, - includes, depth, check) - except Exception, e: - gyp.common.ExceptionAppend( - e, 'while loading dependencies of %s' % build_file_path) - raise - - return data - - -# Look for the bracket that matches the first bracket seen in a -# string, and return the start and end as a tuple. For example, if -# the input is something like "<(foo <(bar)) blah", then it would -# return (1, 13), indicating the entire string except for the leading -# "<" and trailing " blah". -def FindEnclosingBracketGroup(input): - brackets = { '}': '{', - ']': '[', - ')': '(', } - stack = [] - count = 0 - start = -1 - for char in input: - if char in brackets.values(): - stack.append(char) - if start == -1: - start = count - if char in brackets.keys(): - try: - last_bracket = stack.pop() - except IndexError: - return (-1, -1) - if last_bracket != brackets[char]: - return (-1, -1) - if len(stack) == 0: - return (start, count + 1) - count = count + 1 - return (-1, -1) - - -canonical_int_re = re.compile('^(0|-?[1-9][0-9]*)$') - - -def IsStrCanonicalInt(string): - """Returns True if |string| is in its canonical integer form. - - The canonical form is such that str(int(string)) == string. 
- """ - if not isinstance(string, str) or not canonical_int_re.match(string): - return False - - return True - - -early_variable_re = re.compile('(?P(?P<((!?@?)|\|)?)' - '\((?P\s*\[?)' - '(?P.*?)(\]?)\))') -late_variable_re = re.compile('(?P(?P>((!?@?)|\|)?)' - '\((?P\s*\[?)' - '(?P.*?)(\]?)\))') - -# Global cache of results from running commands so they don't have to be run -# more then once. -cached_command_results = {} - - -def FixupPlatformCommand(cmd): - if sys.platform == 'win32': - if type(cmd) == list: - cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:] - else: - cmd = re.sub('^cat ', 'type ', cmd) - return cmd - - -def ExpandVariables(input, is_late, variables, build_file): - # Look for the pattern that gets expanded into variables - if not is_late: - variable_re = early_variable_re - expansion_symbol = '<' - else: - variable_re = late_variable_re - expansion_symbol = '>' - - input_str = str(input) - # Do a quick scan to determine if an expensive regex search is warranted. - if expansion_symbol in input_str: - # Get the entire list of matches as a list of MatchObject instances. - # (using findall here would return strings instead of MatchObjects). - matches = [match for match in variable_re.finditer(input_str)] - else: - matches = None - - output = input_str - if matches: - # Reverse the list of matches so that replacements are done right-to-left. - # That ensures that earlier replacements won't mess up the string in a - # way that causes later calls to find the earlier substituted text instead - # of what's intended for replacement. - matches.reverse() - for match_group in matches: - match = match_group.groupdict() - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Matches: %s" % repr(match)) - # match['replace'] is the substring to look for, match['type'] - # is the character code for the replacement type (< > ! <| >| <@ - # >@ !@), match['is_array'] contains a '[' for command - # arrays, and match['content'] is the name of the variable (< >) - # or command to run (!). - - # run_command is true if a ! variant is used. - run_command = '!' in match['type'] - - # file_list is true if a | variant is used. - file_list = '|' in match['type'] - - # Capture these now so we can adjust them later. - replace_start = match_group.start('replace') - replace_end = match_group.end('replace') - - # Find the ending paren, and re-evaluate the contained string. - (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:]) - - # Adjust the replacement range to match the entire command - # found by FindEnclosingBracketGroup (since the variable_re - # probably doesn't match the entire command if it contained - # nested variables). - replace_end = replace_start + c_end - - # Find the "real" replacement, matching the appropriate closing - # paren, and adjust the replacement start and end. - replacement = input_str[replace_start:replace_end] - - # Figure out what the contents of the variable parens are. - contents_start = replace_start + c_start + 1 - contents_end = replace_end - 1 - contents = input_str[contents_start:contents_end] - - # Do filter substitution now for <|(). - # Admittedly, this is different than the evaluation order in other - # contexts. However, since filtration has no chance to run on <|(), - # this seems like the only obvious way to give them access to filters. 
- if file_list: - processed_variables = copy.deepcopy(variables) - ProcessListFiltersInDict(contents, processed_variables) - # Recurse to expand variables in the contents - contents = ExpandVariables(contents, is_late, - processed_variables, build_file) - else: - # Recurse to expand variables in the contents - contents = ExpandVariables(contents, is_late, variables, build_file) - - # Strip off leading/trailing whitespace so that variable matches are - # simpler below (and because they are rarely needed). - contents = contents.strip() - - # expand_to_list is true if an @ variant is used. In that case, - # the expansion should result in a list. Note that the caller - # is to be expecting a list in return, and not all callers do - # because not all are working in list context. Also, for list - # expansions, there can be no other text besides the variable - # expansion in the input string. - expand_to_list = '@' in match['type'] and input_str == replacement - - if run_command or file_list: - # Find the build file's directory, so commands can be run or file lists - # generated relative to it. - build_file_dir = os.path.dirname(build_file) - if build_file_dir == '': - # If build_file is just a leaf filename indicating a file in the - # current directory, build_file_dir might be an empty string. Set - # it to None to signal to subprocess.Popen that it should run the - # command in the current directory. - build_file_dir = None - - # Support <|(listfile.txt ...) which generates a file - # containing items from a gyp list, generated at gyp time. - # This works around actions/rules which have more inputs than will - # fit on the command line. - if file_list: - if type(contents) == list: - contents_list = contents - else: - contents_list = contents.split(' ') - replacement = contents_list[0] - path = replacement - if not os.path.isabs(path): - path = os.path.join(build_file_dir, path) - f = gyp.common.WriteOnDiff(path) - for i in contents_list[1:]: - f.write('%s\n' % i) - f.close() - - elif run_command: - use_shell = True - if match['is_array']: - contents = eval(contents) - use_shell = False - - # Check for a cached value to avoid executing commands, or generating - # file lists more than once. - # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is - # possible that the command being invoked depends on the current - # directory. For that case the syntax needs to be extended so that the - # directory is also used in cache_key (it becomes a tuple). - # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory, - # someone could author a set of GYP files where each time the command - # is invoked it produces different output by design. When the need - # arises, the syntax should be extended to support no caching off a - # command's output so it is run every time. - cache_key = str(contents) - cached_value = cached_command_results.get(cache_key, None) - if cached_value is None: - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Executing command '%s' in directory '%s'" % - (contents,build_file_dir)) - - # Fix up command with platform specific workarounds. - contents = FixupPlatformCommand(contents) - p = subprocess.Popen(contents, shell=use_shell, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - stdin=subprocess.PIPE, - cwd=build_file_dir) - - (p_stdout, p_stderr) = p.communicate('') - - if p.wait() != 0 or p_stderr: - sys.stderr.write(p_stderr) - # Simulate check_call behavior, since check_call only exists - # in python 2.5 and later. 
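A hypothetical use of the command form, to make the caching comment above concrete (the variable name and command are invented):

variables_fragment = {
    'variables': {
        # Runs once at gyp time; later occurrences of the exact same command
        # string are served from cached_command_results, no matter which .gyp
        # file contains them.  If the command's result depends on the current
        # directory, that sharing is exactly the limitation gyp issue 112
        # above describes.
        'revision': '<!(cat REVISION)',
    },
}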
- raise Exception("Call to '%s' returned exit status %d." % - (contents, p.returncode)) - replacement = p_stdout.rstrip() - - cached_command_results[cache_key] = replacement - else: - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Had cache value for command '%s' in directory '%s'" % - (contents,build_file_dir)) - replacement = cached_value - - else: - if not contents in variables: - raise KeyError, 'Undefined variable ' + contents + \ - ' in ' + build_file - replacement = variables[contents] - - if isinstance(replacement, list): - for item in replacement: - if not isinstance(item, str) and not isinstance(item, int): - raise TypeError, 'Variable ' + contents + \ - ' must expand to a string or list of strings; ' + \ - 'list contains a ' + \ - item.__class__.__name__ - # Run through the list and handle variable expansions in it. Since - # the list is guaranteed not to contain dicts, this won't do anything - # with conditions sections. - ProcessVariablesAndConditionsInList(replacement, is_late, variables, - build_file) - elif not isinstance(replacement, str) and \ - not isinstance(replacement, int): - raise TypeError, 'Variable ' + contents + \ - ' must expand to a string or list of strings; ' + \ - 'found a ' + replacement.__class__.__name__ - - if expand_to_list: - # Expanding in list context. It's guaranteed that there's only one - # replacement to do in |input_str| and that it's this replacement. See - # above. - if isinstance(replacement, list): - # If it's already a list, make a copy. - output = replacement[:] - else: - # Split it the same way sh would split arguments. - output = shlex.split(str(replacement)) - else: - # Expanding in string context. - encoded_replacement = '' - if isinstance(replacement, list): - # When expanding a list into string context, turn the list items - # into a string in a way that will work with a subprocess call. - # - # TODO(mark): This isn't completely correct. This should - # call a generator-provided function that observes the - # proper list-to-argument quoting rules on a specific - # platform instead of just calling the POSIX encoding - # routine. - encoded_replacement = gyp.common.EncodePOSIXShellList(replacement) - else: - encoded_replacement = replacement - - output = output[:replace_start] + str(encoded_replacement) + \ - output[replace_end:] - # Prepare for the next match iteration. - input_str = output - - # Look for more matches now that we've replaced some, to deal with - # expanding local variables (variables defined in the same - # variables block as this one). - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Found output %s, recursing." % repr(output)) - if isinstance(output, list): - new_output = [] - for item in output: - new_output.append(ExpandVariables(item, is_late, variables, build_file)) - output = new_output - else: - output = ExpandVariables(output, is_late, variables, build_file) - - # Convert all strings that are canonically-represented integers into integers. - if isinstance(output, list): - for index in xrange(0, len(output)): - if IsStrCanonicalInt(output[index]): - output[index] = int(output[index]) - elif IsStrCanonicalInt(output): - output = int(output) - - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Expanding %s to %s" % (repr(input), repr(output))) - return output - - -def ProcessConditionsInDict(the_dict, is_late, variables, build_file): - # Process a 'conditions' or 'target_conditions' section in the_dict, - # depending on is_late. If is_late is False, 'conditions' is used. 
- # - # Each item in a conditions list consists of cond_expr, a string expression - # evaluated as the condition, and true_dict, a dict that will be merged into - # the_dict if cond_expr evaluates to true. Optionally, a third item, - # false_dict, may be present. false_dict is merged into the_dict if - # cond_expr evaluates to false. - # - # Any dict merged into the_dict will be recursively processed for nested - # conditionals and other expansions, also according to is_late, immediately - # prior to being merged. - - if not is_late: - conditions_key = 'conditions' - else: - conditions_key = 'target_conditions' - - if not conditions_key in the_dict: - return - - conditions_list = the_dict[conditions_key] - # Unhook the conditions list, it's no longer needed. - del the_dict[conditions_key] - - for condition in conditions_list: - if not isinstance(condition, list): - raise TypeError, conditions_key + ' must be a list' - if len(condition) != 2 and len(condition) != 3: - # It's possible that condition[0] won't work in which case this - # attempt will raise its own IndexError. That's probably fine. - raise IndexError, conditions_key + ' ' + condition[0] + \ - ' must be length 2 or 3, not ' + len(condition) - - [cond_expr, true_dict] = condition[0:2] - false_dict = None - if len(condition) == 3: - false_dict = condition[2] - - # Do expansions on the condition itself. Since the conditon can naturally - # contain variable references without needing to resort to GYP expansion - # syntax, this is of dubious value for variables, but someone might want to - # use a command expansion directly inside a condition. - cond_expr_expanded = ExpandVariables(cond_expr, is_late, variables, - build_file) - if not isinstance(cond_expr_expanded, str) and \ - not isinstance(cond_expr_expanded, int): - raise ValueError, \ - 'Variable expansion in this context permits str and int ' + \ - 'only, found ' + expanded.__class__.__name__ - - try: - ast_code = compile(cond_expr_expanded, '', 'eval') - - if eval(ast_code, {'__builtins__': None}, variables): - merge_dict = true_dict - else: - merge_dict = false_dict - except SyntaxError, e: - syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s ' - 'at character %d.' % - (str(e.args[0]), e.text, build_file, e.offset), - e.filename, e.lineno, e.offset, e.text) - raise syntax_error - except NameError, e: - gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' % - (cond_expr_expanded, build_file)) - raise - - if merge_dict != None: - # Expand variables and nested conditinals in the merge_dict before - # merging it. - ProcessVariablesAndConditionsInDict(merge_dict, is_late, - variables, build_file) - - MergeDicts(the_dict, merge_dict, build_file, build_file) - - -def LoadAutomaticVariablesFromDict(variables, the_dict): - # Any keys with plain string values in the_dict become automatic variables. - # The variable name is the key name with a "_" character prepended. - for key, value in the_dict.iteritems(): - if isinstance(value, str) or isinstance(value, int) or \ - isinstance(value, list): - variables['_' + key] = value - - -def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): - # Any keys in the_dict's "variables" dict, if it has one, becomes a - # variable. The variable name is the key name in the "variables" dict. - # Variables that end with the % character are set only if they are unset in - # the variables dict. the_dict_key is the name of the key that accesses - # the_dict in the_dict's parent dict. 
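A hypothetical "conditions" entry of the shape described above, with both a true and a false dict:

condition_entry = [
    'OS=="mac"',                    # cond_expr, evaluated against the variables dict
    {'defines': ['USE_COCOA']},     # true_dict, merged into the_dict when true
    {'defines': ['USE_GTK']},       # false_dict (optional), merged when false
]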
If the_dict's parent is not a dict - # (it could be a list or it could be parentless because it is a root dict), - # the_dict_key will be None. - for key, value in the_dict.get('variables', {}).iteritems(): - if not isinstance(value, str) and not isinstance(value, int) and \ - not isinstance(value, list): - continue - - if key.endswith('%'): - variable_name = key[:-1] - if variable_name in variables: - # If the variable is already set, don't set it. - continue - if the_dict_key is 'variables' and variable_name in the_dict: - # If the variable is set without a % in the_dict, and the_dict is a - # variables dict (making |variables| a varaibles sub-dict of a - # variables dict), use the_dict's definition. - value = the_dict[variable_name] - else: - variable_name = key - - variables[variable_name] = value - - -def ProcessVariablesAndConditionsInDict(the_dict, is_late, variables_in, - build_file, the_dict_key=None): - """Handle all variable and command expansion and conditional evaluation. - - This function is the public entry point for all variable expansions and - conditional evaluations. The variables_in dictionary will not be modified - by this function. - """ - - # Make a copy of the variables_in dict that can be modified during the - # loading of automatics and the loading of the variables dict. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - - if 'variables' in the_dict: - # Make sure all the local variables are added to the variables - # list before we process them so that you can reference one - # variable from another. They will be fully expanded by recursion - # in ExpandVariables. - for key, value in the_dict['variables'].iteritems(): - variables[key] = value - - # Handle the associated variables dict first, so that any variable - # references within can be resolved prior to using them as variables. - # Pass a copy of the variables dict to avoid having it be tainted. - # Otherwise, it would have extra automatics added for everything that - # should just be an ordinary variable in this scope. - ProcessVariablesAndConditionsInDict(the_dict['variables'], is_late, - variables, build_file, 'variables') - - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - for key, value in the_dict.iteritems(): - # Skip "variables", which was already processed if present. - if key != 'variables' and isinstance(value, str): - expanded = ExpandVariables(value, is_late, variables, build_file) - if not isinstance(expanded, str) and not isinstance(expanded, int): - raise ValueError, \ - 'Variable expansion in this context permits str and int ' + \ - 'only, found ' + expanded.__class__.__name__ + ' for ' + key - the_dict[key] = expanded - - # Variable expansion may have resulted in changes to automatics. Reload. - # TODO(mark): Optimization: only reload if no changes were made. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - # Process conditions in this dict. This is done after variable expansion - # so that conditions may take advantage of expanded variables. For example, - # if the_dict contains: - # {'type': '<(library_type)', - # 'conditions': [['_type=="static_library"', { ... }]]}, - # _type, as used in the condition, will only be set to the value of - # library_type if variable expansion is performed before condition - # processing. 
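A hypothetical "variables" dict using the '%' convention described above:

variables_with_default = {
    'variables': {
        # 'use_custom_allocator%' only takes effect if 'use_custom_allocator'
        # has not already been defined by an enclosing scope; without the '%'
        # it would unconditionally overwrite the inherited value.
        'use_custom_allocator%': 0,
    },
}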
However, condition processing should occur prior to recursion - # so that variables (both automatic and "variables" dict type) may be - # adjusted by conditions sections, merged into the_dict, and have the - # intended impact on contained dicts. - # - # This arrangement means that a "conditions" section containing a "variables" - # section will only have those variables effective in subdicts, not in - # the_dict. The workaround is to put a "conditions" section within a - # "variables" section. For example: - # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]], - # 'defines': ['<(define)'], - # 'my_subdict': {'defines': ['<(define)']}}, - # will not result in "IS_MAC" being appended to the "defines" list in the - # current scope but would result in it being appended to the "defines" list - # within "my_subdict". By comparison: - # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]}, - # 'defines': ['<(define)'], - # 'my_subdict': {'defines': ['<(define)']}}, - # will append "IS_MAC" to both "defines" lists. - - # Evaluate conditions sections, allowing variable expansions within them - # as well as nested conditionals. This will process a 'conditions' or - # 'target_conditions' section, perform appropriate merging and recursive - # conditional and variable processing, and then remove the conditions section - # from the_dict if it is present. - ProcessConditionsInDict(the_dict, is_late, variables, build_file) - - # Conditional processing may have resulted in changes to automatics or the - # variables dict. Reload. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - # Recurse into child dicts, or process child lists which may result in - # further recursion into descendant dicts. - for key, value in the_dict.iteritems(): - # Skip "variables" and string values, which were already processed if - # present. - if key == 'variables' or isinstance(value, str): - continue - if isinstance(value, dict): - # Pass a copy of the variables dict so that subdicts can't influence - # parents. - ProcessVariablesAndConditionsInDict(value, is_late, variables, - build_file, key) - elif isinstance(value, list): - # The list itself can't influence the variables dict, and - # ProcessVariablesAndConditionsInList will make copies of the variables - # dict if it needs to pass it to something that can influence it. No - # copy is necessary here. - ProcessVariablesAndConditionsInList(value, is_late, variables, - build_file) - elif not isinstance(value, int): - raise TypeError, 'Unknown type ' + value.__class__.__name__ + \ - ' for ' + key - - -def ProcessVariablesAndConditionsInList(the_list, is_late, variables, - build_file): - # Iterate using an index so that new values can be assigned into the_list. - index = 0 - while index < len(the_list): - item = the_list[index] - if isinstance(item, dict): - # Make a copy of the variables dict so that it won't influence anything - # outside of its own scope. 
- ProcessVariablesAndConditionsInDict(item, is_late, variables, build_file) - elif isinstance(item, list): - ProcessVariablesAndConditionsInList(item, is_late, variables, build_file) - elif isinstance(item, str): - expanded = ExpandVariables(item, is_late, variables, build_file) - if isinstance(expanded, str) or isinstance(expanded, int): - the_list[index] = expanded - elif isinstance(expanded, list): - del the_list[index] - for expanded_item in expanded: - the_list.insert(index, expanded_item) - index = index + 1 - - # index now identifies the next item to examine. Continue right now - # without falling into the index increment below. - continue - else: - raise ValueError, \ - 'Variable expansion in this context permits strings and ' + \ - 'lists only, found ' + expanded.__class__.__name__ + ' at ' + \ - index - elif not isinstance(item, int): - raise TypeError, 'Unknown type ' + item.__class__.__name__ + \ - ' at index ' + index - index = index + 1 - - -def BuildTargetsDict(data): - """Builds a dict mapping fully-qualified target names to their target dicts. - - |data| is a dict mapping loaded build files by pathname relative to the - current directory. Values in |data| are build file contents. For each - |data| value with a "targets" key, the value of the "targets" key is taken - as a list containing target dicts. Each target's fully-qualified name is - constructed from the pathname of the build file (|data| key) and its - "target_name" property. These fully-qualified names are used as the keys - in the returned dict. These keys provide access to the target dicts, - the dicts in the "targets" lists. - """ - - targets = {} - for build_file in data['target_build_files']: - for target in data[build_file].get('targets', []): - target_name = gyp.common.QualifiedTarget(build_file, - target['target_name'], - target['toolset']) - if target_name in targets: - raise KeyError, 'Duplicate target definitions for ' + target_name - targets[target_name] = target - - return targets - - -def QualifyDependencies(targets): - """Make dependency links fully-qualified relative to the current directory. - - |targets| is a dict mapping fully-qualified target names to their target - dicts. For each target in this dict, keys known to contain dependency - links are examined, and any dependencies referenced will be rewritten - so that they are fully-qualified and relative to the current directory. - All rewritten dependencies are suitable for use as keys to |targets| or a - similar dict. - """ - - for target, target_dict in targets.iteritems(): - target_build_file = gyp.common.BuildFile(target) - toolset = target_dict['toolset'] - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - for index in xrange(0, len(dependencies)): - dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( - target_build_file, dependencies[index], toolset) - global multiple_toolsets - if not multiple_toolsets: - # Ignore toolset specification in the dependency if it is specified. - dep_toolset = toolset - dependency = gyp.common.QualifiedTarget(dep_file, - dep_target, - dep_toolset) - dependencies[index] = dependency - - # Make sure anything appearing in a list other than "dependencies" also - # appears in the "dependencies" list. 
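The in-place splice used when a string item expands to a list (delete the item, then insert each expansion element at the same position) can be shown in isolation; splice is a hypothetical helper, not part of this module:

def splice(the_list, index, expansion):
    # Replace the item at |index| with the elements of |expansion|, in place.
    del the_list[index]
    for item in expansion:
        the_list.insert(index, item)
        index += 1
    return index  # index of the next item to examine

sources = ['a.cc', '<(extra_sources)', 'z.cc']
splice(sources, 1, ['b.cc', 'c.cc'])
print(sources)  # ['a.cc', 'b.cc', 'c.cc', 'z.cc']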
- if dependency_key != 'dependencies' and \ - dependency not in target_dict['dependencies']: - raise KeyError, 'Found ' + dependency + ' in ' + dependency_key + \ - ' of ' + target + ', but not in dependencies' - - -def ExpandWildcardDependencies(targets, data): - """Expands dependencies specified as build_file:*. - - For each target in |targets|, examines sections containing links to other - targets. If any such section contains a link of the form build_file:*, it - is taken as a wildcard link, and is expanded to list each target in - build_file. The |data| dict provides access to build file dicts. - - Any target that does not wish to be included by wildcard can provide an - optional "suppress_wildcard" key in its target dict. When present and - true, a wildcard dependency link will not include such targets. - - All dependency names, including the keys to |targets| and the values in each - dependency list, must be qualified when this function is called. - """ - - for target, target_dict in targets.iteritems(): - toolset = target_dict['toolset'] - target_build_file = gyp.common.BuildFile(target) - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - - # Loop this way instead of "for dependency in" or "for index in xrange" - # because the dependencies list will be modified within the loop body. - index = 0 - while index < len(dependencies): - (dependency_build_file, dependency_target, dependency_toolset) = \ - gyp.common.ParseQualifiedTarget(dependencies[index]) - if dependency_target != '*' and dependency_toolset != '*': - # Not a wildcard. Keep it moving. - index = index + 1 - continue - - if dependency_build_file == target_build_file: - # It's an error for a target to depend on all other targets in - # the same file, because a target cannot depend on itself. - raise KeyError, 'Found wildcard in ' + dependency_key + ' of ' + \ - target + ' referring to same build file' - - # Take the wildcard out and adjust the index so that the next - # dependency in the list will be processed the next time through the - # loop. - del dependencies[index] - index = index - 1 - - # Loop through the targets in the other build file, adding them to - # this target's list of dependencies in place of the removed - # wildcard. - dependency_target_dicts = data[dependency_build_file]['targets'] - for dependency_target_dict in dependency_target_dicts: - if int(dependency_target_dict.get('suppress_wildcard', False)): - continue - dependency_target_name = dependency_target_dict['target_name'] - if (dependency_target != '*' and - dependency_target != dependency_target_name): - continue - dependency_target_toolset = dependency_target_dict['toolset'] - if (dependency_toolset != '*' and - dependency_toolset != dependency_target_toolset): - continue - dependency = gyp.common.QualifiedTarget(dependency_build_file, - dependency_target_name, - dependency_target_toolset) - index = index + 1 - dependencies.insert(index, dependency) - - index = index + 1 - - -class DependencyGraphNode(object): - """ - - Attributes: - ref: A reference to an object that this DependencyGraphNode represents. - dependencies: List of DependencyGraphNodes on which this one depends. - dependents: List of DependencyGraphNodes that depend on this one. 
- """ - - class CircularException(Exception): - pass - - def __init__(self, ref): - self.ref = ref - self.dependencies = [] - self.dependents = [] - - def FlattenToList(self): - # flat_list is the sorted list of dependencies - actually, the list items - # are the "ref" attributes of DependencyGraphNodes. Every target will - # appear in flat_list after all of its dependencies, and before all of its - # dependents. - flat_list = [] - - # in_degree_zeros is the list of DependencyGraphNodes that have no - # dependencies not in flat_list. Initially, it is a copy of the children - # of this node, because when the graph was built, nodes with no - # dependencies were made implicit dependents of the root node. - in_degree_zeros = self.dependents[:] - - while in_degree_zeros: - # Nodes in in_degree_zeros have no dependencies not in flat_list, so they - # can be appended to flat_list. Take these nodes out of in_degree_zeros - # as work progresses, so that the next node to process from the list can - # always be accessed at a consistent position. - node = in_degree_zeros.pop(0) - flat_list.append(node.ref) - - # Look at dependents of the node just added to flat_list. Some of them - # may now belong in in_degree_zeros. - for node_dependent in node.dependents: - is_in_degree_zero = True - for node_dependent_dependency in node_dependent.dependencies: - if not node_dependent_dependency.ref in flat_list: - # The dependent one or more dependencies not in flat_list. There - # will be more chances to add it to flat_list when examining - # it again as a dependent of those other dependencies, provided - # that there are no cycles. - is_in_degree_zero = False - break - - if is_in_degree_zero: - # All of the dependent's dependencies are already in flat_list. Add - # it to in_degree_zeros where it will be processed in a future - # iteration of the outer loop. - in_degree_zeros.append(node_dependent) - - return flat_list - - def DirectDependencies(self, dependencies=None): - """Returns a list of just direct dependencies.""" - if dependencies == None: - dependencies = [] - - for dependency in self.dependencies: - # Check for None, corresponding to the root node. - if dependency.ref != None and dependency.ref not in dependencies: - dependencies.append(dependency.ref) - - return dependencies - - def _AddImportedDependencies(self, targets, dependencies=None): - """Given a list of direct dependencies, adds indirect dependencies that - other dependencies have declared to export their settings. - - This method does not operate on self. Rather, it operates on the list - of dependencies in the |dependencies| argument. For each dependency in - that list, if any declares that it exports the settings of one of its - own dependencies, those dependencies whose settings are "passed through" - are added to the list. As new items are added to the list, they too will - be processed, so it is possible to import settings through multiple levels - of dependencies. - - This method is not terribly useful on its own, it depends on being - "primed" with a list of direct dependencies such as one provided by - DirectDependencies. DirectAndImportedDependencies is intended to be the - public entry point. - """ - - if dependencies == None: - dependencies = [] - - index = 0 - while index < len(dependencies): - dependency = dependencies[index] - dependency_dict = targets[dependency] - # Add any dependencies whose settings should be imported to the list - # if not already present. 
Newly-added items will be checked for - # their own imports when the list iteration reaches them. - # Rather than simply appending new items, insert them after the - # dependency that exported them. This is done to more closely match - # the depth-first method used by DeepDependencies. - add_index = 1 - for imported_dependency in \ - dependency_dict.get('export_dependent_settings', []): - if imported_dependency not in dependencies: - dependencies.insert(index + add_index, imported_dependency) - add_index = add_index + 1 - index = index + 1 - - return dependencies - - def DirectAndImportedDependencies(self, targets, dependencies=None): - """Returns a list of a target's direct dependencies and all indirect - dependencies that a dependency has advertised settings should be exported - through the dependency for. - """ - - dependencies = self.DirectDependencies(dependencies) - return self._AddImportedDependencies(targets, dependencies) - - def DeepDependencies(self, dependencies=None): - """Returns a list of all of a target's dependencies, recursively.""" - if dependencies == None: - dependencies = [] - - for dependency in self.dependencies: - # Check for None, corresponding to the root node. - if dependency.ref != None and dependency.ref not in dependencies: - dependencies.append(dependency.ref) - dependency.DeepDependencies(dependencies) - - return dependencies - - def LinkDependencies(self, targets, dependencies=None, initial=True): - """Returns a list of dependency targets that are linked into this target. - - This function has a split personality, depending on the setting of - |initial|. Outside callers should always leave |initial| at its default - setting. - - When adding a target to the list of dependencies, this function will - recurse into itself with |initial| set to False, to collect depenedencies - that are linked into the linkable target for which the list is being built. - """ - if dependencies == None: - dependencies = [] - - # Check for None, corresponding to the root node. - if self.ref == None: - return dependencies - - # It's kind of sucky that |targets| has to be passed into this function, - # but that's presently the easiest way to access the target dicts so that - # this function can find target types. - - if not 'target_name' in targets[self.ref]: - raise Exception("Missing 'target_name' field in target.") - - try: - target_type = targets[self.ref]['type'] - except KeyError, e: - raise Exception("Missing 'type' field in target %s" % - targets[self.ref]['target_name']) - - is_linkable = target_type in linkable_types - - if initial and not is_linkable: - # If this is the first target being examined and it's not linkable, - # return an empty list of link dependencies, because the link - # dependencies are intended to apply to the target itself (initial is - # True) and this target won't be linked. - return dependencies - - # Executables and loadable modules are already fully and finally linked. - # Nothing else can be a link dependency of them, there can only be - # dependencies in the sense that a dependent target might run an - # executable or load the loadable_module. - if not initial and target_type in ('executable', 'loadable_module'): - return dependencies - - # The target is linkable, add it to the list of link dependencies. 
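For context, a hedged illustration of the data that _AddImportedDependencies walks: a dependency can list some of its own dependencies under 'export_dependent_settings' so their settings flow through to dependents. The target names and the simplified direct_and_imported helper below are illustrative only:

# Hypothetical, fully-qualified target dicts: depending on app.gyp:ui also
# imports base.gyp:base's dependent settings, because ui re-exports them.
targets = {
    'app.gyp:app#target':  {'dependencies': ['app.gyp:ui#target']},
    'app.gyp:ui#target':   {'dependencies': ['base.gyp:base#target'],
                            'export_dependent_settings': ['base.gyp:base#target']},
    'base.gyp:base#target': {},
}

def direct_and_imported(target_name, targets):
    deps = list(targets[target_name].get('dependencies', []))
    index = 0
    while index < len(deps):
        for exported in targets[deps[index]].get('export_dependent_settings', []):
            if exported not in deps:
                deps.insert(index + 1, exported)  # keep exporter-then-exported order
        index += 1
    return deps

print(direct_and_imported('app.gyp:app#target', targets))
# ['app.gyp:ui#target', 'base.gyp:base#target']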
- if self.ref not in dependencies: - if target_type != 'none': - # Special case: "none" type targets don't produce any linkable products - # and shouldn't be exposed as link dependencies, although dependencies - # of "none" type targets may still be link dependencies. - dependencies.append(self.ref) - if initial or not is_linkable: - # If this is a subsequent target and it's linkable, don't look any - # further for linkable dependencies, as they'll already be linked into - # this target linkable. Always look at dependencies of the initial - # target, and always look at dependencies of non-linkables. - for dependency in self.dependencies: - dependency.LinkDependencies(targets, dependencies, False) - - return dependencies - - -def BuildDependencyList(targets): - # Create a DependencyGraphNode for each target. Put it into a dict for easy - # access. - dependency_nodes = {} - for target, spec in targets.iteritems(): - if not target in dependency_nodes: - dependency_nodes[target] = DependencyGraphNode(target) - - # Set up the dependency links. Targets that have no dependencies are treated - # as dependent on root_node. - root_node = DependencyGraphNode(None) - for target, spec in targets.iteritems(): - target_node = dependency_nodes[target] - target_build_file = gyp.common.BuildFile(target) - if not 'dependencies' in spec or len(spec['dependencies']) == 0: - target_node.dependencies = [root_node] - root_node.dependents.append(target_node) - else: - dependencies = spec['dependencies'] - for index in xrange(0, len(dependencies)): - try: - dependency = dependencies[index] - dependency_node = dependency_nodes[dependency] - target_node.dependencies.append(dependency_node) - dependency_node.dependents.append(target_node) - except KeyError, e: - gyp.common.ExceptionAppend(e, - 'while trying to load target %s' % target) - raise - - flat_list = root_node.FlattenToList() - - # If there's anything left unvisited, there must be a circular dependency - # (cycle). If you need to figure out what's wrong, look for elements of - # targets that are not in flat_list. - if len(flat_list) != len(targets): - raise DependencyGraphNode.CircularException, \ - 'Some targets not reachable, cycle in dependency graph detected' - - return [dependency_nodes, flat_list] - - -def VerifyNoGYPFileCircularDependencies(targets): - # Create a DependencyGraphNode for each gyp file containing a target. Put - # it into a dict for easy access. - dependency_nodes = {} - for target in targets.iterkeys(): - build_file = gyp.common.BuildFile(target) - if not build_file in dependency_nodes: - dependency_nodes[build_file] = DependencyGraphNode(build_file) - - # Set up the dependency links. - for target, spec in targets.iteritems(): - build_file = gyp.common.BuildFile(target) - build_file_node = dependency_nodes[build_file] - target_dependencies = spec.get('dependencies', []) - for dependency in target_dependencies: - try: - dependency_build_file = gyp.common.BuildFile(dependency) - if dependency_build_file == build_file: - # A .gyp file is allowed to refer back to itself. - continue - dependency_node = dependency_nodes[dependency_build_file] - if dependency_node not in build_file_node.dependencies: - build_file_node.dependencies.append(dependency_node) - dependency_node.dependents.append(build_file_node) - except KeyError, e: - gyp.common.ExceptionAppend( - e, 'while computing dependencies of .gyp file %s' % build_file) - raise - - # Files that have no dependencies are treated as dependent on root_node. 
- root_node = DependencyGraphNode(None) - for build_file_node in dependency_nodes.itervalues(): - if len(build_file_node.dependencies) == 0: - build_file_node.dependencies.append(root_node) - root_node.dependents.append(build_file_node) - - flat_list = root_node.FlattenToList() - - # If there's anything left unvisited, there must be a circular dependency - # (cycle). - if len(flat_list) != len(dependency_nodes): - bad_files = [] - for file in dependency_nodes.iterkeys(): - if not file in flat_list: - bad_files.append(file) - raise DependencyGraphNode.CircularException, \ - 'Some files not reachable, cycle in .gyp file dependency graph ' + \ - 'detected involving some or all of: ' + \ - ' '.join(bad_files) - - -def DoDependentSettings(key, flat_list, targets, dependency_nodes): - # key should be one of all_dependent_settings, direct_dependent_settings, - # or link_settings. - - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - - if key == 'all_dependent_settings': - dependencies = dependency_nodes[target].DeepDependencies() - elif key == 'direct_dependent_settings': - dependencies = \ - dependency_nodes[target].DirectAndImportedDependencies(targets) - elif key == 'link_settings': - dependencies = dependency_nodes[target].LinkDependencies(targets) - else: - raise KeyError, "DoDependentSettings doesn't know how to determine " + \ - 'dependencies for ' + key - - for dependency in dependencies: - dependency_dict = targets[dependency] - if not key in dependency_dict: - continue - dependency_build_file = gyp.common.BuildFile(dependency) - MergeDicts(target_dict, dependency_dict[key], - build_file, dependency_build_file) - - -def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes): - # Recompute target "dependencies" properties. For each static library - # target, remove "dependencies" entries referring to other static libraries, - # unless the dependency has the "hard_dependency" attribute set. For each - # linkable target, add a "dependencies" entry referring to all of the - # target's computed list of link dependencies (including static libraries - # if no such entry is already present. - for target in flat_list: - target_dict = targets[target] - target_type = target_dict['type'] - - if target_type == 'static_library': - if not 'dependencies' in target_dict: - continue - - target_dict['dependencies_original'] = target_dict.get( - 'dependencies', [])[:] - - index = 0 - while index < len(target_dict['dependencies']): - dependency = target_dict['dependencies'][index] - dependency_dict = targets[dependency] - if dependency_dict['type'] == 'static_library' and \ - (not 'hard_dependency' in dependency_dict or \ - not dependency_dict['hard_dependency']): - # A static library should not depend on another static library unless - # the dependency relationship is "hard," which should only be done - # when a dependent relies on some side effect other than just the - # build product, like a rule or action output. Take the dependency - # out of the list, and don't increment index because the next - # dependency to analyze will shift into the index formerly occupied - # by the one being removed. - del target_dict['dependencies'][index] - else: - index = index + 1 - - # If the dependencies list is empty, it's not needed, so unhook it. - if len(target_dict['dependencies']) == 0: - del target_dict['dependencies'] - - elif target_type in linkable_types: - # Get a list of dependency targets that should be linked into this - # target. 
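A hedged before/after illustration of the adjustment (made-up targets, result stated rather than computed): a static library's soft dependency on another static library is dropped, while the linkable at the top picks up both as direct link dependencies.

# Hypothetical targets before adjustment: an executable links liba, which in
# turn depends on libb; both libraries are static.
before = {
    'app.gyp:app#target':  {'type': 'executable',
                            'dependencies': ['app.gyp:liba#target']},
    'app.gyp:liba#target': {'type': 'static_library',
                            'dependencies': ['app.gyp:libb#target']},
    'app.gyp:libb#target': {'type': 'static_library'},
}
# Expected shape after AdjustStaticLibraryDependencies (stated, not computed):
# liba keeps its old list in 'dependencies_original' but loses the soft
# dependency on libb, and app gains libb as a direct link dependency.
expected_app_dependencies = ['app.gyp:liba#target', 'app.gyp:libb#target']
print(expected_app_dependencies)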
Add them to the dependencies list if they're not already - # present. - - link_dependencies = dependency_nodes[target].LinkDependencies(targets) - for dependency in link_dependencies: - if dependency == target: - continue - if not 'dependencies' in target_dict: - target_dict['dependencies'] = [] - if not dependency in target_dict['dependencies']: - target_dict['dependencies'].append(dependency) - -# Initialize this here to speed up MakePathRelative. -exception_re = re.compile(r'''["']?[-/$<>]''') - - -def MakePathRelative(to_file, fro_file, item): - # If item is a relative path, it's relative to the build file dict that it's - # coming from. Fix it up to make it relative to the build file dict that - # it's going into. - # Exception: any |item| that begins with these special characters is - # returned without modification. - # / Used when a path is already absolute (shortcut optimization; - # such paths would be returned as absolute anyway) - # $ Used for build environment variables - # - Used for some build environment flags (such as -lapr-1 in a - # "libraries" section) - # < Used for our own variable and command expansions (see ExpandVariables) - # > Used for our own variable and command expansions (see ExpandVariables) - # - # "/' Used when a value is quoted. If these are present, then we - # check the second character instead. - # - if to_file == fro_file or exception_re.match(item): - return item - else: - # TODO(dglazkov) The backslash/forward-slash replacement at the end is a - # temporary measure. This should really be addressed by keeping all paths - # in POSIX until actual project generation. - ret = os.path.normpath(os.path.join( - gyp.common.RelativePath(os.path.dirname(fro_file), - os.path.dirname(to_file)), - item)).replace('\\', '/') - if item[-1] == '/': - ret += '/' - return ret - -def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True): - prepend_index = 0 - - for item in fro: - singleton = False - if isinstance(item, str) or isinstance(item, int): - # The cheap and easy case. - if is_paths: - to_item = MakePathRelative(to_file, fro_file, item) - else: - to_item = item - - if not isinstance(item, str) or not item.startswith('-'): - # Any string that doesn't begin with a "-" is a singleton - it can - # only appear once in a list, to be enforced by the list merge append - # or prepend. - singleton = True - elif isinstance(item, dict): - # Make a copy of the dictionary, continuing to look for paths to fix. - # The other intelligent aspects of merge processing won't apply because - # item is being merged into an empty dict. - to_item = {} - MergeDicts(to_item, item, to_file, fro_file) - elif isinstance(item, list): - # Recurse, making a copy of the list. If the list contains any - # descendant dicts, path fixing will occur. Note that here, custom - # values for is_paths and append are dropped; those are only to be - # applied to |to| and |fro|, not sublists of |fro|. append shouldn't - # matter anyway because the new |to_item| list is empty. - to_item = [] - MergeLists(to_item, item, to_file, fro_file) - else: - raise TypeError, \ - 'Attempt to merge list item of unsupported type ' + \ - item.__class__.__name__ - - if append: - # If appending a singleton that's already in the list, don't append. - # This ensures that the earliest occurrence of the item will stay put. - if not singleton or not to_item in to: - to.append(to_item) - else: - # If prepending a singleton that's already in the list, remove the - # existing instance and proceed with the prepend. 
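The core of MakePathRelative is rebasing a path from one build file's directory to another's; a standalone sketch using os.path.relpath instead of gyp.common.RelativePath, with hypothetical file names:

import os

def rebase(to_file, fro_file, item):
    # Re-express |item|, relative to fro_file's directory, as a path relative
    # to to_file's directory, keeping POSIX separators.
    rel = os.path.relpath(os.path.dirname(fro_file) or '.',
                          os.path.dirname(to_file) or '.')
    return os.path.normpath(os.path.join(rel, item)).replace('\\', '/')

print(rebase('app/app.gyp', 'third_party/lib/lib.gyp', 'src/lib.cc'))
# ../third_party/lib/src/lib.cc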
This ensures that the - # item appears at the earliest possible position in the list. - while singleton and to_item in to: - to.remove(to_item) - - # Don't just insert everything at index 0. That would prepend the new - # items to the list in reverse order, which would be an unwelcome - # surprise. - to.insert(prepend_index, to_item) - prepend_index = prepend_index + 1 - - -def MergeDicts(to, fro, to_file, fro_file): - # I wanted to name the parameter "from" but it's a Python keyword... - for k, v in fro.iteritems(): - # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give - # copy semantics. Something else may want to merge from the |fro| dict - # later, and having the same dict ref pointed to twice in the tree isn't - # what anyone wants considering that the dicts may subsequently be - # modified. - if k in to: - bad_merge = False - if isinstance(v, str) or isinstance(v, int): - if not (isinstance(to[k], str) or isinstance(to[k], int)): - bad_merge = True - elif v.__class__ != to[k].__class__: - bad_merge = True - - if bad_merge: - raise TypeError, \ - 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ - ' into incompatible type ' + to[k].__class__.__name__ + \ - ' for key ' + k - if isinstance(v, str) or isinstance(v, int): - # Overwrite the existing value, if any. Cheap and easy. - is_path = IsPathSection(k) - if is_path: - to[k] = MakePathRelative(to_file, fro_file, v) - else: - to[k] = v - elif isinstance(v, dict): - # Recurse, guaranteeing copies will be made of objects that require it. - if not k in to: - to[k] = {} - MergeDicts(to[k], v, to_file, fro_file) - elif isinstance(v, list): - # Lists in dicts can be merged with different policies, depending on - # how the key in the "from" dict (k, the from-key) is written. - # - # If the from-key has ...the to-list will have this action - # this character appended:... applied when receiving the from-list: - # = replace - # + prepend - # ? set, only if to-list does not yet exist - # (none) append - # - # This logic is list-specific, but since it relies on the associated - # dict key, it's checked in this dict-oriented function. - ext = k[-1] - append = True - if ext == '=': - list_base = k[:-1] - lists_incompatible = [list_base, list_base + '?'] - to[list_base] = [] - elif ext == '+': - list_base = k[:-1] - lists_incompatible = [list_base + '=', list_base + '?'] - append = False - elif ext == '?': - list_base = k[:-1] - lists_incompatible = [list_base, list_base + '=', list_base + '+'] - else: - list_base = k - lists_incompatible = [list_base + '=', list_base + '?'] - - # Some combinations of merge policies appearing together are meaningless. - # It's stupid to replace and append simultaneously, for example. Append - # and prepend are the only policies that can coexist. - for list_incompatible in lists_incompatible: - if list_incompatible in fro: - raise KeyError, 'Incompatible list policies ' + k + ' and ' + \ - list_incompatible - - if list_base in to: - if ext == '?': - # If the key ends in "?", the list will only be merged if it doesn't - # already exist. - continue - if not isinstance(to[list_base], list): - # This may not have been checked above if merging in a list with an - # extension character. - raise TypeError, \ - 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ - ' into incompatible type ' + to[list_base].__class__.__name__ + \ - ' for key ' + list_base + '(' + k + ')' - else: - to[list_base] = [] - - # Call MergeLists, which will make copies of objects that require it. 
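A compact, self-contained illustration of the key-suffix list policies described above ('=' replace, '+' prepend, '?' only if unset, no suffix append); merge_lists here is a simplified stand-in, not the MergeLists/MergeDicts pair:

def merge_lists(to, fro):
    for key, value in fro.items():
        base, ext = (key[:-1], key[-1]) if key[-1] in '=+?' else (key, '')
        if ext == '=':                        # replace outright
            to[base] = list(value)
        elif ext == '+':                      # prepend
            to[base] = list(value) + to.get(base, [])
        elif ext == '?':                      # set only if not already present
            to.setdefault(base, list(value))
        else:                                 # append
            to.setdefault(base, []).extend(value)

d = {'defines': ['A'], 'cflags': ['-O2']}
merge_lists(d, {'defines+': ['EARLY'], 'cflags=': ['-O0'], 'ldflags?': ['-pie']})
print(d)  # {'defines': ['EARLY', 'A'], 'cflags': ['-O0'], 'ldflags': ['-pie']}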
- # MergeLists can recurse back into MergeDicts, although this will be - # to make copies of dicts (with paths fixed), there will be no - # subsequent dict "merging" once entering a list because lists are - # always replaced, appended to, or prepended to. - is_paths = IsPathSection(list_base) - MergeLists(to[list_base], v, to_file, fro_file, is_paths, append) - else: - raise TypeError, \ - 'Attempt to merge dict value of unsupported type ' + \ - v.__class__.__name__ + ' for key ' + k - - -def MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, configuration, visited): - # Skip if previously visted. - if configuration in visited: - return - - # Look at this configuration. - configuration_dict = target_dict['configurations'][configuration] - - # Merge in parents. - for parent in configuration_dict.get('inherit_from', []): - MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, parent, visited + [configuration]) - - # Merge it into the new config. - MergeDicts(new_configuration_dict, configuration_dict, - build_file, build_file) - - # Drop abstract. - if 'abstract' in new_configuration_dict: - del new_configuration_dict['abstract'] - - -def SetUpConfigurations(target, target_dict): - global non_configuration_keys - # key_suffixes is a list of key suffixes that might appear on key names. - # These suffixes are handled in conditional evaluations (for =, +, and ?) - # and rules/exclude processing (for ! and /). Keys with these suffixes - # should be treated the same as keys without. - key_suffixes = ['=', '+', '?', '!', '/'] - - build_file = gyp.common.BuildFile(target) - - # Provide a single configuration by default if none exists. - # TODO(mark): Signal an error if default_configurations exists but - # configurations does not. - if not 'configurations' in target_dict: - target_dict['configurations'] = {'Default': {}} - if not 'default_configuration' in target_dict: - concrete = [i for i in target_dict['configurations'].keys() - if not target_dict['configurations'][i].get('abstract')] - target_dict['default_configuration'] = sorted(concrete)[0] - - for configuration in target_dict['configurations'].keys(): - old_configuration_dict = target_dict['configurations'][configuration] - # Skip abstract configurations (saves work only). - if old_configuration_dict.get('abstract'): - continue - # Configurations inherit (most) settings from the enclosing target scope. - # Get the inheritance relationship right by making a copy of the target - # dict. - new_configuration_dict = copy.deepcopy(target_dict) - - # Take out the bits that don't belong in a "configurations" section. - # Since configuration setup is done before conditional, exclude, and rules - # processing, be careful with handling of the suffix characters used in - # those phases. - delete_keys = [] - for key in new_configuration_dict: - key_ext = key[-1:] - if key_ext in key_suffixes: - key_base = key[:-1] - else: - key_base = key - if key_base in non_configuration_keys: - delete_keys.append(key) - - for key in delete_keys: - del new_configuration_dict[key] - - # Merge in configuration (with all its parents first). - MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, configuration, []) - - # Put the new result back into the target dict as a configuration. - target_dict['configurations'][configuration] = new_configuration_dict - - # Now drop all the abstract ones. 
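A hedged example of the 'inherit_from'/'abstract' mechanism handled by the two functions above (the configuration names are made up): abstract bases are merged into the concrete configurations that inherit from them and are then dropped.

# Hypothetical 'configurations' section of a target dict.
configurations = {
    'Common':  {'abstract': 1, 'defines': ['COMMON']},
    'Debug':   {'inherit_from': ['Common'], 'defines': ['DEBUG']},
    'Release': {'inherit_from': ['Common'], 'defines': ['NDEBUG']},
}

def resolved_defines(name):
    # Parents are merged first, then the configuration itself (simplified:
    # only the 'defines' list is considered here).
    out = []
    for parent in configurations[name].get('inherit_from', []):
        out.extend(resolved_defines(parent))
    out.extend(configurations[name].get('defines', []))
    return out

print(resolved_defines('Debug'))  # ['COMMON', 'DEBUG']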
- for configuration in target_dict['configurations'].keys(): - old_configuration_dict = target_dict['configurations'][configuration] - if old_configuration_dict.get('abstract'): - del target_dict['configurations'][configuration] - - # Now that all of the target's configurations have been built, go through - # the target dict's keys and remove everything that's been moved into a - # "configurations" section. - delete_keys = [] - for key in target_dict: - key_ext = key[-1:] - if key_ext in key_suffixes: - key_base = key[:-1] - else: - key_base = key - if not key_base in non_configuration_keys: - delete_keys.append(key) - for key in delete_keys: - del target_dict[key] - - # Check the configurations to see if they contain invalid keys. - for configuration in target_dict['configurations'].keys(): - configuration_dict = target_dict['configurations'][configuration] - for key in configuration_dict.keys(): - if key in invalid_configuration_keys: - raise KeyError, ('%s not allowed in the %s configuration, found in ' - 'target %s' % (key, configuration, target)) - - - -def ProcessListFiltersInDict(name, the_dict): - """Process regular expression and exclusion-based filters on lists. - - An exclusion list is in a dict key named with a trailing "!", like - "sources!". Every item in such a list is removed from the associated - main list, which in this example, would be "sources". Removed items are - placed into a "sources_excluded" list in the dict. - - Regular expression (regex) filters are contained in dict keys named with a - trailing "/", such as "sources/" to operate on the "sources" list. Regex - filters in a dict take the form: - 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'] ], - ['include', '_mac\\.cc$'] ], - The first filter says to exclude all files ending in _linux.cc, _mac.cc, and - _win.cc. The second filter then includes all files ending in _mac.cc that - are now or were once in the "sources" list. Items matching an "exclude" - filter are subject to the same processing as would occur if they were listed - by name in an exclusion list (ending in "!"). Items matching an "include" - filter are brought back into the main list if previously excluded by an - exclusion list or exclusion regex filter. Subsequent matching "exclude" - patterns can still cause items to be excluded after matching an "include". - """ - - # Look through the dictionary for any lists whose keys end in "!" or "/". - # These are lists that will be treated as exclude lists and regular - # expression-based exclude/include lists. Collect the lists that are - # needed first, looking for the lists that they operate on, and assemble - # then into |lists|. This is done in a separate loop up front, because - # the _included and _excluded keys need to be added to the_dict, and that - # can't be done while iterating through it. - - lists = [] - del_lists = [] - for key, value in the_dict.iteritems(): - operation = key[-1] - if operation != '!' and operation != '/': - continue - - if not isinstance(value, list): - raise ValueError, name + ' key ' + key + ' must be list, not ' + \ - value.__class__.__name__ - - list_key = key[:-1] - if list_key not in the_dict: - # This happens when there's a list like "sources!" but no corresponding - # "sources" list. Since there's nothing for it to operate on, queue up - # the "sources!" list for deletion now. 
- del_lists.append(key) - continue - - if not isinstance(the_dict[list_key], list): - raise ValueError, name + ' key ' + list_key + \ - ' must be list, not ' + \ - value.__class__.__name__ + ' when applying ' + \ - {'!': 'exclusion', '/': 'regex'}[operation] - - if not list_key in lists: - lists.append(list_key) - - # Delete the lists that are known to be unneeded at this point. - for del_list in del_lists: - del the_dict[del_list] - - for list_key in lists: - the_list = the_dict[list_key] - - # Initialize the list_actions list, which is parallel to the_list. Each - # item in list_actions identifies whether the corresponding item in - # the_list should be excluded, unconditionally preserved (included), or - # whether no exclusion or inclusion has been applied. Items for which - # no exclusion or inclusion has been applied (yet) have value -1, items - # excluded have value 0, and items included have value 1. Includes and - # excludes override previous actions. All items in list_actions are - # initialized to -1 because no excludes or includes have been processed - # yet. - list_actions = list((-1,) * len(the_list)) - - exclude_key = list_key + '!' - if exclude_key in the_dict: - for exclude_item in the_dict[exclude_key]: - for index in xrange(0, len(the_list)): - if exclude_item == the_list[index]: - # This item matches the exclude_item, so set its action to 0 - # (exclude). - list_actions[index] = 0 - - # The "whatever!" list is no longer needed, dump it. - del the_dict[exclude_key] - - regex_key = list_key + '/' - if regex_key in the_dict: - for regex_item in the_dict[regex_key]: - [action, pattern] = regex_item - pattern_re = re.compile(pattern) - - for index in xrange(0, len(the_list)): - list_item = the_list[index] - if pattern_re.search(list_item): - # Regular expression match. - - if action == 'exclude': - # This item matches an exclude regex, so set its value to 0 - # (exclude). - list_actions[index] = 0 - elif action == 'include': - # This item matches an include regex, so set its value to 1 - # (include). - list_actions[index] = 1 - else: - # This is an action that doesn't make any sense. - raise ValueError, 'Unrecognized action ' + action + ' in ' + \ - name + ' key ' + key - - # The "whatever/" list is no longer needed, dump it. - del the_dict[regex_key] - - # Add excluded items to the excluded list. - # - # Note that exclude_key ("sources!") is different from excluded_key - # ("sources_excluded"). The exclude_key list is input and it was already - # processed and deleted; the excluded_key list is output and it's about - # to be created. - excluded_key = list_key + '_excluded' - if excluded_key in the_dict: - raise KeyError, \ - name + ' key ' + excluded_key + ' must not be present prior ' + \ - ' to applying exclusion/regex filters for ' + list_key - - excluded_list = [] - - # Go backwards through the list_actions list so that as items are deleted, - # the indices of items that haven't been seen yet don't shift. That means - # that things need to be prepended to excluded_list to maintain them in the - # same order that they existed in the_list. - for index in xrange(len(list_actions) - 1, -1, -1): - if list_actions[index] == 0: - # Dump anything with action 0 (exclude). Keep anything with action 1 - # (include) or -1 (no include or exclude seen for the item). - excluded_list.insert(0, the_list[index]) - del the_list[index] - - # If anything was excluded, put the excluded list into the_dict at - # excluded_key. 
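A compact sketch of the '!' and '/' filters described in the docstring, with the regex example written out in full; the loop below is a simplification for illustration, not a replacement for the implementation above:

import re

the_dict = {
    'sources':  ['app.cc', 'app_linux.cc', 'app_mac.cc', 'app_win.cc'],
    'sources!': ['app_win.cc'],
    'sources/': [['exclude', '_(linux|mac|win)\\.cc$'],
                 ['include', '_mac\\.cc$']],
}

actions = {}                                  # item -> keep (True) or drop (False)
for item in the_dict['sources!']:
    actions[item] = False
for action, pattern in the_dict['sources/']:
    for item in the_dict['sources']:
        if re.search(pattern, item):
            actions[item] = (action == 'include')

the_dict['sources_excluded'] = [s for s in the_dict['sources']
                                if actions.get(s) is False]
the_dict['sources'] = [s for s in the_dict['sources']
                       if actions.get(s) is not False]
print(the_dict['sources'])           # ['app.cc', 'app_mac.cc']
print(the_dict['sources_excluded'])  # ['app_linux.cc', 'app_win.cc']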
- if len(excluded_list) > 0: - the_dict[excluded_key] = excluded_list - - # Now recurse into subdicts and lists that may contain dicts. - for key, value in the_dict.iteritems(): - if isinstance(value, dict): - ProcessListFiltersInDict(key, value) - elif isinstance(value, list): - ProcessListFiltersInList(key, value) - - -def ProcessListFiltersInList(name, the_list): - for item in the_list: - if isinstance(item, dict): - ProcessListFiltersInDict(name, item) - elif isinstance(item, list): - ProcessListFiltersInList(name, item) - - -def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): - """Ensures that the rules sections in target_dict are valid and consistent, - and determines which sources they apply to. - - Arguments: - target: string, name of target. - target_dict: dict, target spec containing "rules" and "sources" lists. - extra_sources_for_rules: a list of keys to scan for rule matches in - addition to 'sources'. - """ - - # Dicts to map between values found in rules' 'rule_name' and 'extension' - # keys and the rule dicts themselves. - rule_names = {} - rule_extensions = {} - - rules = target_dict.get('rules', []) - for rule in rules: - # Make sure that there's no conflict among rule names and extensions. - rule_name = rule['rule_name'] - if rule_name in rule_names: - raise KeyError, 'rule %s exists in duplicate, target %s' % \ - (rule_name, target) - rule_names[rule_name] = rule - - rule_extension = rule['extension'] - if rule_extension in rule_extensions: - raise KeyError, ('extension %s associated with multiple rules, ' + - 'target %s rules %s and %s') % \ - (rule_extension, target, - rule_extensions[rule_extension]['rule_name'], - rule_name) - rule_extensions[rule_extension] = rule - - # Make sure rule_sources isn't already there. It's going to be - # created below if needed. - if 'rule_sources' in rule: - raise KeyError, \ - 'rule_sources must not exist in input, target %s rule %s' % \ - (target, rule_name) - extension = rule['extension'] - - rule_sources = [] - source_keys = ['sources'] - source_keys.extend(extra_sources_for_rules) - for source_key in source_keys: - for source in target_dict.get(source_key, []): - (source_root, source_extension) = os.path.splitext(source) - if source_extension.startswith('.'): - source_extension = source_extension[1:] - if source_extension == extension: - rule_sources.append(source) - - if len(rule_sources) > 0: - rule['rule_sources'] = rule_sources - - -def ValidateActionsInTarget(target, target_dict, build_file): - '''Validates the inputs to the actions in a target.''' - target_name = target_dict.get('target_name') - actions = target_dict.get('actions', []) - for action in actions: - action_name = action.get('action_name') - if not action_name: - raise Exception("Anonymous action in target %s. " - "An action must have an 'action_name' field." % - target_name) - inputs = action.get('inputs', []) - - -def ValidateRunAsInTarget(target, target_dict, build_file): - target_name = target_dict.get('target_name') - run_as = target_dict.get('run_as') - if not run_as: - return - if not isinstance(run_as, dict): - raise Exception("The 'run_as' in target %s from file %s should be a " - "dictionary." % - (target_name, build_file)) - action = run_as.get('action') - if not action: - raise Exception("The 'run_as' in target %s from file %s must have an " - "'action' section." % - (target_name, build_file)) - if not isinstance(action, list): - raise Exception("The 'action' for 'run_as' in target %s from file %s " - "must be a list." 
% - (target_name, build_file)) - working_directory = run_as.get('working_directory') - if working_directory and not isinstance(working_directory, str): - raise Exception("The 'working_directory' for 'run_as' in target %s " - "in file %s should be a string." % - (target_name, build_file)) - environment = run_as.get('environment') - if environment and not isinstance(environment, dict): - raise Exception("The 'environment' for 'run_as' in target %s " - "in file %s should be a dictionary." % - (target_name, build_file)) - - -def TurnIntIntoStrInDict(the_dict): - """Given dict the_dict, recursively converts all integers into strings. - """ - # Use items instead of iteritems because there's no need to try to look at - # reinserted keys and their associated values. - for k, v in the_dict.items(): - if isinstance(v, int): - v = str(v) - the_dict[k] = v - elif isinstance(v, dict): - TurnIntIntoStrInDict(v) - elif isinstance(v, list): - TurnIntIntoStrInList(v) - - if isinstance(k, int): - the_dict[str(k)] = v - del the_dict[k] - - -def TurnIntIntoStrInList(the_list): - """Given list the_list, recursively converts all integers into strings. - """ - for index in xrange(0, len(the_list)): - item = the_list[index] - if isinstance(item, int): - the_list[index] = str(item) - elif isinstance(item, dict): - TurnIntIntoStrInDict(item) - elif isinstance(item, list): - TurnIntIntoStrInList(item) - - -def Load(build_files, variables, includes, depth, generator_input_info, check, - circular_check): - # Set up path_sections and non_configuration_keys with the default data plus - # the generator-specifc data. - global path_sections - path_sections = base_path_sections[:] - path_sections.extend(generator_input_info['path_sections']) - - global non_configuration_keys - non_configuration_keys = base_non_configuration_keys[:] - non_configuration_keys.extend(generator_input_info['non_configuration_keys']) - - # TODO(mark) handle variants if the generator doesn't want them directly. - generator_handles_variants = \ - generator_input_info['generator_handles_variants'] - - global absolute_build_file_paths - absolute_build_file_paths = \ - generator_input_info['generator_wants_absolute_build_file_paths'] - - global multiple_toolsets - multiple_toolsets = generator_input_info[ - 'generator_supports_multiple_toolsets'] - - # A generator can have other lists (in addition to sources) be processed - # for rules. - extra_sources_for_rules = generator_input_info['extra_sources_for_rules'] - - # Load build files. This loads every target-containing build file into - # the |data| dictionary such that the keys to |data| are build file names, - # and the values are the entire build file contents after "early" or "pre" - # processing has been done and includes have been resolved. - # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as - # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps - # track of the keys corresponding to "target" files. - data = {'target_build_files': set()} - aux_data = {} - for build_file in build_files: - # Normalize paths everywhere. This is important because paths will be - # used as keys to the data dict and for references between input files. - build_file = os.path.normpath(build_file) - try: - LoadTargetBuildFile(build_file, data, aux_data, variables, includes, - depth, check) - except Exception, e: - gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) - raise - - # Build a dict to access each target's subdict by qualified name. 
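TurnIntIntoStrInDict and TurnIntIntoStrInList exist because generators may not expect integer values; a non-mutating sketch of the same conversion (ints_to_strs is a hypothetical helper, unlike the in-place functions above):

def ints_to_strs(obj):
    # Recursively convert every int to its string form, returning a new value.
    if isinstance(obj, int):
        return str(obj)
    if isinstance(obj, dict):
        return {ints_to_strs(k): ints_to_strs(v) for k, v in obj.items()}
    if isinstance(obj, list):
        return [ints_to_strs(i) for i in obj]
    return obj

print(ints_to_strs({'warnings': [4267, 4244], 'enabled': 1}))
# {'warnings': ['4267', '4244'], 'enabled': '1'}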
- targets = BuildTargetsDict(data) - - # Fully qualify all dependency links. - QualifyDependencies(targets) - - # Expand dependencies specified as build_file:*. - ExpandWildcardDependencies(targets, data) - - if circular_check: - # Make sure that any targets in a.gyp don't contain dependencies in other - # .gyp files that further depend on a.gyp. - VerifyNoGYPFileCircularDependencies(targets) - - [dependency_nodes, flat_list] = BuildDependencyList(targets) - - # Handle dependent settings of various types. - for settings_type in ['all_dependent_settings', - 'direct_dependent_settings', - 'link_settings']: - DoDependentSettings(settings_type, flat_list, targets, dependency_nodes) - - # Take out the dependent settings now that they've been published to all - # of the targets that require them. - for target in flat_list: - if settings_type in targets[target]: - del targets[target][settings_type] - - # Make sure static libraries don't declare dependencies on other static - # libraries, but that linkables depend on all unlinked static libraries - # that they need so that their link steps will be correct. - AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes) - - # Apply "post"/"late"/"target" variable expansions and condition evaluations. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ProcessVariablesAndConditionsInDict(target_dict, True, variables, - build_file) - - # Move everything that can go into a "configurations" section into one. - for target in flat_list: - target_dict = targets[target] - SetUpConfigurations(target, target_dict) - - # Apply exclude (!) and regex (/) list filters. - for target in flat_list: - target_dict = targets[target] - ProcessListFiltersInDict(target, target_dict) - - # Make sure that the rules make sense, and build up rule_sources lists as - # needed. Not all generators will need to use the rule_sources lists, but - # some may, and it seems best to build the list in a common spot. - # Also validate actions and run_as elements in targets. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ValidateRulesInTarget(target, target_dict, extra_sources_for_rules) - ValidateRunAsInTarget(target, target_dict, build_file) - ValidateActionsInTarget(target, target_dict, build_file) - - # Generators might not expect ints. Turn them into strs. - TurnIntIntoStrInDict(data) - - # TODO(mark): Return |data| for now because the generator needs a list of - # build files that came in. In the future, maybe it should just accept - # a list, and not the whole data dict. - return [flat_list, targets, data] diff --git a/third_party/gyp/pylib/gyp/xcodeproj_file.py b/third_party/gyp/pylib/gyp/xcodeproj_file.py deleted file mode 100644 index ebae02e..0000000 --- a/third_party/gyp/pylib/gyp/xcodeproj_file.py +++ /dev/null @@ -1,2736 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Xcode project file generator. - -This module is both an Xcode project file generator and a documentation of the -Xcode project file format. Knowledge of the project file format was gained -based on extensive experience with Xcode, and by making changes to projects in -Xcode.app and observing the resultant changes in the associated project files. 
- -XCODE PROJECT FILES - -The generator targets the file format as written by Xcode 3.1 (specifically, -3.1.2), but past experience has taught that the format has not changed -significantly in the past several years, and future versions of Xcode are able -to read older project files. - -Xcode project files are "bundled": the project "file" from an end-user's -perspective is actually a directory with an ".xcodeproj" extension. The -project file from this module's perspective is actually a file inside this -directory, always named "project.pbxproj". This file contains a complete -description of the project and is all that is needed to use the xcodeproj. -Other files contained in the xcodeproj directory are simply used to store -per-user settings, such as the state of various UI elements in the Xcode -application. - -The project.pbxproj file is a property list, stored in a format almost -identical to the NeXTstep property list format. The file is able to carry -Unicode data, and is encoded in UTF-8. The root element in the property list -is a dictionary that contains several properties of minimal interest, and two -properties of immense interest. The most important property is a dictionary -named "objects". The entire structure of the project is represented by the -children of this property. The objects dictionary is keyed by unique 96-bit -values represented by 24 uppercase hexadecimal characters. Each value in the -objects dictionary is itself a dictionary, describing an individual object. - -Each object in the dictionary is a member of a class, which is identified by -the "isa" property of each object. A variety of classes are represented in a -project file. Objects can refer to other objects by ID, using the 24-character -hexadecimal object key. A project's objects form a tree, with a root object -of class PBXProject at the root. As an example, the PBXProject object serves -as parent to an XCConfigurationList object defining the build configurations -used in the project, a PBXGroup object serving as a container for all files -referenced in the project, and a list of target objects, each of which defines -a target in the project. There are several different types of target object, -such as PBXNativeTarget and PBXAggregateTarget. In this module, this -relationship is expressed by having each target type derive from an abstract -base named XCTarget. - -The project.pbxproj file's root dictionary also contains a property, sibling to -the "objects" dictionary, named "rootObject". The value of rootObject is a -24-character object key referring to the root PBXProject object in the -objects dictionary. - -In Xcode, every file used as input to a target or produced as a final product -of a target must appear somewhere in the hierarchy rooted at the PBXGroup -object referenced by the PBXProject's mainGroup property. A PBXGroup is -generally represented as a folder in the Xcode application. PBXGroups can -contain other PBXGroups as well as PBXFileReferences, which are pointers to -actual files. - -Each XCTarget contains a list of build phases, represented in this module by -the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations -are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the -"Compile Sources" and "Link Binary With Libraries" phases displayed in the -Xcode application. 
Files used as input to these phases (for example, source -files in the former case and libraries and frameworks in the latter) are -represented by PBXBuildFile objects, referenced by elements of "files" lists -in XCTarget objects. Each PBXBuildFile object refers to a PBXBuildFile -object as a "weak" reference: it does not "own" the PBXBuildFile, which is -owned by the root object's mainGroup or a descendant group. In most cases, the -layer of indirection between an XCBuildPhase and a PBXFileReference via a -PBXBuildFile appears extraneous, but there's actually one reason for this: -file-specific compiler flags are added to the PBXBuildFile object so as to -allow a single file to be a member of multiple targets while having distinct -compiler flags for each. These flags can be modified in the Xcode applciation -in the "Build" tab of a File Info window. - -When a project is open in the Xcode application, Xcode will rewrite it. As -such, this module is careful to adhere to the formatting used by Xcode, to -avoid insignificant changes appearing in the file when it is used in the -Xcode application. This will keep version control repositories happy, and -makes it possible to compare a project file used in Xcode to one generated by -this module to determine if any significant changes were made in the -application. - -Xcode has its own way of assigning 24-character identifiers to each object, -which is not duplicated here. Because the identifier only is only generated -once, when an object is created, and is then left unchanged, there is no need -to attempt to duplicate Xcode's behavior in this area. The generator is free -to select any identifier, even at random, to refer to the objects it creates, -and Xcode will retain those identifiers and use them when subsequently -rewriting the project file. However, the generator would choose new random -identifiers each time the project files are generated, leading to difficulties -comparing "used" project files to "pristine" ones produced by this module, -and causing the appearance of changes as every object identifier is changed -when updated projects are checked in to a version control repository. To -mitigate this problem, this module chooses identifiers in a more deterministic -way, by hashing a description of each object as well as its parent and ancestor -objects. This strategy should result in minimal "shift" in IDs as successive -generations of project files are produced. - -THIS MODULE - -This module introduces several classes, all derived from the XCObject class. -Nearly all of the "brains" are built into the XCObject class, which understands -how to create and modify objects, maintain the proper tree structure, compute -identifiers, and print objects. For the most part, classes derived from -XCObject need only provide a _schema class object, a dictionary that -expresses what properties objects of the class may contain. - -Given this structure, it's possible to build a minimal project file by creating -objects of the appropriate types and making the proper connections: - - config_list = XCConfigurationList() - group = PBXGroup() - project = PBXProject({'buildConfigurationList': config_list, - 'mainGroup': group}) - -With the project object set up, it can be added to an XCProjectFile object. -XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject -subclass that does not actually correspond to a class type found in a project -file. Rather, it is used to represent the project file's root dictionary. 
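The deterministic-identifier strategy described above can be sketched as follows; this is an assumption-level illustration only, and the module's actual scheme packs the SHA-1 digest differently:

import hashlib

def stable_id(*ancestry):
    # Hash a description of the object and its ancestors so the same logical
    # object gets the same 24-character uppercase hexadecimal ID on every run.
    digest = hashlib.sha1('/'.join(ancestry).encode('utf-8')).hexdigest()
    return digest[:24].upper()

print(stable_id('PBXProject', 'PBXGroup:Source', 'PBXFileReference:main.cc'))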
-Printing an XCProjectFile will print the entire project file, including the -full "objects" dictionary. - - project_file = XCProjectFile({'rootObject': project}) - project_file.ComputeIDs() - project_file.Print() - -Xcode project files are always encoded in UTF-8. This module will accept -strings of either the str class or the unicode class. Strings of class str -are assumed to already be encoded in UTF-8. Obviously, if you're just using -ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset. -Strings of class unicode are handled properly and encoded in UTF-8 when -a project file is output. -""" - -import gyp.common -import posixpath -import re -import struct -import sys - -# hashlib is supplied as of Python 2.5 as the replacement interface for sha -# and other secure hashes. In 2.6, sha is deprecated. Import hashlib if -# available, avoiding a deprecation warning under 2.6. Import sha otherwise, -# preserving 2.4 compatibility. -try: - import hashlib - _new_sha1 = hashlib.sha1 -except ImportError: - import sha - _new_sha1 = sha.new - - -# See XCObject._EncodeString. This pattern is used to determine when a string -# can be printed unquoted. Strings that match this pattern may be printed -# unquoted. Strings that do not match must be quoted and may be further -# transformed to be properly encoded. Note that this expression matches the -# characters listed with "+", for 1 or more occurrences: if a string is empty, -# it must not match this pattern, because it needs to be encoded as "". -_unquoted = re.compile('^[A-Za-z0-9$./_]+$') - -# Strings that match this pattern are quoted regardless of what _unquoted says. -# Oddly, Xcode will quote any string with a run of three or more underscores. -_quoted = re.compile('___') - -# This pattern should match any character that needs to be escaped by -# XCObject._EncodeString. See that function. -_escaped = re.compile('[\\\\"]|[^ -~]') - - -# Used by SourceTreeAndPathFromPath -_path_leading_variable = re.compile('^\$\((.*?)\)(/(.*))?$') - -def SourceTreeAndPathFromPath(input_path): - """Given input_path, returns a tuple with sourceTree and path values. - - Examples: - input_path (source_tree, output_path) - '$(VAR)/path' ('VAR', 'path') - '$(VAR)' ('VAR', None) - 'path' (None, 'path') - """ - - source_group_match = _path_leading_variable.match(input_path) - if source_group_match: - source_tree = source_group_match.group(1) - output_path = source_group_match.group(3) # This may be None. - else: - source_tree = None - output_path = input_path - - return (source_tree, output_path) - -def ConvertVariablesToShellSyntax(input_string): - return re.sub('\$\((.*?)\)', '${\\1}', input_string) - -class XCObject(object): - """The abstract base of all class types used in Xcode project files. - - Class variables: - _schema: A dictionary defining the properties of this class. The keys to - _schema are string property keys as used in project files. Values - are a list of four or five elements: - [ is_list, property_type, is_strong, is_required, default ] - is_list: True if the property described is a list, as opposed - to a single element. - property_type: The type to use as the value of the property, - or if is_list is True, the type to use for each - element of the value's list. property_type must - be an XCObject subclass, or one of the built-in - types str, int, or dict. 
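SourceTreeAndPathFromPath and ConvertVariablesToShellSyntax above are small enough to exercise in isolation. The following standalone sketch copies the regular expressions from the code above and shows the expected results for the documented inputs:

    import re

    _path_leading_variable = re.compile(r'^\$\((.*?)\)(/(.*))?$')

    def source_tree_and_path(input_path):
        # Mirrors SourceTreeAndPathFromPath above.
        match = _path_leading_variable.match(input_path)
        if match:
            return (match.group(1), match.group(3))  # group(3) may be None
        return (None, input_path)

    assert source_tree_and_path('$(SDKROOT)/usr/lib') == ('SDKROOT', 'usr/lib')
    assert source_tree_and_path('$(SDKROOT)') == ('SDKROOT', None)
    assert source_tree_and_path('relative/path') == (None, 'relative/path')

    # Mirrors ConvertVariablesToShellSyntax above.
    print(re.sub(r'\$\((.*?)\)', r'${\1}', '$(BUILT_PRODUCTS_DIR)/out'))
    # -> ${BUILT_PRODUCTS_DIR}/out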
- is_strong: If property_type is an XCObject subclass, is_strong - is True to assert that this class "owns," or serves - as parent, to the property value (or, if is_list is - True, values). is_strong must be False if - property_type is not an XCObject subclass. - is_required: True if the property is required for the class. - Note that is_required being True does not preclude - an empty string ("", in the case of property_type - str) or list ([], in the case of is_list True) from - being set for the property. - default: Optional. If is_required is True, default may be set - to provide a default value for objects that do not supply - their own value. If is_required is True and default - is not provided, users of the class must supply their own - value for the property. - Note that although the values of the array are expressed in - boolean terms, subclasses provide values as integers to conserve - horizontal space. - _should_print_single_line: False in XCObject. Subclasses whose objects - should be written to the project file in the - alternate single-line format, such as - PBXFileReference and PBXBuildFile, should - set this to True. - _encode_transforms: Used by _EncodeString to encode unprintable characters. - The index into this list is the ordinal of the - character to transform; each value is a string - used to represent the character in the output. XCObject - provides an _encode_transforms list suitable for most - XCObject subclasses. - _alternate_encode_transforms: Provided for subclasses that wish to use - the alternate encoding rules. Xcode seems - to use these rules when printing objects in - single-line format. Subclasses that desire - this behavior should set _encode_transforms - to _alternate_encode_transforms. - _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs - to construct this object's ID. Most classes that need custom - hashing behavior should do it by overriding Hashables, - but in some cases an object's parent may wish to push a - hashable value into its child, and it can do so by appending - to _hashables. - Attributes: - id: The object's identifier, a 24-character uppercase hexadecimal string. - Usually, objects being created should not set id until the entire - project file structure is built. At that point, ComputeIDs() should - be called on the root object to assign deterministic values for id to - each object in the tree. - parent: The object's parent. This is set by a parent XCObject when a child - object is added to it. - _properties: The object's property dictionary. An object's properties are - described by its class' _schema variable. - """ - - _schema = {} - _should_print_single_line = False - - # See _EncodeString.
- _encode_transforms = [] - i = 0 - while i < ord(' '): - _encode_transforms.append('\\U%04x' % i) - i = i + 1 - _encode_transforms[7] = '\\a' - _encode_transforms[8] = '\\b' - _encode_transforms[9] = '\\t' - _encode_transforms[10] = '\\n' - _encode_transforms[11] = '\\v' - _encode_transforms[12] = '\\f' - _encode_transforms[13] = '\\n' - - _alternate_encode_transforms = list(_encode_transforms) - _alternate_encode_transforms[9] = chr(9) - _alternate_encode_transforms[10] = chr(10) - _alternate_encode_transforms[11] = chr(11) - - def __init__(self, properties=None, id=None, parent=None): - self.id = id - self.parent = parent - self._properties = {} - self._hashables = [] - self._SetDefaultsFromSchema() - self.UpdateProperties(properties) - - def __repr__(self): - try: - name = self.Name() - except NotImplementedError: - return '<%s at 0x%x>' % (self.__class__.__name__, id(self)) - return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self)) - - def Copy(self): - """Make a copy of this object. - - The new object will have its own copy of lists and dicts. Any XCObject - objects owned by this object (marked "strong") will be copied in the - new object, even those found in lists. If this object has any weak - references to other XCObjects, the same references are added to the new - object without making a copy. - """ - - that = self.__class__(id=self.id, parent=self.parent) - for key, value in self._properties.iteritems(): - is_strong = self._schema[key][2] - - if isinstance(value, XCObject): - if is_strong: - new_value = value.Copy() - new_value.parent = that - that._properties[key] = new_value - else: - that._properties[key] = value - elif isinstance(value, str) or isinstance(value, unicode) or \ - isinstance(value, int): - that._properties[key] = value - elif isinstance(value, list): - if is_strong: - # If is_strong is True, each element is an XCObject, so it's safe to - # call Copy. - that._properties[key] = [] - for item in value: - new_item = item.Copy() - new_item.parent = that - that._properties[key].append(new_item) - else: - that._properties[key] = value[:] - elif isinstance(value, dict): - # dicts are never strong. - if is_strong: - raise TypeError, 'Strong dict for key ' + key + ' in ' + \ - self.__class__.__name__ - else: - that._properties[key] = value.copy() - else: - raise TypeError, 'Unexpected type ' + value.__class__.__name__ + \ - ' for key ' + key + ' in ' + self.__class__.__name__ - - return that - - def Name(self): - """Return the name corresponding to an object. - - Not all objects necessarily need to be nameable, and not all that do have - a "name" property. Override as needed. - """ - - # If the schema indicates that "name" is required, try to access the - # property even if it doesn't exist. This will result in a KeyError - # being raised for the property that should be present, which seems more - # appropriate than NotImplementedError in this case. - if 'name' in self._properties or \ - ('name' in self._schema and self._schema['name'][3]): - return self._properties['name'] - - raise NotImplementedError, \ - self.__class__.__name__ + ' must implement Name' - - def Comment(self): - """Return a comment string for the object. - - Most objects just use their name as the comment, but PBXProject uses - different values. - - The returned comment is not escaped and does not have any comment marker - strings applied to it. 
- """ - - return self.Name() - - def Hashables(self): - hashables = [self.__class__.__name__] - - name = self.Name() - if name != None: - hashables.append(name) - - hashables.extend(self._hashables) - - return hashables - - def ComputeIDs(self, recursive=True, overwrite=True, hash=None): - """Set "id" properties deterministically. - - An object's "id" property is set based on a hash of its class type and - name, as well as the class type and name of all ancestor objects. As - such, it is only advisable to call ComputeIDs once an entire project file - tree is built. - - If recursive is True, recurse into all descendant objects and update their - hashes. - - If overwrite is True, any existing value set in the "id" property will be - replaced. - """ - - def _HashUpdate(hash, data): - """Update hash with data's length and contents. - - If the hash were updated only with the value of data, it would be - possible for clowns to induce collisions by manipulating the names of - their objects. By adding the length, it's exceedingly less likely that - ID collisions will be encountered, intentionally or not. - """ - - hash.update(struct.pack('>i', len(data))) - hash.update(data) - - if hash == None: - hash = _new_sha1() - - hashables = self.Hashables() - assert len(hashables) > 0 - for hashable in hashables: - _HashUpdate(hash, hashable) - - if recursive: - for child in self.Children(): - child.ComputeIDs(recursive, overwrite, hash.copy()) - - if overwrite or self.id == None: - # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is - # is 160 bits. Instead of throwing out 64 bits of the digest, xor them - # into the portion that gets used. - assert hash.digest_size % 4 == 0 - digest_int_count = hash.digest_size / 4 - digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest()) - id_ints = [0, 0, 0] - for index in xrange(0, digest_int_count): - id_ints[index % 3] ^= digest_ints[index] - self.id = '%08X%08X%08X' % tuple(id_ints) - - def EnsureNoIDCollisions(self): - """Verifies that no two objects have the same ID. Checks all descendants. - """ - - ids = {} - descendants = self.Descendants() - for descendant in descendants: - if descendant.id in ids: - other = ids[descendant.id] - raise KeyError, \ - 'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \ - (descendant.id, str(descendant._properties), - str(other._properties), self._properties['rootObject'].Name()) - ids[descendant.id] = descendant - - def Children(self): - """Returns a list of all of this object's owned (strong) children.""" - - children = [] - for property, attributes in self._schema.iteritems(): - (is_list, property_type, is_strong) = attributes[0:3] - if is_strong and property in self._properties: - if not is_list: - children.append(self._properties[property]) - else: - children.extend(self._properties[property]) - return children - - def Descendants(self): - """Returns a list of all of this object's descendants, including this - object. - """ - - children = self.Children() - descendants = [self] - for child in children: - descendants.extend(child.Descendants()) - return descendants - - def PBXProjectAncestor(self): - # The base case for recursion is defined at PBXProject.PBXProjectAncestor. - if self.parent: - return self.parent.PBXProjectAncestor() - return None - - def _EncodeComment(self, comment): - """Encodes a comment to be placed in the project file output, mimicing - Xcode behavior. - """ - - # This mimics Xcode behavior by wrapping the comment in "/*" and "*/". 
If - # the string already contains a "*/", it is turned into "(*)/". This keeps - # the file writer from outputting something that would be treated as the - # end of a comment in the middle of something intended to be entirely a - # comment. - - return '/* ' + comment.replace('*/', '(*)/') + ' */' - - def _EncodeTransform(self, match): - # This function works closely with _EncodeString. It will only be called - # by re.sub with match.group(0) containing a character matched by the - # the _escaped expression. - char = match.group(0) - - # Backslashes (\) and quotation marks (") are always replaced with a - # backslash-escaped version of the same. Everything else gets its - # replacement from the class' _encode_transforms array. - if char == '\\': - return '\\\\' - if char == '"': - return '\\"' - return self._encode_transforms[ord(char)] - - def _EncodeString(self, value): - """Encodes a string to be placed in the project file output, mimicing - Xcode behavior. - """ - - # Use quotation marks when any character outside of the range A-Z, a-z, 0-9, - # $ (dollar sign), . (period), and _ (underscore) is present. Also use - # quotation marks to represent empty strings. - # - # Escape " (double-quote) and \ (backslash) by preceding them with a - # backslash. - # - # Some characters below the printable ASCII range are encoded specially: - # 7 ^G BEL is encoded as "\a" - # 8 ^H BS is encoded as "\b" - # 11 ^K VT is encoded as "\v" - # 12 ^L NP is encoded as "\f" - # 127 ^? DEL is passed through as-is without escaping - # - In PBXFileReference and PBXBuildFile objects: - # 9 ^I HT is passed through as-is without escaping - # 10 ^J NL is passed through as-is without escaping - # 13 ^M CR is passed through as-is without escaping - # - In other objects: - # 9 ^I HT is encoded as "\t" - # 10 ^J NL is encoded as "\n" - # 13 ^M CR is encoded as "\n" rendering it indistinguishable from - # 10 ^J NL - # All other nonprintable characters within the ASCII range (0 through 127 - # inclusive) are encoded as "\U001f" referring to the Unicode code point in - # hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e". - # Characters above the ASCII range are passed through to the output encoded - # as UTF-8 without any escaping. These mappings are contained in the - # class' _encode_transforms list. - - if _unquoted.search(value) and not _quoted.search(value): - return value - - return '"' + _escaped.sub(self._EncodeTransform, value) + '"' - - def _XCPrint(self, file, tabs, line): - file.write('\t' * tabs + line) - - def _XCPrintableValue(self, tabs, value, flatten_list=False): - """Returns a representation of value that may be printed in a project file, - mimicing Xcode's behavior. - - _XCPrintableValue can handle str and int values, XCObjects (which are - made printable by returning their id property), and list and dict objects - composed of any of the above types. When printing a list or dict, and - _should_print_single_line is False, the tabs parameter is used to determine - how much to indent the lines corresponding to the items in the list or - dict. - - If flatten_list is True, single-element lists will be transformed into - strings. 
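The quoting rules spelled out above can be tried with a simplified standalone re-creation. It reuses the three regular expressions defined earlier in this section but collapses the control-character table into a single generic escape, so it is a sketch of the decision logic rather than a byte-for-byte reproduction of _EncodeString:

    import re

    _unquoted = re.compile(r'^[A-Za-z0-9$./_]+$')
    _quoted = re.compile('___')
    _escaped = re.compile(r'[\\"]|[^ -~]')

    def encode_string(value):
        if _unquoted.search(value) and not _quoted.search(value):
            return value
        def escape(match):
            char = match.group(0)
            if char in ('\\', '"'):
                return '\\' + char
            return '\\U%04x' % ord(char)  # simplified: no \a, \t, \n special cases
        return '"' + _escaped.sub(escape, value) + '"'

    print(encode_string('main.cc'))      # main.cc        (no quoting needed)
    print(encode_string('Hello World'))  # "Hello World"  (space forces quoting)
    print(encode_string('say "hi"'))     # "say \"hi\""   (quote is escaped)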
- """ - - printable = '' - comment = None - - if self._should_print_single_line: - sep = ' ' - element_tabs = '' - end_tabs = '' - else: - sep = '\n' - element_tabs = '\t' * (tabs + 1) - end_tabs = '\t' * tabs - - if isinstance(value, XCObject): - printable += value.id - comment = value.Comment() - elif isinstance(value, str): - printable += self._EncodeString(value) - elif isinstance(value, unicode): - printable += self._EncodeString(value.encode('utf-8')) - elif isinstance(value, int): - printable += str(value) - elif isinstance(value, list): - if flatten_list and len(value) <= 1: - if len(value) == 0: - printable += self._EncodeString('') - else: - printable += self._EncodeString(value[0]) - else: - printable = '(' + sep - for item in value: - printable += element_tabs + \ - self._XCPrintableValue(tabs + 1, item, flatten_list) + \ - ',' + sep - printable += end_tabs + ')' - elif isinstance(value, dict): - printable = '{' + sep - for item_key, item_value in sorted(value.iteritems()): - printable += element_tabs + \ - self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \ - self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \ - sep - printable += end_tabs + '}' - else: - raise TypeError, "Can't make " + value.__class__.__name__ + ' printable' - - if comment != None: - printable += ' ' + self._EncodeComment(comment) - - return printable - - def _XCKVPrint(self, file, tabs, key, value): - """Prints a key and value, members of an XCObject's _properties dictionary, - to file. - - tabs is an int identifying the indentation level. If the class' - _should_print_single_line variable is True, tabs is ignored and the - key-value pair will be followed by a space insead of a newline. - """ - - if self._should_print_single_line: - printable = '' - after_kv = ' ' - else: - printable = '\t' * tabs - after_kv = '\n' - - # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy - # objects without comments. Sometimes it prints them with comments, but - # the majority of the time, it doesn't. To avoid unnecessary changes to - # the project file after Xcode opens it, don't write comments for - # remoteGlobalIDString. This is a sucky hack and it would certainly be - # cleaner to extend the schema to indicate whether or not a comment should - # be printed, but since this is the only case where the problem occurs and - # Xcode itself can't seem to make up its mind, the hack will suffice. - # - # Also see PBXContainerItemProxy._schema['remoteGlobalIDString']. - if key == 'remoteGlobalIDString' and isinstance(self, - PBXContainerItemProxy): - value_to_print = value.id - else: - value_to_print = value - - # In another one-off, let's set flatten_list on buildSettings properties - # of XCBuildConfiguration objects, because that's how Xcode treats them. - if key == 'buildSettings' and isinstance(self, XCBuildConfiguration): - flatten_list = True - else: - flatten_list = False - - try: - printable += self._XCPrintableValue(tabs, key, flatten_list) + ' = ' + \ - self._XCPrintableValue(tabs, value_to_print, flatten_list) + \ - ';' + after_kv - except TypeError, e: - gyp.common.ExceptionAppend(e, - 'while printing key "%s"' % key) - raise - - self._XCPrint(file, 0, printable) - - def Print(self, file=sys.stdout): - """Prints a reprentation of this object to file, adhering to Xcode output - formatting. 
- """ - - self.VerifyHasRequiredProperties() - - if self._should_print_single_line: - # When printing an object in a single line, Xcode doesn't put any space - # between the beginning of a dictionary (or presumably a list) and the - # first contained item, so you wind up with snippets like - # ...CDEF = {isa = PBXFileReference; fileRef = 0123... - # If it were me, I would have put a space in there after the opening - # curly, but I guess this is just another one of those inconsistencies - # between how Xcode prints PBXFileReference and PBXBuildFile objects as - # compared to other objects. Mimic Xcode's behavior here by using an - # empty string for sep. - sep = '' - end_tabs = 0 - else: - sep = '\n' - end_tabs = 2 - - # Start the object. For example, '\t\tPBXProject = {\n'. - self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep) - - # "isa" isn't in the _properties dictionary, it's an intrinsic property - # of the class which the object belongs to. Xcode always outputs "isa" - # as the first element of an object dictionary. - self._XCKVPrint(file, 3, 'isa', self.__class__.__name__) - - # The remaining elements of an object dictionary are sorted alphabetically. - for property, value in sorted(self._properties.iteritems()): - self._XCKVPrint(file, 3, property, value) - - # End the object. - self._XCPrint(file, end_tabs, '};\n') - - def UpdateProperties(self, properties, do_copy=False): - """Merge the supplied properties into the _properties dictionary. - - The input properties must adhere to the class schema or a KeyError or - TypeError exception will be raised. If adding an object of an XCObject - subclass and the schema indicates a strong relationship, the object's - parent will be set to this object. - - If do_copy is True, then lists, dicts, strong-owned XCObjects, and - strong-owned XCObjects in lists will be copied instead of having their - references added. - """ - - if properties == None: - return - - for property, value in properties.iteritems(): - # Make sure the property is in the schema. - if not property in self._schema: - raise KeyError, property + ' not in ' + self.__class__.__name__ - - # Make sure the property conforms to the schema. - (is_list, property_type, is_strong) = self._schema[property][0:3] - if is_list: - if value.__class__ != list: - raise TypeError, \ - property + ' of ' + self.__class__.__name__ + \ - ' must be list, not ' + value.__class__.__name__ - for item in value: - if not isinstance(item, property_type) and \ - not (item.__class__ == unicode and property_type == str): - # Accept unicode where str is specified. str is treated as - # UTF-8-encoded. - raise TypeError, \ - 'item of ' + property + ' of ' + self.__class__.__name__ + \ - ' must be ' + property_type.__name__ + ', not ' + \ - item.__class__.__name__ - elif not isinstance(value, property_type) and \ - not (value.__class__ == unicode and property_type == str): - # Accept unicode where str is specified. str is treated as - # UTF-8-encoded. - raise TypeError, \ - property + ' of ' + self.__class__.__name__ + ' must be ' + \ - property_type.__name__ + ', not ' + value.__class__.__name__ - - # Checks passed, perform the assignment. 
- if do_copy: - if isinstance(value, XCObject): - if is_strong: - self._properties[property] = value.Copy() - else: - self._properties[property] = value - elif isinstance(value, str) or isinstance(value, unicode) or \ - isinstance(value, int): - self._properties[property] = value - elif isinstance(value, list): - if is_strong: - # If is_strong is True, each element is an XCObject, so it's safe - # to call Copy. - self._properties[property] = [] - for item in value: - self._properties[property].append(item.Copy()) - else: - self._properties[property] = value[:] - elif isinstance(value, dict): - self._properties[property] = value.copy() - else: - raise TypeError, "Don't know how to copy a " + \ - value.__class__.__name__ + ' object for ' + \ - property + ' in ' + self.__class__.__name__ - else: - self._properties[property] = value - - # Set up the child's back-reference to this object. Don't use |value| - # any more because it may not be right if do_copy is true. - if is_strong: - if not is_list: - self._properties[property].parent = self - else: - for item in self._properties[property]: - item.parent = self - - def HasProperty(self, key): - return key in self._properties - - def GetProperty(self, key): - return self._properties[key] - - def SetProperty(self, key, value): - self.UpdateProperties({key: value}) - - def DelProperty(self, key): - if key in self._properties: - del self._properties[key] - - def AppendProperty(self, key, value): - # TODO(mark): Support ExtendProperty too (and make this call that)? - - # Schema validation. - if not key in self._schema: - raise KeyError, key + ' not in ' + self.__class__.__name__ - - (is_list, property_type, is_strong) = self._schema[key][0:3] - if not is_list: - raise TypeError, key + ' of ' + self.__class__.__name__ + ' must be list' - if not isinstance(value, property_type): - raise TypeError, 'item of ' + key + ' of ' + self.__class__.__name__ + \ - ' must be ' + property_type.__name__ + ', not ' + \ - value.__class__.__name__ - - # If the property doesn't exist yet, create a new empty list to receive the - # item. - if not key in self._properties: - self._properties[key] = [] - - # Set up the ownership link. - if is_strong: - value.parent = self - - # Store the item. - self._properties[key].append(value) - - def VerifyHasRequiredProperties(self): - """Ensure that all properties identified as required by the schema are - set. - """ - - # TODO(mark): A stronger verification mechanism is needed. Some - # subclasses need to perform validation beyond what the schema can enforce. - for property, attributes in self._schema.iteritems(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] - if is_required and not property in self._properties: - raise KeyError, self.__class__.__name__ + ' requires ' + property - - def _SetDefaultsFromSchema(self): - """Assign object default values according to the schema. This will not - overwrite properties that have already been set.""" - - defaults = {} - for property, attributes in self._schema.iteritems(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] - if is_required and len(attributes) >= 5 and \ - not property in self._properties: - default = attributes[4] - - defaults[property] = default - - if len(defaults) > 0: - # Use do_copy=True so that each new object gets its own copy of strong - # objects, lists, and dicts. - self.UpdateProperties(defaults, do_copy=True) - - -class XCHierarchicalElement(XCObject): - """Abstract base for PBXGroup and PBXFileReference. 
Not represented in a - project file.""" - - # TODO(mark): Do name and path belong here? Probably so. - # If path is set and name is not, name may have a default value. Name will - # be set to the basename of path, if the basename of path is different from - # the full value of path. If path is already just a leaf name, name will - # not be set. - _schema = XCObject._schema.copy() - _schema.update({ - 'comments': [0, str, 0, 0], - 'fileEncoding': [0, str, 0, 0], - 'includeInIndex': [0, int, 0, 0], - 'indentWidth': [0, int, 0, 0], - 'lineEnding': [0, int, 0, 0], - 'sourceTree': [0, str, 0, 1, ''], - 'tabWidth': [0, int, 0, 0], - 'usesTabs': [0, int, 0, 0], - 'wrapsLines': [0, int, 0, 0], - }) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCObject.__init__(self, properties, id, parent) - if 'path' in self._properties and not 'name' in self._properties: - path = self._properties['path'] - name = posixpath.basename(path) - if name != '' and path != name: - self.SetProperty('name', name) - - if 'path' in self._properties and \ - (not 'sourceTree' in self._properties or \ - self._properties['sourceTree'] == ''): - # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take - # the variable out and make the path be relative to that variable by - # assigning the variable name as the sourceTree. - (source_tree, path) = SourceTreeAndPathFromPath(self._properties['path']) - if source_tree != None: - self._properties['sourceTree'] = source_tree - if path != None: - self._properties['path'] = path - if source_tree != None and path == None and \ - not 'name' in self._properties: - # The path was of the form "$(SDKROOT)" with no path following it. - # This object is now relative to that variable, so it has no path - # attribute of its own. It does, however, keep a name. - del self._properties['path'] - self._properties['name'] = source_tree - - def Name(self): - if 'name' in self._properties: - return self._properties['name'] - elif 'path' in self._properties: - return self._properties['path'] - else: - # This happens in the case of the root PBXGroup. - return None - - def Hashables(self): - """Custom hashables for XCHierarchicalElements. - - XCHierarchicalElements are special. Generally, their hashes shouldn't - change if the paths don't change. The normal XCObject implementation of - Hashables adds a hashable for each object, which means that if - the hierarchical structure changes (possibly due to changes caused when - TakeOverOnlyChild runs and encounters slight changes in the hierarchy), - the hashes will change. For example, if a project file initially contains - a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent - a/b. If someone later adds a/f2 to the project file, a/b can no longer be - collapsed, and f1 winds up with parent b and grandparent a. That would - be sufficient to change f1's hash. - - To counteract this problem, hashables for all XCHierarchicalElements except - for the main group (which has neither a name nor a path) are taken to be - just the set of path components. Because hashables are inherited from - parents, this provides assurance that a/b/f1 has the same set of hashables - whether its parent is b or a/b. - - The main group is a special case. As it is permitted to have no name or - path, it is permitted to use the standard XCObject hash mechanism. This - is not considered a problem because there can be only one main group. 
- """ - - if self == self.PBXProjectAncestor()._properties['mainGroup']: - # super - return XCObject.Hashables(self) - - hashables = [] - - # Put the name in first, ensuring that if TakeOverOnlyChild collapses - # children into a top-level group like "Source", the name always goes - # into the list of hashables without interfering with path components. - if 'name' in self._properties: - # Make it less likely for people to manipulate hashes by following the - # pattern of always pushing an object type value onto the list first. - hashables.append(self.__class__.__name__ + '.name') - hashables.append(self._properties['name']) - - # NOTE: This still has the problem that if an absolute path is encountered, - # including paths with a sourceTree, they'll still inherit their parents' - # hashables, even though the paths aren't relative to their parents. This - # is not expected to be much of a problem in practice. - path = self.PathFromSourceTreeAndPath() - if path != None: - components = path.split(posixpath.sep) - for component in components: - hashables.append(self.__class__.__name__ + '.path') - hashables.append(component) - - hashables.extend(self._hashables) - - return hashables - - def Compare(self, other): - # Allow comparison of these types. PBXGroup has the highest sort rank; - # PBXVariantGroup is treated as equal to PBXFileReference. - valid_class_types = { - PBXFileReference: 'file', - PBXGroup: 'group', - PBXVariantGroup: 'file', - } - self_type = valid_class_types[self.__class__] - other_type = valid_class_types[other.__class__] - - if self_type == other_type: - # If the two objects are of the same sort rank, compare their names. - return cmp(self.Name(), other.Name()) - - # Otherwise, sort groups before everything else. - if self_type == 'group': - return -1 - return 1 - - def CompareRootGroup(self, other): - # This function should be used only to compare direct children of the - # containing PBXProject's mainGroup. These groups should appear in the - # listed order. - # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the - # generator should have a way of influencing this list rather than having - # to hardcode for the generator here. - order = ['Source', 'Intermediates', 'Projects', 'Frameworks', 'Products', - 'Build'] - - # If the groups aren't in the listed order, do a name comparison. - # Otherwise, groups in the listed order should come before those that - # aren't. - self_name = self.Name() - other_name = other.Name() - self_in = isinstance(self, PBXGroup) and self_name in order - other_in = isinstance(self, PBXGroup) and other_name in order - if not self_in and not other_in: - return self.Compare(other) - if self_name in order and not other_name in order: - return -1 - if other_name in order and not self_name in order: - return 1 - - # If both groups are in the listed order, go by the defined order. - self_index = order.index(self_name) - other_index = order.index(other_name) - if self_index < other_index: - return -1 - if self_index > other_index: - return 1 - return 0 - - def PathFromSourceTreeAndPath(self): - # Turn the object's sourceTree and path properties into a single flat - # string of a form comparable to the path parameter. If there's a - # sourceTree property other than "", wrap it in $(...) for the - # comparison. 
- components = [] - if self._properties['sourceTree'] != '': - components.append('$(' + self._properties['sourceTree'] + ')') - if 'path' in self._properties: - components.append(self._properties['path']) - - if len(components) > 0: - return posixpath.join(*components) - - return None - - def FullPath(self): - # Returns a full path to self relative to the project file, or relative - # to some other source tree. Start with self, and walk up the chain of - # parents prepending their paths, if any, until no more parents are - # available (project-relative path) or until a path relative to some - # source tree is found. - xche = self - path = None - while isinstance(xche, XCHierarchicalElement) and \ - (path == None or \ - (not path.startswith('/') and not path.startswith('$'))): - this_path = xche.PathFromSourceTreeAndPath() - if this_path != None and path != None: - path = posixpath.join(this_path, path) - elif this_path != None: - path = this_path - xche = xche.parent - - return path - - -class PBXGroup(XCHierarchicalElement): - """ - Attributes: - _children_by_path: Maps pathnames of children of this PBXGroup to the - actual child XCHierarchicalElement objects. - _variant_children_by_name_and_path: Maps (name, path) tuples of - PBXVariantGroup children to the actual child PBXVariantGroup objects. - """ - - _schema = XCHierarchicalElement._schema.copy() - _schema.update({ - 'children': [1, XCHierarchicalElement, 1, 1, []], - 'name': [0, str, 0, 0], - 'path': [0, str, 0, 0], - }) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCHierarchicalElement.__init__(self, properties, id, parent) - self._children_by_path = {} - self._variant_children_by_name_and_path = {} - for child in self._properties.get('children', []): - self._AddChildToDicts(child) - - def _AddChildToDicts(self, child): - # Sets up this PBXGroup object's dicts to reference the child properly. - child_path = child.PathFromSourceTreeAndPath() - if child_path: - if child_path in self._children_by_path: - raise ValueError, 'Found multiple children with path ' + child_path - self._children_by_path[child_path] = child - - if isinstance(child, PBXVariantGroup): - child_name = child._properties.get('name', None) - key = (child_name, child_path) - if key in self._variant_children_by_name_and_path: - raise ValueError, 'Found multiple PBXVariantGroup children with ' + \ - 'name ' + str(child_name) + ' and path ' + \ - str(child_path) - self._variant_children_by_name_and_path[key] = child - - def AppendChild(self, child): - # Callers should use this instead of calling - # AppendProperty('children', child) directly because this function - # maintains the group's dicts. - self.AppendProperty('children', child) - self._AddChildToDicts(child) - - def GetChildByName(self, name): - # This is not currently optimized with a dict as GetChildByPath is because - # it has few callers. Most callers probably want GetChildByPath. This - # function is only useful to get children that have names but no paths, - # which is rare. The children of the main group ("Source", "Products", - # etc.) is pretty much the only case where this likely to come up. - # - # TODO(mark): Maybe this should raise an error if more than one child is - # present with the same name. 
- if not 'children' in self._properties: - return None - - for child in self._properties['children']: - if child.Name() == name: - return child - - return None - - def GetChildByPath(self, path): - if not path: - return None - - if path in self._children_by_path: - return self._children_by_path[path] - - return None - - def GetChildByRemoteObject(self, remote_object): - # This method is a little bit esoteric. Given a remote_object, which - # should be a PBXFileReference in another project file, this method will - # return this group's PBXReferenceProxy object serving as a local proxy - # for the remote PBXFileReference. - # - # This function might benefit from a dict optimization as GetChildByPath - # for some workloads, but profiling shows that it's not currently a - # problem. - if not 'children' in self._properties: - return None - - for child in self._properties['children']: - if not isinstance(child, PBXReferenceProxy): - continue - - container_proxy = child._properties['remoteRef'] - if container_proxy._properties['remoteGlobalIDString'] == remote_object: - return child - - return None - - def AddOrGetFileByPath(self, path, hierarchical): - """Returns an existing or new file reference corresponding to path. - - If hierarchical is True, this method will create or use the necessary - hierarchical group structure corresponding to path. Otherwise, it will - look in and create an item in the current group only. - - If an existing matching reference is found, it is returned, otherwise, a - new one will be created, added to the correct group, and returned. - - If path identifies a directory by virtue of carrying a trailing slash, - this method returns a PBXFileReference of "folder" type. If path - identifies a variant, by virtue of it identifying a file inside a directory - with an ".lproj" extension, this method returns a PBXVariantGroup - containing the variant named by path, and possibly other variants. For - all other paths, a "normal" PBXFileReference will be returned. - """ - - # Adding or getting a directory? Directories end with a trailing slash. - is_dir = False - if path.endswith('/'): - is_dir = True - normpath = posixpath.normpath(path) - if is_dir: - normpath = path + '/' - else: - normpath = path - - # Adding or getting a variant? Variants are files inside directories - # with an ".lproj" extension. Xcode uses variants for localization. For - # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named - # MainMenu.nib inside path/to, and give it a variant named Language. In - # this example, grandparent would be set to path/to and parent_root would - # be set to Language. - variant_name = None - parent = posixpath.dirname(path) - grandparent = posixpath.dirname(parent) - parent_basename = posixpath.basename(parent) - (parent_root, parent_ext) = posixpath.splitext(parent_basename) - if parent_ext == '.lproj': - variant_name = parent_root - if grandparent == '': - grandparent = None - - # Putting a directory inside a variant group is not currently supported. - assert not is_dir or variant_name == None - - path_split = path.split(posixpath.sep) - if len(path_split) == 1 or \ - ((is_dir or variant_name != None) and len(path_split) == 2) or \ - not hierarchical: - # The PBXFileReference or PBXVariantGroup will be added to or gotten from - # this PBXGroup, no recursion necessary. - if variant_name == None: - # Add or get a PBXFileReference. 
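The variant decomposition performed by AddOrGetFileByPath above amounts to a few posixpath calls. A standalone walk-through with an invented path:

    import posixpath

    path = 'data/resources/English.lproj/MainMenu.nib'
    parent = posixpath.dirname(path)             # data/resources/English.lproj
    grandparent = posixpath.dirname(parent)      # data/resources
    parent_root, parent_ext = posixpath.splitext(posixpath.basename(parent))
    variant_name = parent_root if parent_ext == '.lproj' else None

    print(variant_name)                # English        (the variant)
    print(posixpath.basename(path))    # MainMenu.nib   (the variant group name)
    print(grandparent)                 # data/resources (where the group lives)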
- file_ref = self.GetChildByPath(normpath) - if file_ref != None: - assert file_ref.__class__ == PBXFileReference - else: - file_ref = PBXFileReference({'path': path}) - self.AppendChild(file_ref) - else: - # Add or get a PBXVariantGroup. The variant group name is the same - # as the basename (MainMenu.nib in the example above). grandparent - # specifies the path to the variant group itself, and path_split[-2:] - # is the path of the specific variant relative to its group. - variant_group_name = posixpath.basename(path) - variant_group_ref = self.AddOrGetVariantGroupByNameAndPath( - variant_group_name, grandparent) - variant_path = posixpath.sep.join(path_split[-2:]) - variant_ref = variant_group_ref.GetChildByPath(variant_path) - if variant_ref != None: - assert variant_ref.__class__ == PBXFileReference - else: - variant_ref = PBXFileReference({'name': variant_name, - 'path': variant_path}) - variant_group_ref.AppendChild(variant_ref) - # The caller is interested in the variant group, not the specific - # variant file. - file_ref = variant_group_ref - return file_ref - else: - # Hierarchical recursion. Add or get a PBXGroup corresponding to the - # outermost path component, and then recurse into it, chopping off that - # path component. - next_dir = path_split[0] - group_ref = self.GetChildByPath(next_dir) - if group_ref != None: - assert group_ref.__class__ == PBXGroup - else: - group_ref = PBXGroup({'path': next_dir}) - self.AppendChild(group_ref) - return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]), - hierarchical) - - def AddOrGetVariantGroupByNameAndPath(self, name, path): - """Returns an existing or new PBXVariantGroup for name and path. - - If a PBXVariantGroup identified by the name and path arguments is already - present as a child of this object, it is returned. Otherwise, a new - PBXVariantGroup with the correct properties is created, added as a child, - and returned. - - This method will generally be called by AddOrGetFileByPath, which knows - when to create a variant group based on the structure of the pathnames - passed to it. - """ - - key = (name, path) - if key in self._variant_children_by_name_and_path: - variant_group_ref = self._variant_children_by_name_and_path[key] - assert variant_group_ref.__class__ == PBXVariantGroup - return variant_group_ref - - variant_group_properties = {'name': name} - if path != None: - variant_group_properties['path'] = path - variant_group_ref = PBXVariantGroup(variant_group_properties) - self.AppendChild(variant_group_ref) - - return variant_group_ref - - def TakeOverOnlyChild(self, recurse=False): - """If this PBXGroup has only one child and it's also a PBXGroup, take - it over by making all of its children this object's children. - - This function will continue to take over only children when those children - are groups. If there are three PBXGroups representing a, b, and c, with - c inside b and b inside a, and a and b have no other children, this will - result in a taking over both b and c, forming a PBXGroup for a/b/c. - - If recurse is True, this function will recurse into children and ask them - to collapse themselves by taking over only children as well. Assuming - an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f - (d1, d2, and f are files, the rest are groups), recursion will result in - a group for a/b/c containing a group for d3/e. - """ - - # At this stage, check that child class types are PBXGroup exactly, - # instead of using isinstance. 
The only subclass of PBXGroup, - # PBXVariantGroup, should not participate in reparenting in the same way: - # reparenting by merging different object types would be wrong. - while len(self._properties['children']) == 1 and \ - self._properties['children'][0].__class__ == PBXGroup: - # Loop to take over the innermost only-child group possible. - - child = self._properties['children'][0] - - # Assume the child's properties, including its children. Save a copy - # of this object's old properties, because they'll still be needed. - # This object retains its existing id and parent attributes. - old_properties = self._properties - self._properties = child._properties - self._children_by_path = child._children_by_path - - if not 'sourceTree' in self._properties or \ - self._properties['sourceTree'] == '': - # The child was relative to its parent. Fix up the path. Note that - # children with a sourceTree other than "" are not relative to - # their parents, so no path fix-up is needed in that case. - if 'path' in old_properties: - if 'path' in self._properties: - # Both the original parent and child have paths set. - self._properties['path'] = posixpath.join(old_properties['path'], - self._properties['path']) - else: - # Only the original parent has a path, use it. - self._properties['path'] = old_properties['path'] - if 'sourceTree' in old_properties: - # The original parent had a sourceTree set, use it. - self._properties['sourceTree'] = old_properties['sourceTree'] - - # If the original parent had a name set, keep using it. If the original - # parent didn't have a name but the child did, let the child's name - # live on. If the name attribute seems unnecessary now, get rid of it. - if 'name' in old_properties and old_properties['name'] != None and \ - old_properties['name'] != self.Name(): - self._properties['name'] = old_properties['name'] - if 'name' in self._properties and 'path' in self._properties and \ - self._properties['name'] == self._properties['path']: - del self._properties['name'] - - # Notify all children of their new parent. - for child in self._properties['children']: - child.parent = self - - # If asked to recurse, recurse. - if recurse: - for child in self._properties['children']: - if child.__class__ == PBXGroup: - child.TakeOverOnlyChild(recurse) - - def SortGroup(self): - self._properties['children'] = \ - sorted(self._properties['children'], cmp=lambda x,y: x.Compare(y)) - - # Recurse. - for child in self._properties['children']: - if isinstance(child, PBXGroup): - child.SortGroup() - - -class XCFileLikeElement(XCHierarchicalElement): - # Abstract base for objects that can be used as the fileRef property of - # PBXBuildFile. - - def PathHashables(self): - # A PBXBuildFile that refers to this object will call this method to - # obtain additional hashables specific to this XCFileLikeElement. Don't - # just use this object's hashables, they're not specific and unique enough - # on their own (without access to the parent hashables.) Instead, provide - # hashables that identify this object by path by getting its hashables as - # well as the hashables of ancestor XCHierarchicalElement objects. 
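The collapsing behavior of TakeOverOnlyChild above can be sketched with nested dictionaries. Unlike the real method, this stand-in simply returns the innermost surviving group instead of assuming the child's properties in place, but the path joining follows the same pattern:

    import posixpath

    def take_over_only_child(group):
        while len(group['children']) == 1 and 'children' in group['children'][0]:
            child = group['children'][0]
            # The child was relative to its parent, so prepend the parent path.
            child['path'] = posixpath.join(group['path'], child['path'])
            group = child
        return group

    a = {'path': 'a', 'children': [
            {'path': 'b', 'children': [
                {'path': 'c', 'children': [{'path': 'd1'}, {'path': 'd2'}]}]}]}
    print(take_over_only_child(a)['path'])  # a/b/c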
- - hashables = [] - xche = self - while xche != None and isinstance(xche, XCHierarchicalElement): - xche_hashables = xche.Hashables() - for index in xrange(0, len(xche_hashables)): - hashables.insert(index, xche_hashables[index]) - xche = xche.parent - return hashables - - -class XCContainerPortal(XCObject): - # Abstract base for objects that can be used as the containerPortal property - # of PBXContainerItemProxy. - pass - - -class XCRemoteObject(XCObject): - # Abstract base for objects that can be used as the remoteGlobalIDString - # property of PBXContainerItemProxy. - pass - - -class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject): - _schema = XCFileLikeElement._schema.copy() - _schema.update({ - 'explicitFileType': [0, str, 0, 0], - 'lastKnownFileType': [0, str, 0, 0], - 'name': [0, str, 0, 0], - 'path': [0, str, 0, 1], - }) - - # Weird output rules for PBXFileReference. - _should_print_single_line = True - # super - _encode_transforms = XCFileLikeElement._alternate_encode_transforms - - def __init__(self, properties=None, id=None, parent=None): - # super - XCFileLikeElement.__init__(self, properties, id, parent) - if 'path' in self._properties and self._properties['path'].endswith('/'): - self._properties['path'] = self._properties['path'][:-1] - is_dir = True - else: - is_dir = False - - if 'path' in self._properties and \ - not 'lastKnownFileType' in self._properties and \ - not 'explicitFileType' in self._properties: - # TODO(mark): This is the replacement for a replacement for a quick hack. - # It is no longer incredibly sucky, but this list needs to be extended. - extension_map = { - 'a': 'archive.ar', - 'app': 'wrapper.application', - 'bdic': 'file', - 'bundle': 'wrapper.cfbundle', - 'c': 'sourcecode.c.c', - 'cc': 'sourcecode.cpp.cpp', - 'cpp': 'sourcecode.cpp.cpp', - 'css': 'text.css', - 'cxx': 'sourcecode.cpp.cpp', - 'dylib': 'compiled.mach-o.dylib', - 'framework': 'wrapper.framework', - 'h': 'sourcecode.c.h', - 'hxx': 'sourcecode.cpp.h', - 'icns': 'image.icns', - 'java': 'sourcecode.java', - 'js': 'sourcecode.javascript', - 'm': 'sourcecode.c.objc', - 'mm': 'sourcecode.cpp.objcpp', - 'nib': 'wrapper.nib', - 'pdf': 'image.pdf', - 'pl': 'text.script.perl', - 'plist': 'text.plist.xml', - 'pm': 'text.script.perl', - 'png': 'image.png', - 'py': 'text.script.python', - 'r': 'sourcecode.rez', - 'rez': 'sourcecode.rez', - 's': 'sourcecode.asm', - 'strings': 'text.plist.strings', - 'ttf': 'file', - 'xcconfig': 'text.xcconfig', - 'xib': 'file.xib', - 'y': 'sourcecode.yacc', - } - - if is_dir: - file_type = 'folder' - else: - basename = posixpath.basename(self._properties['path']) - (root, ext) = posixpath.splitext(basename) - # Check the map using a lowercase extension. - # TODO(mark): Maybe it should try with the original case first and fall - # back to lowercase, in case there are any instances where case - # matters. There currently aren't. - if ext != '': - ext = ext[1:].lower() - - # TODO(mark): "text" is the default value, but "file" is appropriate - # for unrecognized files not containing text. Xcode seems to choose - # based on content. - file_type = extension_map.get(ext, 'text') - - self._properties['lastKnownFileType'] = file_type - - -class PBXVariantGroup(PBXGroup, XCFileLikeElement): - """PBXVariantGroup is used by Xcode to represent localizations.""" - # No additions to the schema relative to PBXGroup. - pass - - -# PBXReferenceProxy is also an XCFileLikeElement subclass. 
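The extension lookup above reduces to a dictionary get with a 'text' fallback. A trimmed standalone version, reproducing only a few of the extensions for illustration:

    import posixpath

    extension_map = {'cc': 'sourcecode.cpp.cpp', 'h': 'sourcecode.c.h',
                     'png': 'image.png'}

    def last_known_file_type(path):
        if path.endswith('/'):
            return 'folder'
        ext = posixpath.splitext(posixpath.basename(path))[1]
        if ext != '':
            ext = ext[1:].lower()
        return extension_map.get(ext, 'text')

    print(last_known_file_type('gui/config_dialog/config_dialog.cc'))  # sourcecode.cpp.cpp
    print(last_known_file_type('data/images/unix/ui-tool.png'))        # image.png
    print(last_known_file_type('README'))                              # text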
It is defined below -# because it uses PBXContainerItemProxy, defined below. - - -class XCBuildConfiguration(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'baseConfigurationReference': [0, PBXFileReference, 0, 0], - 'buildSettings': [0, dict, 0, 1, {}], - 'name': [0, str, 0, 1], - }) - - def HasBuildSetting(self, key): - return key in self._properties['buildSettings'] - - def GetBuildSetting(self, key): - return self._properties['buildSettings'][key] - - def SetBuildSetting(self, key, value): - # TODO(mark): If a list, copy? - self._properties['buildSettings'][key] = value - - def AppendBuildSetting(self, key, value): - if not key in self._properties['buildSettings']: - self._properties['buildSettings'][key] = [] - self._properties['buildSettings'][key].append(value) - - def DelBuildSetting(self, key): - if key in self._properties['buildSettings']: - del self._properties['buildSettings'][key] - - -class XCConfigurationList(XCObject): - # _configs is the default list of configurations. - _configs = [ XCBuildConfiguration({'name': 'Debug'}), - XCBuildConfiguration({'name': 'Release'}) ] - - _schema = XCObject._schema.copy() - _schema.update({ - 'buildConfigurations': [1, XCBuildConfiguration, 1, 1, _configs], - 'defaultConfigurationIsVisible': [0, int, 0, 1, 1], - 'defaultConfigurationName': [0, str, 0, 1, 'Release'], - }) - - def Name(self): - return 'Build configuration list for ' + \ - self.parent.__class__.__name__ + ' "' + self.parent.Name() + '"' - - def ConfigurationNamed(self, name): - """Convenience accessor to obtain an XCBuildConfiguration by name.""" - for configuration in self._properties['buildConfigurations']: - if configuration._properties['name'] == name: - return configuration - - raise KeyError, name - - def DefaultConfiguration(self): - """Convenience accessor to obtain the default XCBuildConfiguration.""" - return self.ConfigurationNamed(self._properties['defaultConfigurationName']) - - def HasBuildSetting(self, key): - """Determines the state of a build setting in all XCBuildConfiguration - child objects. - - If all child objects have key in their build settings, and the value is the - same in all child objects, returns 1. - - If no child objects have the key in their build settings, returns 0. - - If some, but not all, child objects have the key in their build settings, - or if any children have different values for the key, returns -1. - """ - - has = None - value = None - for configuration in self._properties['buildConfigurations']: - configuration_has = configuration.HasBuildSetting(key) - if has == None: - has = configuration_has - elif has != configuration_has: - return -1 - - if configuration_has: - configuration_value = configuration.GetBuildSetting(key) - if value == None: - value = configuration_value - elif value != configuration_value: - return -1 - - if not has: - return 0 - - return 1 - - def GetBuildSetting(self, key): - """Gets the build setting for key. - - All child XCConfiguration objects must have the same value set for the - setting, or a ValueError will be raised. - """ - - # TODO(mark): This is wrong for build settings that are lists. The list - # contents should be compared (and a list copy returned?) 
- - value = None - for configuration in self._properties['buildConfigurations']: - configuration_value = configuration.GetBuildSetting(key) - if value == None: - value = configuration_value - else: - if value != configuration_value: - raise ValueError, 'Variant values for ' + key - - return value - - def SetBuildSetting(self, key, value): - """Sets the build setting for key to value in all child - XCBuildConfiguration objects. - """ - - for configuration in self._properties['buildConfigurations']: - configuration.SetBuildSetting(key, value) - - def AppendBuildSetting(self, key, value): - """Appends value to the build setting for key, which is treated as a list, - in all child XCBuildConfiguration objects. - """ - - for configuration in self._properties['buildConfigurations']: - configuration.AppendBuildSetting(key, value) - - def DelBuildSetting(self, key): - """Deletes the build setting key from all child XCBuildConfiguration - objects. - """ - - for configuration in self._properties['buildConfigurations']: - configuration.DelBuildSetting(key) - - -class PBXBuildFile(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'fileRef': [0, XCFileLikeElement, 0, 1], - }) - - # Weird output rules for PBXBuildFile. - _should_print_single_line = True - _encode_transforms = XCObject._alternate_encode_transforms - - def Name(self): - # Example: "main.cc in Sources" - return self._properties['fileRef'].Name() + ' in ' + self.parent.Name() - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # It is not sufficient to just rely on Name() to get the - # XCFileLikeElement's name, because that is not a complete pathname. - # PathHashables returns hashables unique enough that no two - # PBXBuildFiles should wind up with the same set of hashables, unless - # someone adds the same file multiple times to the same target. That - # would be considered invalid anyway. - hashables.extend(self._properties['fileRef'].PathHashables()) - - return hashables - - -class XCBuildPhase(XCObject): - """Abstract base for build phase classes. Not represented in a project - file. - - Attributes: - _files_by_path: A dict mapping each path of a child in the files list by - path (keys) to the corresponding PBXBuildFile children (values). - _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys) - to the corresponding PBXBuildFile children (values). - """ - - # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't - # actually have a "files" list. XCBuildPhase should not have "files" but - # another abstract subclass of it should provide this, and concrete build - # phase types that do have "files" lists should be derived from that new - # abstract subclass. XCBuildPhase should only provide buildActionMask and - # runOnlyForDeploymentPostprocessing, and not files or the various - # file-related methods and attributes. - - _schema = XCObject._schema.copy() - _schema.update({ - 'buildActionMask': [0, int, 0, 1, 0x7fffffff], - 'files': [1, PBXBuildFile, 1, 1, []], - 'runOnlyForDeploymentPostprocessing': [0, int, 0, 1, 0], - }) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCObject.__init__(self, properties, id, parent) - - self._files_by_path = {} - self._files_by_xcfilelikeelement = {} - for pbxbuildfile in self._properties.get('files', []): - self._AddBuildFileToDicts(pbxbuildfile) - - def FileGroup(self, path): - # Subclasses must override this by returning a two-element tuple. 
The - # first item in the tuple should be the PBXGroup to which "path" should be - # added, either as a child or deeper descendant. The second item should - # be a boolean indicating whether files should be added into hierarchical - # groups or one single flat group. - raise NotImplementedError, \ - self.__class__.__name__ + ' must implement FileGroup' - - def _AddPathToDict(self, pbxbuildfile, path): - """Adds path to the dict tracking paths belonging to this build phase. - - If the path is already a member of this build phase, raises an exception. - """ - - if path in self._files_by_path: - raise ValueError, 'Found multiple build files with path ' + path - self._files_by_path[path] = pbxbuildfile - - def _AddBuildFileToDicts(self, pbxbuildfile, path=None): - """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts. - - If path is specified, then it is the path that is being added to the - phase, and pbxbuildfile must contain either a PBXFileReference directly - referencing that path, or it must contain a PBXVariantGroup that itself - contains a PBXFileReference referencing the path. - - If path is not specified, either the PBXFileReference's path or the paths - of all children of the PBXVariantGroup are taken as being added to the - phase. - - If the path is already present in the phase, raises an exception. - - If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile - are already present in the phase, referenced by a different PBXBuildFile - object, raises an exception. This does not raise an exception when - a PBXFileReference or PBXVariantGroup reappear and are referenced by the - same PBXBuildFile that has already introduced them, because in the case - of PBXVariantGroup objects, they may correspond to multiple paths that are - not all added simultaneously. When this situation occurs, the path needs - to be added to _files_by_path, but nothing needs to change in - _files_by_xcfilelikeelement, and the caller should have avoided adding - the PBXBuildFile if it is already present in the list of children. - """ - - xcfilelikeelement = pbxbuildfile._properties['fileRef'] - - paths = [] - if path != None: - # It's best when the caller provides the path. - if isinstance(xcfilelikeelement, PBXVariantGroup): - paths.append(path) - else: - # If the caller didn't provide a path, there can be either multiple - # paths (PBXVariantGroup) or one. - if isinstance(xcfilelikeelement, PBXVariantGroup): - for variant in xcfilelikeelement._properties['children']: - paths.append(variant.FullPath()) - else: - paths.append(xcfilelikeelement.FullPath()) - - # Add the paths first, because if something's going to raise, the - # messages provided by _AddPathToDict are more useful owing to its - # having access to a real pathname and not just an object's Name(). - for a_path in paths: - self._AddPathToDict(pbxbuildfile, a_path) - - # If another PBXBuildFile references this XCFileLikeElement, there's a - # problem. - if xcfilelikeelement in self._files_by_xcfilelikeelement and \ - self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile: - raise ValueError, 'Found multiple build files for ' + \ - xcfilelikeelement.Name() - self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile - - def AppendBuildFile(self, pbxbuildfile, path=None): - # Callers should use this instead of calling - # AppendProperty('files', pbxbuildfile) directly because this function - # maintains the object's dicts. 
Better yet, callers can just call AddFile - # with a pathname and not worry about building their own PBXBuildFile - # objects. - self.AppendProperty('files', pbxbuildfile) - self._AddBuildFileToDicts(pbxbuildfile, path) - - def AddFile(self, path): - (file_group, hierarchical) = self.FileGroup(path) - file_ref = file_group.AddOrGetFileByPath(path, hierarchical) - - if file_ref in self._files_by_xcfilelikeelement and \ - isinstance(file_ref, PBXVariantGroup): - # There's already a PBXBuildFile in this phase corresponding to the - # PBXVariantGroup. path just provides a new variant that belongs to - # the group. Add the path to the dict. - pbxbuildfile = self._files_by_xcfilelikeelement[file_ref] - self._AddBuildFileToDicts(pbxbuildfile, path) - else: - # Add a new PBXBuildFile to get file_ref into the phase. - pbxbuildfile = PBXBuildFile({'fileRef': file_ref}) - self.AppendBuildFile(pbxbuildfile, path) - - -class PBXHeadersBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return 'Headers' - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXResourcesBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return 'Resources' - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXSourcesBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return 'Sources' - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXFrameworksBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return 'Frameworks' - - def FileGroup(self, path): - return (self.PBXProjectAncestor().FrameworksGroup(), False) - - -class PBXShellScriptBuildPhase(XCBuildPhase): - _schema = XCBuildPhase._schema.copy() - _schema.update({ - 'inputPaths': [1, str, 0, 1, []], - 'name': [0, str, 0, 0], - 'outputPaths': [1, str, 0, 1, []], - 'shellPath': [0, str, 0, 1, '/bin/sh'], - 'shellScript': [0, str, 0, 1], - 'showEnvVarsInLog': [0, int, 0, 0], - }) - - def Name(self): - if 'name' in self._properties: - return self._properties['name'] - - return 'ShellScript' - - -class PBXCopyFilesBuildPhase(XCBuildPhase): - _schema = XCBuildPhase._schema.copy() - _schema.update({ - 'dstPath': [0, str, 0, 1], - 'dstSubfolderSpec': [0, int, 0, 1], - 'name': [0, str, 0, 0], - }) - - # path_tree_re matches "$(DIR)/path" or just "$(DIR)". Match group 1 is - # "DIR", match group 3 is "path" or None. - path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$') - - # path_tree_to_subfolder maps names of Xcode variables to the associated - # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object. - path_tree_to_subfolder = { - 'BUILT_PRODUCTS_DIR': 16, # Products Directory - # Other types that can be chosen via the Xcode UI. - # TODO(mark): Map Xcode variable names to these. - # : 1, # Wrapper - # : 6, # Executables: 6 - # : 7, # Resources - # : 15, # Java Resources - # : 10, # Frameworks - # : 11, # Shared Frameworks - # : 12, # Shared Support - # : 13, # PlugIns - } - - def Name(self): - if 'name' in self._properties: - return self._properties['name'] - - return 'CopyFiles' - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - def SetDestination(self, path): - """Set the dstSubfolderSpec and dstPath properties from path. 
- - path may be specified in the same notation used for XCHierarchicalElements, - specifically, "$(DIR)/path". - """ - - path_tree_match = self.path_tree_re.search(path) - if path_tree_match: - # Everything else needs to be relative to an Xcode variable. - path_tree = path_tree_match.group(1) - relative_path = path_tree_match.group(3) - - if path_tree in self.path_tree_to_subfolder: - subfolder = self.path_tree_to_subfolder[path_tree] - if relative_path == None: - relative_path = '' - else: - # The path starts with an unrecognized Xcode variable - # name like $(SRCROOT). Xcode will still handle this - # as an "absolute path" that starts with the variable. - subfolder = 0 - relative_path = path - elif path.startswith('/'): - # Special case. Absolute paths are in dstSubfolderSpec 0. - subfolder = 0 - relative_path = path[1:] - else: - raise ValueError, 'Can\'t use path %s in a %s' % \ - (path, self.__class__.__name__) - - self._properties['dstPath'] = relative_path - self._properties['dstSubfolderSpec'] = subfolder - - -class PBXBuildRule(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'compilerSpec': [0, str, 0, 1], - 'filePatterns': [0, str, 0, 0], - 'fileType': [0, str, 0, 1], - 'isEditable': [0, int, 0, 1, 1], - 'outputFiles': [1, str, 0, 1, []], - 'script': [0, str, 0, 0], - }) - - def Name(self): - # Not very inspired, but it's what Xcode uses. - return self.__class__.__name__ - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # Use the hashables of the weak objects that this object refers to. - hashables.append(self._properties['fileType']) - if 'filePatterns' in self._properties: - hashables.append(self._properties['filePatterns']) - return hashables - - -class PBXContainerItemProxy(XCObject): - # When referencing an item in this project file, containerPortal is the - # PBXProject root object of this project file. When referencing an item in - # another project file, containerPortal is a PBXFileReference identifying - # the other project file. - # - # When serving as a proxy to an XCTarget (in this project file or another), - # proxyType is 1. When serving as a proxy to a PBXFileReference (in another - # project file), proxyType is 2. Type 2 is used for references to the - # producs of the other project file's targets. - # - # Xcode is weird about remoteGlobalIDString. Usually, it's printed without - # a comment, indicating that it's tracked internally simply as a string, but - # sometimes it's printed with a comment (usually when the object is initially - # created), indicating that it's tracked as a project file object at least - # sometimes. This module always tracks it as an object, but contains a hack - # to prevent it from printing the comment in the project file output. See - # _XCKVPrint. - _schema = XCObject._schema.copy() - _schema.update({ - 'containerPortal': [0, XCContainerPortal, 0, 1], - 'proxyType': [0, int, 0, 1], - 'remoteGlobalIDString': [0, XCRemoteObject, 0, 1], - 'remoteInfo': [0, str, 0, 1], - }) - - def __repr__(self): - props = self._properties - name = '%s.gyp:%s' % (props['containerPortal'].Name(), props['remoteInfo']) - return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self)) - - def Name(self): - # Admittedly not the best name, but it's what Xcode uses. - return self.__class__.__name__ - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # Use the hashables of the weak objects that this object refers to. 
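As a minimal sketch of the destination notation handled by SetDestination above (the phase name and paths are hypothetical, and it assumes gyp's pylib is on sys.path so the module imports as gyp.xcodeproj_file):

  from gyp.xcodeproj_file import PBXCopyFilesBuildPhase

  copy_phase = PBXCopyFilesBuildPhase({'name': 'Copy Plugins'})
  copy_phase.SetDestination('$(BUILT_PRODUCTS_DIR)/plugins')
  # -> dstSubfolderSpec = 16 (Products Directory), dstPath = 'plugins'
  copy_phase.SetDestination('/usr/local/lib')
  # Absolute paths map to dstSubfolderSpec = 0 with the leading '/' stripped.
  copy_phase.SetDestination('$(SRCROOT)/gen')
  # An unrecognized Xcode variable also uses spec 0, keeping '$(SRCROOT)/gen' verbatim.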
- hashables.extend(self._properties['containerPortal'].Hashables()) - hashables.extend(self._properties['remoteGlobalIDString'].Hashables()) - return hashables - - -class PBXTargetDependency(XCObject): - # The "target" property accepts an XCTarget object, and obviously not - # NoneType. But XCTarget is defined below, so it can't be put into the - # schema yet. The definition of PBXTargetDependency can't be moved below - # XCTarget because XCTarget's own schema references PBXTargetDependency. - # Python doesn't deal well with this circular relationship, and doesn't have - # a real way to do forward declarations. To work around, the type of - # the "target" property is reset below, after XCTarget is defined. - # - # At least one of "name" and "target" is required. - _schema = XCObject._schema.copy() - _schema.update({ - 'name': [0, str, 0, 0], - 'target': [0, None.__class__, 0, 0], - 'targetProxy': [0, PBXContainerItemProxy, 1, 1], - }) - - def __repr__(self): - name = self._properties.get('name') or self._properties['target'].Name() - return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self)) - - def Name(self): - # Admittedly not the best name, but it's what Xcode uses. - return self.__class__.__name__ - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # Use the hashables of the weak objects that this object refers to. - hashables.extend(self._properties['targetProxy'].Hashables()) - return hashables - - -class PBXReferenceProxy(XCFileLikeElement): - _schema = XCFileLikeElement._schema.copy() - _schema.update({ - 'fileType': [0, str, 0, 1], - 'path': [0, str, 0, 1], - 'remoteRef': [0, PBXContainerItemProxy, 1, 1], - }) - - -class XCTarget(XCRemoteObject): - # An XCTarget is really just an XCObject, the XCRemoteObject thing is just - # to allow PBXProject to be used in the remoteGlobalIDString property of - # PBXContainerItemProxy. - # - # Setting a "name" property at instantiation may also affect "productName", - # which may in turn affect the "PRODUCT_NAME" build setting in children of - # "buildConfigurationList". See __init__ below. - _schema = XCRemoteObject._schema.copy() - _schema.update({ - 'buildConfigurationList': [0, XCConfigurationList, 1, 1, - XCConfigurationList()], - 'buildPhases': [1, XCBuildPhase, 1, 1, []], - 'dependencies': [1, PBXTargetDependency, 1, 1, []], - 'name': [0, str, 0, 1], - 'productName': [0, str, 0, 1], - }) - - def __init__(self, properties=None, id=None, parent=None, - force_outdir=None, force_prefix=None, force_extension=None): - # super - XCRemoteObject.__init__(self, properties, id, parent) - - # Set up additional defaults not expressed in the schema. If a "name" - # property was supplied, set "productName" if it is not present. Also set - # the "PRODUCT_NAME" build setting in each configuration, but only if - # the setting is not present in any build configuration. - if 'name' in self._properties: - if not 'productName' in self._properties: - self.SetProperty('productName', self._properties['name']) - - if 'productName' in self._properties: - if 'buildConfigurationList' in self._properties: - configs = self._properties['buildConfigurationList'] - if configs.HasBuildSetting('PRODUCT_NAME') == 0: - configs.SetBuildSetting('PRODUCT_NAME', - self._properties['productName']) - - def AddDependency(self, other): - pbxproject = self.PBXProjectAncestor() - other_pbxproject = other.PBXProjectAncestor() - if pbxproject == other_pbxproject: - # The easy case. Add a dependency to another target in the same - # project file. 
- container = PBXContainerItemProxy({'containerPortal': pbxproject, - 'proxyType': 1, - 'remoteGlobalIDString': other, - 'remoteInfo': other.Name()}) - dependency = PBXTargetDependency({'target': other, - 'targetProxy': container}) - self.AppendProperty('dependencies', dependency) - else: - # The hard case. Add a dependency to a target in a different project - # file. Actually, this case isn't really so hard. - other_project_ref = \ - pbxproject.AddOrGetProjectReference(other_pbxproject)[1] - container = PBXContainerItemProxy({ - 'containerPortal': other_project_ref, - 'proxyType': 1, - 'remoteGlobalIDString': other, - 'remoteInfo': other.Name(), - }) - dependency = PBXTargetDependency({'name': other.Name(), - 'targetProxy': container}) - self.AppendProperty('dependencies', dependency) - - # Proxy all of these through to the build configuration list. - - def ConfigurationNamed(self, name): - return self._properties['buildConfigurationList'].ConfigurationNamed(name) - - def DefaultConfiguration(self): - return self._properties['buildConfigurationList'].DefaultConfiguration() - - def HasBuildSetting(self, key): - return self._properties['buildConfigurationList'].HasBuildSetting(key) - - def GetBuildSetting(self, key): - return self._properties['buildConfigurationList'].GetBuildSetting(key) - - def SetBuildSetting(self, key, value): - return self._properties['buildConfigurationList'].SetBuildSetting(key, \ - value) - - def AppendBuildSetting(self, key, value): - return self._properties['buildConfigurationList'].AppendBuildSetting(key, \ - value) - - def DelBuildSetting(self, key): - return self._properties['buildConfigurationList'].DelBuildSetting(key) - - -# Redefine the type of the "target" property. See PBXTargetDependency._schema -# above. -PBXTargetDependency._schema['target'][1] = XCTarget - - -class PBXNativeTarget(XCTarget): - # buildPhases is overridden in the schema to be able to set defaults. - # - # NOTE: Contrary to most objects, it is advisable to set parent when - # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject - # object. A parent reference is required for a PBXNativeTarget during - # construction to be able to set up the target defaults for productReference, - # because a PBXBuildFile object must be created for the target and it must - # be added to the PBXProject's mainGroup hierarchy. - _schema = XCTarget._schema.copy() - _schema.update({ - 'buildPhases': [1, XCBuildPhase, 1, 1, - [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()]], - 'buildRules': [1, PBXBuildRule, 1, 1, []], - 'productReference': [0, PBXFileReference, 0, 1], - 'productType': [0, str, 0, 1], - }) - - # Mapping from Xcode product-types to settings. The settings are: - # filetype : used for explicitFileType in the project file - # prefix : the prefix for the file name - # suffix : the suffix for the filen ame - # set_xc_exe_prefix : bool to say if EXECUTABLE_PREFIX should be set to the - # prefix value. 
- _product_filetypes = { - 'com.apple.product-type.application': ['wrapper.application', - '', '.app', False], - 'com.apple.product-type.bundle': ['wrapper.cfbundle', - '', '.bundle', False], - 'com.apple.product-type.framework': ['wrapper.framework', - '', '.framework', False], - 'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib', - 'lib', '.dylib', True], - 'com.apple.product-type.library.static': ['archive.ar', - 'lib', '.a', False], - 'com.apple.product-type.tool': ['compiled.mach-o.executable', - '', '', False], - } - - def __init__(self, properties=None, id=None, parent=None, - force_outdir=None, force_prefix=None, force_extension=None): - # super - XCTarget.__init__(self, properties, id, parent) - - if 'productName' in self._properties and \ - 'productType' in self._properties and \ - not 'productReference' in self._properties and \ - self._properties['productType'] in self._product_filetypes: - products_group = None - pbxproject = self.PBXProjectAncestor() - if pbxproject != None: - products_group = pbxproject.ProductsGroup() - - if products_group != None: - (filetype, prefix, suffix, set_xc_exe_prefix) = \ - self._product_filetypes[self._properties['productType']] - - if force_extension is not None: - # If it's a wrapper (bundle), set WRAPPER_EXTENSION. - if filetype.startswith('wrapper.'): - self.SetBuildSetting('WRAPPER_EXTENSION', force_extension) - else: - # Extension override. - suffix = '.' + force_extension - self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension) - - if filetype.startswith('compiled.mach-o.executable'): - product_name = self._properties['productName'] - product_name += suffix - suffix = '' - self.SetProperty('productName', product_name) - self.SetBuildSetting('PRODUCT_NAME', product_name) - - # Xcode handles most prefixes based on the target type, however there - # are exceptions. If a "BSD Dynamic Library" target is added in the - # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that - # behavior. - if force_prefix is not None: - prefix = force_prefix - if filetype.startswith('wrapper.'): - self.SetBuildSetting('WRAPPER_PREFIX', prefix) - else: - self.SetBuildSetting('EXECUTABLE_PREFIX', prefix) - - if force_outdir is not None: - self.SetBuildSetting('TARGET_BUILD_DIR', force_outdir) - - # TODO(tvl): Remove the below hack. - # http://code.google.com/p/gyp/issues/detail?id=122 - - # Some targets include the prefix in the target_name. These targets - # really should just add a product_name setting that doesn't include - # the prefix. For example: - # target_name = 'libevent', product_name = 'event' - # This check cleans up for them. 
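To make the libevent example just mentioned concrete, a short sketch of what this constructor ends up doing (pbxproject is assumed to be an existing PBXProject that the target is parented to):

  target = PBXNativeTarget({'name': 'libevent',
                            'productType': 'com.apple.product-type.library.static'},
                           parent=pbxproject)
  # The 'lib' prefix is stripped, so the PRODUCT_NAME build setting becomes
  # 'event', while the generated productReference keeps the full path
  # 'libevent.a' under BUILT_PRODUCTS_DIR in the Products group.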
- product_name = self._properties['productName'] - prefix_len = len(prefix) - if prefix_len and (product_name[:prefix_len] == prefix): - product_name = product_name[prefix_len:] - self.SetProperty('productName', product_name) - self.SetBuildSetting('PRODUCT_NAME', product_name) - - ref_props = { - 'explicitFileType': filetype, - 'includeInIndex': 0, - 'path': prefix + product_name + suffix, - 'sourceTree': 'BUILT_PRODUCTS_DIR', - } - file_ref = PBXFileReference(ref_props) - products_group.AppendChild(file_ref) - self.SetProperty('productReference', file_ref) - - def GetBuildPhaseByType(self, type): - if not 'buildPhases' in self._properties: - return None - - the_phase = None - for phase in self._properties['buildPhases']: - if isinstance(phase, type): - # Some phases may be present in multiples in a well-formed project file, - # but phases like PBXSourcesBuildPhase may only be present singly, and - # this function is intended as an aid to GetBuildPhaseByType. Loop - # over the entire list of phases and assert if more than one of the - # desired type is found. - assert the_phase == None - the_phase = phase - - return the_phase - - def ResourcesPhase(self): - resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase) - if resources_phase == None: - resources_phase = PBXResourcesBuildPhase() - - # The resources phase should come before the sources and frameworks - # phases, if any. - insert_at = len(self._properties['buildPhases']) - for index in xrange(0, len(self._properties['buildPhases'])): - phase = self._properties['buildPhases'][index] - if isinstance(phase, PBXSourcesBuildPhase) or \ - isinstance(phase, PBXFrameworksBuildPhase): - insert_at = index - break - - self._properties['buildPhases'].insert(insert_at, resources_phase) - resources_phase.parent = self - - return resources_phase - - def SourcesPhase(self): - sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase) - if sources_phase == None: - sources_phase = PBXSourcesBuildPhase() - self.AppendProperty('buildPhases', sources_phase) - - return sources_phase - - def FrameworksPhase(self): - frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase) - if frameworks_phase == None: - frameworks_phase = PBXFrameworksBuildPhase() - self.AppendProperty('buildPhases', frameworks_phase) - - return frameworks_phase - - def AddDependency(self, other): - # super - XCTarget.AddDependency(self, other) - - static_library_type = 'com.apple.product-type.library.static' - shared_library_type = 'com.apple.product-type.library.dynamic' - framework_type = 'com.apple.product-type.framework' - if isinstance(other, PBXNativeTarget) and \ - 'productType' in self._properties and \ - self._properties['productType'] != static_library_type and \ - 'productType' in other._properties and \ - (other._properties['productType'] == static_library_type or \ - ((other._properties['productType'] == shared_library_type or \ - other._properties['productType'] == framework_type) and \ - ((not other.HasBuildSetting('MACH_O_TYPE')) or - other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))): - - file_ref = other.GetProperty('productReference') - - pbxproject = self.PBXProjectAncestor() - other_pbxproject = other.PBXProjectAncestor() - if pbxproject != other_pbxproject: - other_project_product_group = \ - pbxproject.AddOrGetProjectReference(other_pbxproject)[0] - file_ref = other_project_product_group.GetChildByRemoteObject(file_ref) - - self.FrameworksPhase().AppendProperty('files', - PBXBuildFile({'fileRef': file_ref})) - - -class 
PBXAggregateTarget(XCTarget): - pass - - -class PBXProject(XCContainerPortal): - # A PBXProject is really just an XCObject, the XCContainerPortal thing is - # just to allow PBXProject to be used in the containerPortal property of - # PBXContainerItemProxy. - """ - - Attributes: - path: "sample.xcodeproj". TODO(mark) Document me! - _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each - value is a reference to the dict in the - projectReferences list associated with the keyed - PBXProject. - """ - - _schema = XCContainerPortal._schema.copy() - _schema.update({ - 'attributes': [0, dict, 0, 0], - 'buildConfigurationList': [0, XCConfigurationList, 1, 1, - XCConfigurationList()], - 'compatibilityVersion': [0, str, 0, 1, 'Xcode 3.1'], - 'hasScannedForEncodings': [0, int, 0, 1, 1], - 'mainGroup': [0, PBXGroup, 1, 1, PBXGroup()], - 'projectDirPath': [0, str, 0, 1, ''], - 'projectReferences': [1, dict, 0, 0], - 'projectRoot': [0, str, 0, 1, ''], - 'targets': [1, XCTarget, 1, 1, []], - }) - - def __init__(self, properties=None, id=None, parent=None, path=None): - self.path = path - self._other_pbxprojects = {} - # super - return XCContainerPortal.__init__(self, properties, id, parent) - - def Name(self): - name = self.path - if name[-10:] == '.xcodeproj': - name = name[:-10] - return posixpath.basename(name) - - def Path(self): - return self.path - - def Comment(self): - return 'Project object' - - def Children(self): - # super - children = XCContainerPortal.Children(self) - - # Add children that the schema doesn't know about. Maybe there's a more - # elegant way around this, but this is the only case where we need to own - # objects in a dictionary (that is itself in a list), and three lines for - # a one-off isn't that big a deal. - if 'projectReferences' in self._properties: - for reference in self._properties['projectReferences']: - children.append(reference['ProductGroup']) - - return children - - def PBXProjectAncestor(self): - return self - - def _GroupByName(self, name): - if not 'mainGroup' in self._properties: - self.SetProperty('mainGroup', PBXGroup()) - - main_group = self._properties['mainGroup'] - group = main_group.GetChildByName(name) - if group == None: - group = PBXGroup({'name': name}) - main_group.AppendChild(group) - - return group - - # SourceGroup and ProductsGroup are created by default in Xcode's own - # templates. - def SourceGroup(self): - return self._GroupByName('Source') - - def ProductsGroup(self): - return self._GroupByName('Products') - - # IntermediatesGroup is used to collect source-like files that are generated - # by rules or script phases and are placed in intermediate directories such - # as DerivedSources. - def IntermediatesGroup(self): - return self._GroupByName('Intermediates') - - # FrameworksGroup and ProjectsGroup are top-level groups used to collect - # frameworks and projects. - def FrameworksGroup(self): - return self._GroupByName('Frameworks') - - def ProjectsGroup(self): - return self._GroupByName('Projects') - - def RootGroupForPath(self, path): - """Returns a PBXGroup child of this object to which path should be added. - - This method is intended to choose between SourceGroup and - IntermediatesGroup on the basis of whether path is present in a source - directory or an intermediates directory. For the purposes of this - determination, any path located within a derived file directory such as - PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates - directory. - - The returned value is a two-element tuple. 
The first element is the - PBXGroup, and the second element specifies whether that group should be - organized hierarchically (True) or as a single flat list (False). - """ - - # TODO(mark): make this a class variable and bind to self on call? - # Also, this list is nowhere near exhaustive. - # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by - # gyp.generator.xcode. There should probably be some way for that module - # to push the names in, rather than having to hard-code them here. - source_tree_groups = { - 'DERIVED_FILE_DIR': (self.IntermediatesGroup, True), - 'INTERMEDIATE_DIR': (self.IntermediatesGroup, True), - 'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True), - 'SHARED_INTERMEDIATE_DIR': (self.IntermediatesGroup, True), - } - - (source_tree, path) = SourceTreeAndPathFromPath(path) - if source_tree != None and source_tree in source_tree_groups: - (group_func, hierarchical) = source_tree_groups[source_tree] - group = group_func() - return (group, hierarchical) - - # TODO(mark): make additional choices based on file extension. - - return (self.SourceGroup(), True) - - def AddOrGetFileInRootGroup(self, path): - """Returns a PBXFileReference corresponding to path in the correct group - according to RootGroupForPath's heuristics. - - If an existing PBXFileReference for path exists, it will be returned. - Otherwise, one will be created and returned. - """ - - (group, hierarchical) = self.RootGroupForPath(path) - return group.AddOrGetFileByPath(path, hierarchical) - - def RootGroupsTakeOverOnlyChildren(self, recurse=False): - """Calls TakeOverOnlyChild for all groups in the main group.""" - - for group in self._properties['mainGroup']._properties['children']: - if isinstance(group, PBXGroup): - group.TakeOverOnlyChild(recurse) - - def SortGroups(self): - # Sort the children of the mainGroup (like "Source" and "Products") - # according to their defined order. - self._properties['mainGroup']._properties['children'] = \ - sorted(self._properties['mainGroup']._properties['children'], - cmp=lambda x,y: x.CompareRootGroup(y)) - - # Sort everything else by putting group before files, and going - # alphabetically by name within sections of groups and files. SortGroup - # is recursive. - for group in self._properties['mainGroup']._properties['children']: - if not isinstance(group, PBXGroup): - continue - - if group.Name() == 'Products': - # The Products group is a special case. Instead of sorting - # alphabetically, sort things in the order of the targets that - # produce the products. To do this, just build up a new list of - # products based on the targets. - products = [] - for target in self._properties['targets']: - if not isinstance(target, PBXNativeTarget): - continue - product = target._properties['productReference'] - # Make sure that the product is already in the products group. - assert product in group._properties['children'] - products.append(product) - - # Make sure that this process doesn't miss anything that was already - # in the products group. - assert len(products) == len(group._properties['children']) - group._properties['children'] = products - else: - group.SortGroup() - - def AddOrGetProjectReference(self, other_pbxproject): - """Add a reference to another project file (via PBXProject object) to this - one. - - Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in - this project file that contains a PBXReferenceProxy object for each - product of each PBXNativeTarget in the other project file. 
ProjectRef is - a PBXFileReference to the other project file. - - If this project file already references the other project file, the - existing ProductGroup and ProjectRef are returned. The ProductGroup will - still be updated if necessary. - """ - - if not 'projectReferences' in self._properties: - self._properties['projectReferences'] = [] - - product_group = None - project_ref = None - - if not other_pbxproject in self._other_pbxprojects: - # This project file isn't yet linked to the other one. Establish the - # link. - product_group = PBXGroup({'name': 'Products'}) - - # ProductGroup is strong. - product_group.parent = self - - # There's nothing unique about this PBXGroup, and if left alone, it will - # wind up with the same set of hashables as all other PBXGroup objects - # owned by the projectReferences list. Add the hashables of the - # remote PBXProject that it's related to. - product_group._hashables.extend(other_pbxproject.Hashables()) - - # The other project reports its path as relative to the same directory - # that this project's path is relative to. The other project's path - # is not necessarily already relative to this project. Figure out the - # pathname that this project needs to use to refer to the other one. - this_path = posixpath.dirname(self.Path()) - projectDirPath = self.GetProperty('projectDirPath') - if projectDirPath: - if posixpath.isabs(projectDirPath[0]): - this_path = projectDirPath - else: - this_path = posixpath.join(this_path, projectDirPath) - other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path) - - # ProjectRef is weak (it's owned by the mainGroup hierarchy). - project_ref = PBXFileReference({ - 'lastKnownFileType': 'wrapper.pb-project', - 'path': other_path, - 'sourceTree': 'SOURCE_ROOT', - }) - self.ProjectsGroup().AppendChild(project_ref) - - ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref} - self._other_pbxprojects[other_pbxproject] = ref_dict - self.AppendProperty('projectReferences', ref_dict) - - # Xcode seems to sort this list case-insensitively - self._properties['projectReferences'] = \ - sorted(self._properties['projectReferences'], cmp=lambda x,y: - cmp(x['ProjectRef'].Name().lower(), - y['ProjectRef'].Name().lower())) - else: - # The link already exists. Pull out the relevnt data. - project_ref_dict = self._other_pbxprojects[other_pbxproject] - product_group = project_ref_dict['ProductGroup'] - project_ref = project_ref_dict['ProjectRef'] - - self._SetUpProductReferences(other_pbxproject, product_group, project_ref) - - return [product_group, project_ref] - - def _SetUpProductReferences(self, other_pbxproject, product_group, - project_ref): - # TODO(mark): This only adds references to products in other_pbxproject - # when they don't exist in this pbxproject. Perhaps it should also - # remove references from this pbxproject that are no longer present in - # other_pbxproject. Perhaps it should update various properties if they - # change. - for target in other_pbxproject._properties['targets']: - if not isinstance(target, PBXNativeTarget): - continue - - other_fileref = target._properties['productReference'] - if product_group.GetChildByRemoteObject(other_fileref) == None: - # Xcode sets remoteInfo to the name of the target and not the name - # of its product, despite this proxy being a reference to the product. 
- container_item = PBXContainerItemProxy({ - 'containerPortal': project_ref, - 'proxyType': 2, - 'remoteGlobalIDString': other_fileref, - 'remoteInfo': target.Name() - }) - # TODO(mark): Does sourceTree get copied straight over from the other - # project? Can the other project ever have lastKnownFileType here - # instead of explicitFileType? (Use it if so?) Can path ever be - # unset? (I don't think so.) Can other_fileref have name set, and - # does it impact the PBXReferenceProxy if so? These are the questions - # that perhaps will be answered one day. - reference_proxy = PBXReferenceProxy({ - 'fileType': other_fileref._properties['explicitFileType'], - 'path': other_fileref._properties['path'], - 'sourceTree': other_fileref._properties['sourceTree'], - 'remoteRef': container_item, - }) - - product_group.AppendChild(reference_proxy) - - def SortRemoteProductReferences(self): - # For each remote project file, sort the associated ProductGroup in the - # same order that the targets are sorted in the remote project file. This - # is the sort order used by Xcode. - - def CompareProducts(x, y, remote_products): - # x and y are PBXReferenceProxy objects. Go through their associated - # PBXContainerItem to get the remote PBXFileReference, which will be - # present in the remote_products list. - x_remote = x._properties['remoteRef']._properties['remoteGlobalIDString'] - y_remote = y._properties['remoteRef']._properties['remoteGlobalIDString'] - x_index = remote_products.index(x_remote) - y_index = remote_products.index(y_remote) - - # Use the order of each remote PBXFileReference in remote_products to - # determine the sort order. - return cmp(x_index, y_index) - - for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems(): - # Build up a list of products in the remote project file, ordered the - # same as the targets that produce them. - remote_products = [] - for target in other_pbxproject._properties['targets']: - if not isinstance(target, PBXNativeTarget): - continue - remote_products.append(target._properties['productReference']) - - # Sort the PBXReferenceProxy children according to the list of remote - # products. - product_group = ref_dict['ProductGroup'] - product_group._properties['children'] = sorted( - product_group._properties['children'], - cmp=lambda x, y: CompareProducts(x, y, remote_products)) - - -class XCProjectFile(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'archiveVersion': [0, int, 0, 1, 1], - 'classes': [0, dict, 0, 1, {}], - 'objectVersion': [0, int, 0, 1, 45], - 'rootObject': [0, PBXProject, 1, 1], - }) - - def ComputeIDs(self, recursive=True, overwrite=True, hash=None): - # Although XCProjectFile is implemented here as an XCObject, it's not a - # proper object in the Xcode sense, and it certainly doesn't have its own - # ID. Pass through an attempt to update IDs to the real root object. - if recursive: - self._properties['rootObject'].ComputeIDs(recursive, overwrite, hash) - - def Print(self, file=sys.stdout): - self.VerifyHasRequiredProperties() - - # Add the special "objects" property, which will be caught and handled - # separately during printing. This structure allows a fairly standard - # loop do the normal printing. 
- self._properties['objects'] = {} - self._XCPrint(file, 0, '// !$*UTF8*$!\n') - if self._should_print_single_line: - self._XCPrint(file, 0, '{ ') - else: - self._XCPrint(file, 0, '{\n') - for property, value in sorted(self._properties.iteritems(), - cmp=lambda x, y: cmp(x, y)): - if property == 'objects': - self._PrintObjects(file) - else: - self._XCKVPrint(file, 1, property, value) - self._XCPrint(file, 0, '}\n') - del self._properties['objects'] - - def _PrintObjects(self, file): - if self._should_print_single_line: - self._XCPrint(file, 0, 'objects = {') - else: - self._XCPrint(file, 1, 'objects = {\n') - - objects_by_class = {} - for object in self.Descendants(): - if object == self: - continue - class_name = object.__class__.__name__ - if not class_name in objects_by_class: - objects_by_class[class_name] = [] - objects_by_class[class_name].append(object) - - for class_name in sorted(objects_by_class): - self._XCPrint(file, 0, '\n') - self._XCPrint(file, 0, '/* Begin ' + class_name + ' section */\n') - for object in sorted(objects_by_class[class_name], - cmp=lambda x, y: cmp(x.id, y.id)): - object.Print(file) - self._XCPrint(file, 0, '/* End ' + class_name + ' section */\n') - - if self._should_print_single_line: - self._XCPrint(file, 0, '}; ') - else: - self._XCPrint(file, 1, '};\n') diff --git a/third_party/gyp/samples/samples b/third_party/gyp/samples/samples deleted file mode 100755 index 804b618..0000000 --- a/third_party/gyp/samples/samples +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import os.path -import shutil -import sys - - -gyps = [ - 'app/app.gyp', - 'base/base.gyp', - 'build/temp_gyp/googleurl.gyp', - 'build/all.gyp', - 'build/common.gypi', - 'build/external_code.gypi', - 'chrome/test/security_tests/security_tests.gyp', - 'chrome/third_party/hunspell/hunspell.gyp', - 'chrome/chrome.gyp', - 'media/media.gyp', - 'net/net.gyp', - 'printing/printing.gyp', - 'sdch/sdch.gyp', - 'skia/skia.gyp', - 'testing/gmock.gyp', - 'testing/gtest.gyp', - 'third_party/bzip2/bzip2.gyp', - 'third_party/icu38/icu38.gyp', - 'third_party/libevent/libevent.gyp', - 'third_party/libjpeg/libjpeg.gyp', - 'third_party/libpng/libpng.gyp', - 'third_party/libxml/libxml.gyp', - 'third_party/libxslt/libxslt.gyp', - 'third_party/lzma_sdk/lzma_sdk.gyp', - 'third_party/modp_b64/modp_b64.gyp', - 'third_party/npapi/npapi.gyp', - 'third_party/sqlite/sqlite.gyp', - 'third_party/zlib/zlib.gyp', - 'v8/tools/gyp/v8.gyp', - 'webkit/activex_shim/activex_shim.gyp', - 'webkit/activex_shim_dll/activex_shim_dll.gyp', - 'webkit/build/action_csspropertynames.py', - 'webkit/build/action_cssvaluekeywords.py', - 'webkit/build/action_jsconfig.py', - 'webkit/build/action_makenames.py', - 'webkit/build/action_maketokenizer.py', - 'webkit/build/action_useragentstylesheets.py', - 'webkit/build/rule_binding.py', - 'webkit/build/rule_bison.py', - 'webkit/build/rule_gperf.py', - 'webkit/tools/test_shell/test_shell.gyp', - 'webkit/webkit.gyp', -] - - -def Main(argv): - if len(argv) != 3 or argv[1] not in ['push', 'pull']: - print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0] - return 1 - - path_to_chrome = argv[2] - - for g in gyps: - chrome_file = os.path.join(path_to_chrome, g) - local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1]) - if argv[1] == 'push': - print 'Copying %s to %s' % (local_file, chrome_file) - shutil.copyfile(local_file, 
chrome_file) - elif argv[1] == 'pull': - print 'Copying %s to %s' % (chrome_file, local_file) - shutil.copyfile(chrome_file, local_file) - else: - assert False - - return 0 - - -if __name__ == '__main__': - sys.exit(Main(sys.argv)) diff --git a/third_party/gyp/samples/samples.bat b/third_party/gyp/samples/samples.bat deleted file mode 100644 index 778d9c9..0000000 --- a/third_party/gyp/samples/samples.bat +++ /dev/null @@ -1,5 +0,0 @@ -@rem Copyright (c) 2009 Google Inc. All rights reserved. -@rem Use of this source code is governed by a BSD-style license that can be -@rem found in the LICENSE file. - -@python %~dp0/samples %* diff --git a/third_party/gyp/setup.py b/third_party/gyp/setup.py deleted file mode 100755 index ed2b41a..0000000 --- a/third_party/gyp/setup.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from distutils.core import setup -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.command.install_scripts import install_scripts - -setup( - name='gyp', - version='0.1', - description='Generate Your Projects', - author='Chromium Authors', - author_email='chromium-dev@googlegroups.com', - url='http://code.google.com/p/gyp', - package_dir = {'': 'pylib'}, - packages=['gyp', 'gyp.generator'], - - scripts = ['gyp'], - cmdclass = {'install': install, - 'install_lib': install_lib, - 'install_scripts': install_scripts}, -) diff --git a/third_party/gyp/test/actions/gyptest-all.py b/third_party/gyp/test/actions/gyptest-all.py deleted file mode 100644 index 8db38d5..0000000 --- a/third_party/gyp/test/actions/gyptest-all.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple actions when using an explicit build target of 'all'. -""" - -import glob -import os -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('actions.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -# Test that an "always run" action increases a counter on multiple invocations, -# and that a dependent action updates in step. -test.build('actions.gyp', test.ALL, chdir='relocate/src') -test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1') -test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1') -test.build('actions.gyp', test.ALL, chdir='relocate/src') -test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2') -test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') - -# The "always run" action only counts to 2, but the dependent target will count -# forever if it's allowed to run. This verifies that the dependent target only -# runs when the "always run" action generates new output, not just because the -# "always run" ran. 
-test.build('actions.gyp', test.ALL, chdir='relocate/src') -test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2') -test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') - -expect = """\ -Hello from program.c -Hello from make-prog1.py -Hello from make-prog2.py -""" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir1' -else: - chdir = 'relocate/src' -test.run_built_executable('program', chdir=chdir, stdout=expect) - - -test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n") - - -expect = "Hello from generate_main.py\n" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir3' -else: - chdir = 'relocate/src' -test.run_built_executable('null_input', chdir=chdir, stdout=expect) - - -# Clean out files which may have been created if test.ALL was run. -def clean_dep_files(): - for file in (glob.glob('relocate/src/dep_*.txt') + - glob.glob('relocate/src/deps_all_done_*.txt')): - if os.path.exists(file): - os.remove(file) - -# Confirm our clean. -clean_dep_files() -test.must_not_exist('relocate/src/dep_1.txt') -test.must_not_exist('relocate/src/deps_all_done_first_123.txt') - -# Make sure all deps finish before an action is run on a 'None' target. -# If using the Make builder, add -j to make things more difficult. -arguments = [] -if test.format == 'make': - arguments = ['-j'] -test.build('actions.gyp', 'action_with_dependencies_123', chdir='relocate/src', - arguments=arguments) -test.must_exist('relocate/src/deps_all_done_first_123.txt') - -# Try again with a target that has deps in reverse. Output files from -# previous tests deleted. Confirm this execution did NOT run the ALL -# target which would mess up our dep tests. -clean_dep_files() -test.build('actions.gyp', 'action_with_dependencies_321', chdir='relocate/src', - arguments=arguments) -test.must_exist('relocate/src/deps_all_done_first_321.txt') -test.must_not_exist('relocate/src/deps_all_done_first_123.txt') - - -test.pass_test() diff --git a/third_party/gyp/test/actions/gyptest-default.py b/third_party/gyp/test/actions/gyptest-default.py deleted file mode 100644 index c877867..0000000 --- a/third_party/gyp/test/actions/gyptest-default.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple actions when using the default build target. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('actions.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -# Test that an "always run" action increases a counter on multiple invocations, -# and that a dependent action updates in step. -test.build('actions.gyp', chdir='relocate/src') -test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1') -test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1') -test.build('actions.gyp', chdir='relocate/src') -test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2') -test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') - -# The "always run" action only counts to 2, but the dependent target will count -# forever if it's allowed to run. This verifies that the dependent target only -# runs when the "always run" action generates new output, not just because the -# "always run" ran. 
-test.build('actions.gyp', test.ALL, chdir='relocate/src') -test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2') -test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2') - -expect = """\ -Hello from program.c -Hello from make-prog1.py -Hello from make-prog2.py -""" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir1' -else: - chdir = 'relocate/src' -test.run_built_executable('program', chdir=chdir, stdout=expect) - - -test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n") - - -expect = "Hello from generate_main.py\n" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir3' -else: - chdir = 'relocate/src' -test.run_built_executable('null_input', chdir=chdir, stdout=expect) - - -test.pass_test() diff --git a/third_party/gyp/test/actions/gyptest-errors.py b/third_party/gyp/test/actions/gyptest-errors.py deleted file mode 100644 index ca41487..0000000 --- a/third_party/gyp/test/actions/gyptest-errors.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies behavior for different action configuration errors: -exit status of 1, and the expected error message must be in stderr. -""" - -import TestGyp - -test = TestGyp.TestGyp() - - -test.run_gyp('action_missing_name.gyp', chdir='src', status=1, stderr=None) -expect = [ - "Anonymous action in target broken_actions2. An action must have an 'action_name' field.", -] -test.must_contain_all_lines(test.stderr(), expect) - - -test.pass_test() diff --git a/third_party/gyp/test/actions/src/action_missing_name.gyp b/third_party/gyp/test/actions/src/action_missing_name.gyp deleted file mode 100644 index 00424c3..0000000 --- a/third_party/gyp/test/actions/src/action_missing_name.gyp +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'broken_actions2', - 'type': 'none', - 'actions': [ - { - 'inputs': [ - 'no_name.input', - ], - 'action': [ - 'python', - '-c', - 'print \'missing name\'', - ], - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/actions/src/actions.gyp b/third_party/gyp/test/actions/src/actions.gyp deleted file mode 100644 index 5d2db19..0000000 --- a/third_party/gyp/test/actions/src/actions.gyp +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'pull_in_all_actions', - 'type': 'none', - 'dependencies': [ - 'subdir1/executable.gyp:*', - 'subdir2/none.gyp:*', - 'subdir3/null_input.gyp:*', - ], - }, - { - 'target_name': 'depend_on_always_run_action', - 'type': 'none', - 'dependencies': [ 'subdir1/executable.gyp:counter' ], - 'actions': [ - { - 'action_name': 'use_always_run_output', - 'inputs': [ - 'subdir1/actions-out/action-counter.txt', - 'subdir1/counter.py', - ], - 'outputs': [ - 'subdir1/actions-out/action-counter_2.txt', - ], - 'action': [ - 'python', 'subdir1/counter.py', '<(_outputs)', - ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }, - ], - }, - - # Three deps which don't finish immediately. - # Each one has a small delay then creates a file. 
- # Delays are 1.0, 1.1, and 2.0 seconds. - { - 'target_name': 'dep_1', - 'type': 'none', - 'actions': [{ - 'inputs': [ 'actions.gyp' ], - 'outputs': [ 'dep_1.txt' ], - 'action_name': 'dep_1', - 'action': [ 'python', '-c', - 'import time; time.sleep(1); open(\'dep_1.txt\', \'w\')' ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }], - }, - { - 'target_name': 'dep_2', - 'type': 'none', - 'actions': [{ - 'inputs': [ 'actions.gyp' ], - 'outputs': [ 'dep_2.txt' ], - 'action_name': 'dep_2', - 'action': [ 'python', '-c', - 'import time; time.sleep(1.1); open(\'dep_2.txt\', \'w\')' ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }], - }, - { - 'target_name': 'dep_3', - 'type': 'none', - 'actions': [{ - 'inputs': [ 'actions.gyp' ], - 'outputs': [ 'dep_3.txt' ], - 'action_name': 'dep_3', - 'action': [ 'python', '-c', - 'import time; time.sleep(2.0); open(\'dep_3.txt\', \'w\')' ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }], - }, - - # An action which assumes the deps have completed. - # Does NOT list the output files of it's deps as inputs. - # On success create the file deps_all_done_first.txt. - { - 'target_name': 'action_with_dependencies_123', - 'type': 'none', - 'dependencies': [ 'dep_1', 'dep_2', 'dep_3' ], - 'actions': [{ - 'inputs': [ 'actions.gyp' ], - 'outputs': [ 'deps_all_done_first_123.txt' ], - 'action_name': 'action_with_dependencies_123', - 'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }], - }, - # Same as above but with deps in reverse. - { - 'target_name': 'action_with_dependencies_321', - 'type': 'none', - 'dependencies': [ 'dep_3', 'dep_2', 'dep_1' ], - 'actions': [{ - 'inputs': [ 'actions.gyp' ], - 'outputs': [ 'deps_all_done_first_321.txt' ], - 'action_name': 'action_with_dependencies_321', - 'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }], - }, - - ], -} diff --git a/third_party/gyp/test/actions/src/confirm-dep-files.py b/third_party/gyp/test/actions/src/confirm-dep-files.py deleted file mode 100644 index 34efe28..0000000 --- a/third_party/gyp/test/actions/src/confirm-dep-files.py +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/python - -# Confirm presence of files generated by our targets we depend on. -# If they exist, create a new file. -# -# Note target's input files are explicitly NOT defined in the gyp file -# so they can't easily be passed to this script as args. - -import os -import sys - -outfile = sys.argv[1] # Example value we expect: deps_all_done_first_123.txt -if (os.path.exists("dep_1.txt") and - os.path.exists("dep_2.txt") and - os.path.exists("dep_3.txt")): - open(outfile, "w") diff --git a/third_party/gyp/test/actions/src/subdir1/counter.py b/third_party/gyp/test/actions/src/subdir1/counter.py deleted file mode 100644 index 3612d7d..0000000 --- a/third_party/gyp/test/actions/src/subdir1/counter.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import sys -import time - -output = sys.argv[1] -persistoutput = "%s.persist" % sys.argv[1] - -count = 0 -try: - count = open(persistoutput, 'r').read() -except: - pass -count = int(count) + 1 - -if len(sys.argv) > 2: - max_count = int(sys.argv[2]) - if count > max_count: - count = max_count - -oldcount = 0 -try: - oldcount = open(output, 'r').read() -except: - pass - -# Save the count in a file that is undeclared, and thus hidden, to gyp. We need -# to do this because, prior to running commands, scons deletes any declared -# outputs, so we would lose our count if we just wrote to the given output file. -# (The other option is to use Precious() in the scons generator, but that seems -# too heavy-handed just to support this somewhat unrealistic test case, and -# might lead to unintended side-effects). -open(persistoutput, 'w').write('%d' % (count)) - -# Only write the given output file if the count has changed. -if int(oldcount) != count: - open(output, 'w').write('%d' % (count)) - # Sleep so the next run changes the file time sufficiently to make the build - # detect the file as changed. - time.sleep(1) - -sys.exit(0) diff --git a/third_party/gyp/test/actions/src/subdir1/executable.gyp b/third_party/gyp/test/actions/src/subdir1/executable.gyp deleted file mode 100644 index 6a1ce4f..0000000 --- a/third_party/gyp/test/actions/src/subdir1/executable.gyp +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'program.c', - ], - 'actions': [ - { - 'action_name': 'make-prog1', - 'inputs': [ - 'make-prog1.py', - ], - 'outputs': [ - '<(INTERMEDIATE_DIR)/prog1.c', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - { - 'action_name': 'make-prog2', - 'inputs': [ - 'make-prog2.py', - ], - 'outputs': [ - 'actions-out/prog2.c', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }, - ], - }, - { - 'target_name': 'counter', - 'type': 'none', - 'actions': [ - { - # This action should always run, regardless of whether or not it's - # inputs or the command-line change. We do this by creating a dummy - # first output, which is always missing, thus causing the build to - # always try to recreate it. Actual output files should be listed - # after the dummy one, and dependent targets should list the real - # output(s) in their inputs - # (see '../actions.gyp:depend_on_always_run_action'). - 'action_name': 'action_counter', - 'inputs': [ - 'counter.py', - ], - 'outputs': [ - 'actions-out/action-counter.txt.always', - 'actions-out/action-counter.txt', - ], - 'action': [ - 'python', '<(_inputs)', 'actions-out/action-counter.txt', '2', - ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/actions/src/subdir1/make-prog1.py b/third_party/gyp/test/actions/src/subdir1/make-prog1.py deleted file mode 100644 index 7ea1d8a..0000000 --- a/third_party/gyp/test/actions/src/subdir1/make-prog1.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = r""" -#include - -void prog1(void) -{ - printf("Hello from make-prog1.py\n"); -} -""" - -open(sys.argv[1], 'w').write(contents) - -sys.exit(0) diff --git a/third_party/gyp/test/actions/src/subdir1/make-prog2.py b/third_party/gyp/test/actions/src/subdir1/make-prog2.py deleted file mode 100644 index 0bfe497..0000000 --- a/third_party/gyp/test/actions/src/subdir1/make-prog2.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = r""" -#include - -void prog2(void) -{ - printf("Hello from make-prog2.py\n"); -} -""" - -open(sys.argv[1], 'w').write(contents) - -sys.exit(0) diff --git a/third_party/gyp/test/actions/src/subdir1/program.c b/third_party/gyp/test/actions/src/subdir1/program.c deleted file mode 100644 index d5f661d..0000000 --- a/third_party/gyp/test/actions/src/subdir1/program.c +++ /dev/null @@ -1,12 +0,0 @@ -#include - -extern void prog1(void); -extern void prog2(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from program.c\n"); - prog1(); - prog2(); - return 0; -} diff --git a/third_party/gyp/test/actions/src/subdir2/make-file.py b/third_party/gyp/test/actions/src/subdir2/make-file.py deleted file mode 100644 index fff0653..0000000 --- a/third_party/gyp/test/actions/src/subdir2/make-file.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = "Hello from make-file.py\n" - -open(sys.argv[1], 'wb').write(contents) diff --git a/third_party/gyp/test/actions/src/subdir2/none.gyp b/third_party/gyp/test/actions/src/subdir2/none.gyp deleted file mode 100644 index 2caa97d..0000000 --- a/third_party/gyp/test/actions/src/subdir2/none.gyp +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'file', - 'type': 'none', - 'msvs_cygwin_shell': 0, - 'actions': [ - { - 'action_name': 'make-file', - 'inputs': [ - 'make-file.py', - ], - 'outputs': [ - 'file.out', - # TODO: enhance testing infrastructure to test this - # without having to hard-code the intermediate dir paths. - #'<(INTERMEDIATE_DIR)/file.out', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - } - ], - }, - ], -} diff --git a/third_party/gyp/test/actions/src/subdir3/generate_main.py b/third_party/gyp/test/actions/src/subdir3/generate_main.py deleted file mode 100644 index b90b3aa..0000000 --- a/third_party/gyp/test/actions/src/subdir3/generate_main.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import sys - -contents = """ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from generate_main.py\\n"); - return 0; -} -""" - -open(sys.argv[1], 'w').write(contents) - -sys.exit(0) diff --git a/third_party/gyp/test/actions/src/subdir3/null_input.gyp b/third_party/gyp/test/actions/src/subdir3/null_input.gyp deleted file mode 100644 index 9b0bea5..0000000 --- a/third_party/gyp/test/actions/src/subdir3/null_input.gyp +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'null_input', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'actions': [ - { - 'action_name': 'generate_main', - 'process_outputs_as_sources': 1, - 'inputs': [], - 'outputs': [ - '<(INTERMEDIATE_DIR)/main.c', - ], - 'action': [ - # TODO: we can't just use <(_outputs) here?! - 'python', 'generate_main.py', '<(INTERMEDIATE_DIR)/main.c', - ], - # Allows the test to run without hermetic cygwin on windows. - 'msvs_cygwin_shell': 0, - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/actions-bare/gyptest-bare.py b/third_party/gyp/test/actions-bare/gyptest-bare.py deleted file mode 100644 index b0c1093..0000000 --- a/third_party/gyp/test/actions-bare/gyptest-bare.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies actions which are not depended on by other targets get executed. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('bare.gyp', chdir='src') -test.relocate('src', 'relocate/src') -test.build('bare.gyp', chdir='relocate/src') - -file_content = 'Hello from bare.py\n' - -test.built_file_must_match('out.txt', file_content, chdir='relocate/src') - -test.pass_test() diff --git a/third_party/gyp/test/actions-bare/src/bare.gyp b/third_party/gyp/test/actions-bare/src/bare.gyp deleted file mode 100644 index 3d28f09..0000000 --- a/third_party/gyp/test/actions-bare/src/bare.gyp +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'bare', - 'type': 'none', - 'actions': [ - { - 'action_name': 'action1', - 'inputs': [ - 'bare.py', - ], - 'outputs': [ - '<(PRODUCT_DIR)/out.txt', - ], - 'action': ['python', 'bare.py', '<(PRODUCT_DIR)/out.txt'], - 'msvs_cygwin_shell': 0, - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/actions-bare/src/bare.py b/third_party/gyp/test/actions-bare/src/bare.py deleted file mode 100644 index 970450e..0000000 --- a/third_party/gyp/test/actions-bare/src/bare.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -f = open(sys.argv[1], 'wb') -f.write('Hello from bare.py\n') -f.close() diff --git a/third_party/gyp/test/actions-subdir/gyptest-action.py b/third_party/gyp/test/actions-subdir/gyptest-action.py deleted file mode 100644 index 09cfef1..0000000 --- a/third_party/gyp/test/actions-subdir/gyptest-action.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Test actions that output to PRODUCT_DIR. -""" - -import TestGyp - -# TODO fix this for xcode: http://code.google.com/p/gyp/issues/detail?id=88 -test = TestGyp.TestGyp(formats=['!xcode']) - -test.run_gyp('none.gyp', chdir='src') - -test.build('none.gyp', test.ALL, chdir='src') - -file_content = 'Hello from make-file.py\n' -subdir_file_content = 'Hello from make-subdir-file.py\n' - -test.built_file_must_match('file.out', file_content, chdir='src') -test.built_file_must_match('subdir_file.out', subdir_file_content, chdir='src') - -test.pass_test() diff --git a/third_party/gyp/test/actions-subdir/src/make-file.py b/third_party/gyp/test/actions-subdir/src/make-file.py deleted file mode 100644 index 74e5581..0000000 --- a/third_party/gyp/test/actions-subdir/src/make-file.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = 'Hello from make-file.py\n' - -open(sys.argv[1], 'wb').write(contents) diff --git a/third_party/gyp/test/actions-subdir/src/none.gyp b/third_party/gyp/test/actions-subdir/src/none.gyp deleted file mode 100644 index 23f8d25..0000000 --- a/third_party/gyp/test/actions-subdir/src/none.gyp +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'file', - 'type': 'none', - 'msvs_cygwin_shell': 0, - 'actions': [ - { - 'action_name': 'make-file', - 'inputs': [ - 'make-file.py', - ], - 'outputs': [ - '<(PRODUCT_DIR)/file.out', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - } - ], - 'dependencies': [ - 'subdir/subdir.gyp:subdir_file', - ], - }, - ], -} diff --git a/third_party/gyp/test/actions-subdir/src/subdir/make-subdir-file.py b/third_party/gyp/test/actions-subdir/src/subdir/make-subdir-file.py deleted file mode 100644 index 80ce19a..0000000 --- a/third_party/gyp/test/actions-subdir/src/subdir/make-subdir-file.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = 'Hello from make-subdir-file.py\n' - -open(sys.argv[1], 'wb').write(contents) diff --git a/third_party/gyp/test/actions-subdir/src/subdir/subdir.gyp b/third_party/gyp/test/actions-subdir/src/subdir/subdir.gyp deleted file mode 100644 index 0315d4e..0000000 --- a/third_party/gyp/test/actions-subdir/src/subdir/subdir.gyp +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'subdir_file', - 'type': 'none', - 'msvs_cygwin_shell': 0, - 'actions': [ - { - 'action_name': 'make-subdir-file', - 'inputs': [ - 'make-subdir-file.py', - ], - 'outputs': [ - '<(PRODUCT_DIR)/subdir_file.out', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - } - ], - }, - ], -} diff --git a/third_party/gyp/test/additional-targets/gyptest-additional.py b/third_party/gyp/test/additional-targets/gyptest-additional.py deleted file mode 100644 index 02e7d7a..0000000 --- a/third_party/gyp/test/additional-targets/gyptest-additional.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple actions when using an explicit build target of 'all'. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('all.gyp', chdir='src') -test.relocate('src', 'relocate/src') - -# Build all. -test.build('all.gyp', chdir='relocate/src') - -if test.format=='xcode': - chdir = 'relocate/src/dir1' -else: - chdir = 'relocate/src' - -# Output is as expected. -file_content = 'Hello from emit.py\n' -test.built_file_must_match('out2.txt', file_content, chdir=chdir) - -test.built_file_must_not_exist('out.txt', chdir='relocate/src') -test.built_file_must_not_exist('foolib1', - type=test.SHARED_LIB, - chdir=chdir) - -# TODO(mmoss) Make consistent with scons, with 'dir1' before 'out/Default'? -if test.format == 'make': - chdir='relocate/src' -else: - chdir='relocate/src/dir1' - -# Build the action explicitly. -test.build('actions.gyp', 'action1_target', chdir=chdir) - -# Check that things got run. -file_content = 'Hello from emit.py\n' -test.built_file_must_exist('out.txt', chdir=chdir) - -# Build the shared library explicitly. -test.build('actions.gyp', 'foolib1', chdir=chdir) - -test.built_file_must_exist('foolib1', - type=test.SHARED_LIB, - chdir=chdir) - -test.pass_test() diff --git a/third_party/gyp/test/additional-targets/src/all.gyp b/third_party/gyp/test/additional-targets/src/all.gyp deleted file mode 100644 index 21c8308..0000000 --- a/third_party/gyp/test/additional-targets/src/all.gyp +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'all_targets', - 'type': 'none', - 'dependencies': ['dir1/actions.gyp:*'], - }, - ], -} diff --git a/third_party/gyp/test/additional-targets/src/dir1/actions.gyp b/third_party/gyp/test/additional-targets/src/dir1/actions.gyp deleted file mode 100644 index 5089c80..0000000 --- a/third_party/gyp/test/additional-targets/src/dir1/actions.gyp +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'action1_target', - 'type': 'none', - 'suppress_wildcard': 1, - 'actions': [ - { - 'action_name': 'action1', - 'inputs': [ - 'emit.py', - ], - 'outputs': [ - '<(PRODUCT_DIR)/out.txt', - ], - 'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out.txt'], - 'msvs_cygwin_shell': 0, - }, - ], - }, - { - 'target_name': 'action2_target', - 'type': 'none', - 'actions': [ - { - 'action_name': 'action2', - 'inputs': [ - 'emit.py', - ], - 'outputs': [ - '<(PRODUCT_DIR)/out2.txt', - ], - 'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out2.txt'], - 'msvs_cygwin_shell': 0, - }, - ], - }, - { - 'target_name': 'foolib1', - 'type': 'shared_library', - 'suppress_wildcard': 1, - 'sources': ['lib1.c'], - }, - ], - 'conditions': [ - ['OS=="linux"', { - 'target_defaults': { - 'cflags': ['-fPIC'], - }, - }], - ], -} diff --git a/third_party/gyp/test/additional-targets/src/dir1/emit.py b/third_party/gyp/test/additional-targets/src/dir1/emit.py deleted file mode 100644 index 5638c43..0000000 --- a/third_party/gyp/test/additional-targets/src/dir1/emit.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -f = open(sys.argv[1], 'wb') -f.write('Hello from emit.py\n') -f.close() diff --git a/third_party/gyp/test/additional-targets/src/dir1/lib1.c b/third_party/gyp/test/additional-targets/src/dir1/lib1.c deleted file mode 100644 index df4cb10..0000000 --- a/third_party/gyp/test/additional-targets/src/dir1/lib1.c +++ /dev/null @@ -1,6 +0,0 @@ -#ifdef _WIN32 -__declspec(dllexport) -#endif -int func1(void) { - return 42; -} diff --git a/third_party/gyp/test/assembly/gyptest-assembly.py b/third_party/gyp/test/assembly/gyptest-assembly.py deleted file mode 100644 index 40d0a06..0000000 --- a/third_party/gyp/test/assembly/gyptest-assembly.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that .hpp files are ignored when included in the source list on all -platforms. -""" - -import sys -import TestGyp - -# TODO(bradnelson): get this working for windows. -test = TestGyp.TestGyp(formats=['make', 'scons', 'xcode']) - -test.run_gyp('assembly.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('assembly.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from program.c -Got 42. -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.pass_test() diff --git a/third_party/gyp/test/assembly/src/as.bat b/third_party/gyp/test/assembly/src/as.bat deleted file mode 100644 index 0a47382..0000000 --- a/third_party/gyp/test/assembly/src/as.bat +++ /dev/null @@ -1,4 +0,0 @@ -@echo off -:: Mock windows assembler. -cl /c %1 /Fo"%2" - diff --git a/third_party/gyp/test/assembly/src/assembly.gyp b/third_party/gyp/test/assembly/src/assembly.gyp deleted file mode 100644 index 872dd5e..0000000 --- a/third_party/gyp/test/assembly/src/assembly.gyp +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
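The 'additional-targets' test deleted above hinges on wildcard dependencies: 'dir1/actions.gyp:*' expands to every target in that file except the ones marked 'suppress_wildcard': 1, which therefore only build when named explicitly (the test later builds action1_target and foolib1 by hand). Condensed, the relationship is:

    # all.gyp (top level)
    {
      'targets': [
        {
          'target_name': 'all_targets',
          'type': 'none',
          # Expands to action2_target only; action1_target and foolib1 set
          # 'suppress_wildcard': 1 in dir1/actions.gyp and are skipped.
          'dependencies': ['dir1/actions.gyp:*'],
        },
      ],
    }
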
- -{ - 'target_defaults': { - 'conditions': [ - ['OS=="win"', { - 'defines': ['PLATFORM_WIN'], - }], - ['OS=="mac"', { - 'defines': ['PLATFORM_MAC'], - }], - ['OS=="linux"', { - 'defines': ['PLATFORM_LINUX'], - }], - ], - }, - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'dependencies': ['lib1'], - 'sources': [ - 'program.c', - ], - }, - { - 'target_name': 'lib1', - 'type': 'static_library', - 'sources': [ - 'lib1.S', - ], - }, - ], - 'conditions': [ - ['OS=="win"', { - 'target_defaults': { - 'rules': [ - { - 'rule_name': 'assembler', - 'msvs_cygwin_shell': 0, - 'extension': 'S', - 'inputs': [ - 'as.bat', - ], - 'outputs': [ - '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).obj', - ], - 'action': - ['as.bat', 'lib1.c', '<(_outputs)'], - 'message': 'Building assembly file <(RULE_INPUT_PATH)', - 'process_outputs_as_sources': 1, - }, - ], - }, - },], - ], -} diff --git a/third_party/gyp/test/assembly/src/lib1.S b/third_party/gyp/test/assembly/src/lib1.S deleted file mode 100644 index e7102bf..0000000 --- a/third_party/gyp/test/assembly/src/lib1.S +++ /dev/null @@ -1,10 +0,0 @@ -#if PLATFORM_WINDOWS || PLATFORM_MAC -# define IDENTIFIER(n) _##n -#else /* Linux */ -# define IDENTIFIER(n) n -#endif - -.globl IDENTIFIER(lib1_function) -IDENTIFIER(lib1_function): - movl $42, %eax - ret diff --git a/third_party/gyp/test/assembly/src/lib1.c b/third_party/gyp/test/assembly/src/lib1.c deleted file mode 100644 index be21ecd..0000000 --- a/third_party/gyp/test/assembly/src/lib1.c +++ /dev/null @@ -1,3 +0,0 @@ -int lib1_function(void) { - return 42; -} diff --git a/third_party/gyp/test/assembly/src/program.c b/third_party/gyp/test/assembly/src/program.c deleted file mode 100644 index ecce3b0..0000000 --- a/third_party/gyp/test/assembly/src/program.c +++ /dev/null @@ -1,12 +0,0 @@ -#include - -extern int lib1_function(void); - -int main(int argc, char *argv[]) -{ - fprintf(stdout, "Hello from program.c\n"); - fflush(stdout); - fprintf(stdout, "Got %d.\n", lib1_function()); - fflush(stdout); - return 0; -} diff --git a/third_party/gyp/test/builddir/gyptest-all.py b/third_party/gyp/test/builddir/gyptest-all.py deleted file mode 100644 index 324d7fc..0000000 --- a/third_party/gyp/test/builddir/gyptest-all.py +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verify the settings that cause a set of programs to be created in -a specific build directory, and that no intermediate built files -get created outside of that build directory hierarchy even when -referred to with deeply-nested ../../.. paths. -""" - -import TestGyp - -# TODO(mmoss): Make only supports (theoretically) a single, global build -# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than -# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other -# generators support, so this doesn't work yet for make. -# TODO(mmoss) Make also has the issue that the top-level Makefile is written to -# the "--depth" location, which is one level above 'src', but then this test -# moves 'src' somewhere else, leaving the Makefile behind, so make can't find -# its sources. I'm not sure if make is wrong for writing outside the current -# directory, or if the test is wrong for assuming everything generated is under -# the current directory. 
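The assembly test uses a rule rather than an action, so it fires once per matching source file. For the single lib1.S file in the lib1 target, the rule's variables expand roughly as follows (a worked expansion for illustration, not generator output):

    # extension 'S' matches lib1.S, so:
    #   RULE_INPUT_PATH = 'lib1.S'
    #   RULE_INPUT_ROOT = 'lib1'
    #   outputs         = ['<(INTERMEDIATE_DIR)/lib1.obj']
    #   action          = ['as.bat', 'lib1.c', '<(INTERMEDIATE_DIR)/lib1.obj']
    # as.bat stands in for an assembler by running cl on the equivalent lib1.c,
    # and the resulting .obj is fed back in via process_outputs_as_sources.
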
-test = TestGyp.TestGyp(formats=['!make']) - -test.run_gyp('prog1.gyp', '--depth=..', chdir='src') - -test.relocate('src', 'relocate/src') - -test.subdir('relocate/builddir') - -# Make sure that all the built ../../etc. files only get put under builddir, -# by making all of relocate read-only and then making only builddir writable. -test.writable('relocate', False) -test.writable('relocate/builddir', True) - -# Suppress the test infrastructure's setting SYMROOT on the command line. -test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src') - -expect1 = """\ -Hello from prog1.c -Hello from func1.c -""" - -expect2 = """\ -Hello from subdir2/prog2.c -Hello from func2.c -""" - -expect3 = """\ -Hello from subdir2/subdir3/prog3.c -Hello from func3.c -""" - -expect4 = """\ -Hello from subdir2/subdir3/subdir4/prog4.c -Hello from func4.c -""" - -expect5 = """\ -Hello from subdir2/subdir3/subdir4/subdir5/prog5.c -Hello from func5.c -""" - -def run_builddir(prog, expect): - dir = 'relocate/builddir/Default/' - test.run(program=test.workpath(dir + prog), stdout=expect) - -run_builddir('prog1', expect1) -run_builddir('prog2', expect2) -run_builddir('prog3', expect3) -run_builddir('prog4', expect4) -run_builddir('prog5', expect5) - -test.pass_test() diff --git a/third_party/gyp/test/builddir/gyptest-default.py b/third_party/gyp/test/builddir/gyptest-default.py deleted file mode 100644 index 6171d15..0000000 --- a/third_party/gyp/test/builddir/gyptest-default.py +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verify the settings that cause a set of programs to be created in -a specific build directory, and that no intermediate built files -get created outside of that build directory hierarchy even when -referred to with deeply-nested ../../.. paths. -""" - -import TestGyp - -# TODO(mmoss): Make only supports (theoretically) a single, global build -# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than -# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other -# generators support, so this doesn't work yet for make. -# TODO(mmoss) Make also has the issue that the top-level Makefile is written to -# the "--depth" location, which is one level above 'src', but then this test -# moves 'src' somewhere else, leaving the Makefile behind, so make can't find -# its sources. I'm not sure if make is wrong for writing outside the current -# directory, or if the test is wrong for assuming everything generated is under -# the current directory. -test = TestGyp.TestGyp(formats=['!make']) - -test.run_gyp('prog1.gyp', '--depth=..', chdir='src') - -test.relocate('src', 'relocate/src') - -test.subdir('relocate/builddir') - -# Make sure that all the built ../../etc. files only get put under builddir, -# by making all of relocate read-only and then making only builddir writable. -test.writable('relocate', False) -test.writable('relocate/builddir', True) - -# Suppress the test infrastructure's setting SYMROOT on the command line. 
-test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src') - -expect1 = """\ -Hello from prog1.c -Hello from func1.c -""" - -expect2 = """\ -Hello from subdir2/prog2.c -Hello from func2.c -""" - -expect3 = """\ -Hello from subdir2/subdir3/prog3.c -Hello from func3.c -""" - -expect4 = """\ -Hello from subdir2/subdir3/subdir4/prog4.c -Hello from func4.c -""" - -expect5 = """\ -Hello from subdir2/subdir3/subdir4/subdir5/prog5.c -Hello from func5.c -""" - -def run_builddir(prog, expect): - dir = 'relocate/builddir/Default/' - test.run(program=test.workpath(dir + prog), stdout=expect) - -run_builddir('prog1', expect1) -run_builddir('prog2', expect2) -run_builddir('prog3', expect3) -run_builddir('prog4', expect4) -run_builddir('prog5', expect5) - -test.pass_test() diff --git a/third_party/gyp/test/builddir/src/builddir.gypi b/third_party/gyp/test/builddir/src/builddir.gypi deleted file mode 100644 index e3c6147..0000000 --- a/third_party/gyp/test/builddir/src/builddir.gypi +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'target_defaults': { - 'configurations': { - 'Default': { - 'msvs_configuration_attributes': { - 'OutputDirectory': '<(DEPTH)\\builddir\Default', - }, - }, - }, - }, - 'scons_settings': { - 'sconsbuild_dir': '<(DEPTH)/builddir', - }, - 'xcode_settings': { - 'SYMROOT': '<(DEPTH)/builddir', - }, -} diff --git a/third_party/gyp/test/builddir/src/func1.c b/third_party/gyp/test/builddir/src/func1.c deleted file mode 100644 index b8e6a06..0000000 --- a/third_party/gyp/test/builddir/src/func1.c +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void func1(void) -{ - printf("Hello from func1.c\n"); -} diff --git a/third_party/gyp/test/builddir/src/func2.c b/third_party/gyp/test/builddir/src/func2.c deleted file mode 100644 index 14aabac..0000000 --- a/third_party/gyp/test/builddir/src/func2.c +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void func2(void) -{ - printf("Hello from func2.c\n"); -} diff --git a/third_party/gyp/test/builddir/src/func3.c b/third_party/gyp/test/builddir/src/func3.c deleted file mode 100644 index 3b4edea..0000000 --- a/third_party/gyp/test/builddir/src/func3.c +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void func3(void) -{ - printf("Hello from func3.c\n"); -} diff --git a/third_party/gyp/test/builddir/src/func4.c b/third_party/gyp/test/builddir/src/func4.c deleted file mode 100644 index 732891b..0000000 --- a/third_party/gyp/test/builddir/src/func4.c +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void func4(void) -{ - printf("Hello from func4.c\n"); -} diff --git a/third_party/gyp/test/builddir/src/func5.c b/third_party/gyp/test/builddir/src/func5.c deleted file mode 100644 index 18fdfab..0000000 --- a/third_party/gyp/test/builddir/src/func5.c +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void func5(void) -{ - printf("Hello from func5.c\n"); -} diff --git a/third_party/gyp/test/builddir/src/prog1.c b/third_party/gyp/test/builddir/src/prog1.c deleted file mode 100644 index 674ca74..0000000 --- a/third_party/gyp/test/builddir/src/prog1.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void func1(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from prog1.c\n"); - func1(); - return 0; -} diff --git a/third_party/gyp/test/builddir/src/prog1.gyp b/third_party/gyp/test/builddir/src/prog1.gyp deleted file mode 100644 index 5b96f03..0000000 --- a/third_party/gyp/test/builddir/src/prog1.gyp +++ /dev/null @@ -1,30 +0,0 @@ -# 
Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - 'builddir.gypi', - ], - 'targets': [ - { - 'target_name': 'pull_in_all', - 'type': 'none', - 'dependencies': [ - 'prog1', - 'subdir2/prog2.gyp:prog2', - 'subdir2/subdir3/prog3.gyp:prog3', - 'subdir2/subdir3/subdir4/prog4.gyp:prog4', - 'subdir2/subdir3/subdir4/subdir5/prog5.gyp:prog5', - ], - }, - { - 'target_name': 'prog1', - 'type': 'executable', - 'sources': [ - 'prog1.c', - 'func1.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/builddir/src/subdir2/prog2.c b/third_party/gyp/test/builddir/src/subdir2/prog2.c deleted file mode 100644 index bbdf4f0..0000000 --- a/third_party/gyp/test/builddir/src/subdir2/prog2.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void func2(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from subdir2/prog2.c\n"); - func2(); - return 0; -} diff --git a/third_party/gyp/test/builddir/src/subdir2/prog2.gyp b/third_party/gyp/test/builddir/src/subdir2/prog2.gyp deleted file mode 100644 index 96299b6..0000000 --- a/third_party/gyp/test/builddir/src/subdir2/prog2.gyp +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../builddir.gypi', - ], - 'targets': [ - { - 'target_name': 'prog2', - 'type': 'executable', - 'sources': [ - 'prog2.c', - '../func2.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/builddir/src/subdir2/subdir3/prog3.c b/third_party/gyp/test/builddir/src/subdir2/subdir3/prog3.c deleted file mode 100644 index 10c530b..0000000 --- a/third_party/gyp/test/builddir/src/subdir2/subdir3/prog3.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void func3(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from subdir2/subdir3/prog3.c\n"); - func3(); - return 0; -} diff --git a/third_party/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp b/third_party/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp deleted file mode 100644 index d7df43c..0000000 --- a/third_party/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../../builddir.gypi', - ], - 'targets': [ - { - 'target_name': 'prog3', - 'type': 'executable', - 'sources': [ - 'prog3.c', - '../../func3.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c b/third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c deleted file mode 100644 index dcba9a9..0000000 --- a/third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void func4(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from subdir2/subdir3/subdir4/prog4.c\n"); - func4(); - return 0; -} diff --git a/third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp b/third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp deleted file mode 100644 index 862a8a1..0000000 --- a/third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'includes': [ - '../../../builddir.gypi', - ], - 'targets': [ - { - 'target_name': 'prog4', - 'type': 'executable', - 'sources': [ - 'prog4.c', - '../../../func4.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c b/third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c deleted file mode 100644 index 69132e5..0000000 --- a/third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void func5(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from subdir2/subdir3/subdir4/subdir5/prog5.c\n"); - func5(); - return 0; -} diff --git a/third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp b/third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp deleted file mode 100644 index fe1c9cb..0000000 --- a/third_party/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../../../../builddir.gypi', - ], - 'targets': [ - { - 'target_name': 'prog5', - 'type': 'executable', - 'sources': [ - 'prog5.c', - '../../../../func5.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/compilable/gyptest-headers.py b/third_party/gyp/test/compilable/gyptest-headers.py deleted file mode 100644 index 9176021..0000000 --- a/third_party/gyp/test/compilable/gyptest-headers.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that .hpp files are ignored when included in the source list on all -platforms. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('headers.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('headers.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from program.c -Hello from lib1.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.pass_test() diff --git a/third_party/gyp/test/compilable/src/headers.gyp b/third_party/gyp/test/compilable/src/headers.gyp deleted file mode 100644 index b6c2a88..0000000 --- a/third_party/gyp/test/compilable/src/headers.gyp +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'dependencies': [ - 'lib1' - ], - 'sources': [ - 'program.cpp', - ], - }, - { - 'target_name': 'lib1', - 'type': 'static_library', - 'sources': [ - 'lib1.hpp', - 'lib1.cpp', - ], - }, - ], -} diff --git a/third_party/gyp/test/compilable/src/lib1.cpp b/third_party/gyp/test/compilable/src/lib1.cpp deleted file mode 100644 index 51bc31a..0000000 --- a/third_party/gyp/test/compilable/src/lib1.cpp +++ /dev/null @@ -1,7 +0,0 @@ -#include -#include "lib1.hpp" - -void lib1_function(void) { - fprintf(stdout, "Hello from lib1.c\n"); - fflush(stdout); -} diff --git a/third_party/gyp/test/compilable/src/lib1.hpp b/third_party/gyp/test/compilable/src/lib1.hpp deleted file mode 100644 index 72e63e8..0000000 --- a/third_party/gyp/test/compilable/src/lib1.hpp +++ /dev/null @@ -1,6 +0,0 @@ -#ifndef _lib1_hpp -#define _lib1_hpp - -extern void lib1_function(void); - -#endif diff --git a/third_party/gyp/test/compilable/src/program.cpp b/third_party/gyp/test/compilable/src/program.cpp deleted file mode 100644 index 81420ba..0000000 --- a/third_party/gyp/test/compilable/src/program.cpp +++ /dev/null @@ -1,9 +0,0 @@ -#include -#include "lib1.hpp" - -int main(int argc, char *argv[]) { - fprintf(stdout, "Hello from program.c\n"); - fflush(stdout); - lib1_function(); - return 0; -} diff --git a/third_party/gyp/test/configurations/basics/configurations.c b/third_party/gyp/test/configurations/basics/configurations.c deleted file mode 100644 index 6c1f900..0000000 --- a/third_party/gyp/test/configurations/basics/configurations.c +++ /dev/null @@ -1,15 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ -#ifdef FOO - printf("Foo configuration\n"); -#endif -#ifdef DEBUG - printf("Debug configuration\n"); -#endif -#ifdef RELEASE - printf("Release configuration\n"); -#endif - return 0; -} diff --git a/third_party/gyp/test/configurations/basics/configurations.gyp b/third_party/gyp/test/configurations/basics/configurations.gyp deleted file mode 100644 index 93f1d8d..0000000 --- a/third_party/gyp/test/configurations/basics/configurations.gyp +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'executable', - 'sources': [ - 'configurations.c', - ], - 'configurations': { - 'Debug': { - 'defines': [ - 'DEBUG', - ], - }, - 'Release': { - 'defines': [ - 'RELEASE', - ], - }, - 'Foo': { - 'defines': [ - 'FOO', - ], - }, - } - }, - ], -} diff --git a/third_party/gyp/test/configurations/basics/gyptest-configurations.py b/third_party/gyp/test/configurations/basics/gyptest-configurations.py deleted file mode 100644 index 27cd2e8..0000000 --- a/third_party/gyp/test/configurations/basics/gyptest-configurations.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable in three different configurations. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('configurations.gyp') - -test.set_configuration('Release') -test.build('configurations.gyp') -test.run_built_executable('configurations', stdout="Release configuration\n") - -test.set_configuration('Debug') -test.build('configurations.gyp') -test.run_built_executable('configurations', stdout="Debug configuration\n") - -test.set_configuration('Foo') -test.build('configurations.gyp') -test.run_built_executable('configurations', stdout="Foo configuration\n") - -test.pass_test() diff --git a/third_party/gyp/test/configurations/inheritance/configurations.c b/third_party/gyp/test/configurations/inheritance/configurations.c deleted file mode 100644 index 2d5565e..0000000 --- a/third_party/gyp/test/configurations/inheritance/configurations.c +++ /dev/null @@ -1,21 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ -#ifdef BASE - printf("Base configuration\n"); -#endif -#ifdef COMMON - printf("Common configuration\n"); -#endif -#ifdef COMMON2 - printf("Common2 configuration\n"); -#endif -#ifdef DEBUG - printf("Debug configuration\n"); -#endif -#ifdef RELEASE - printf("Release configuration\n"); -#endif - return 0; -} diff --git a/third_party/gyp/test/configurations/inheritance/configurations.gyp b/third_party/gyp/test/configurations/inheritance/configurations.gyp deleted file mode 100644 index 9441376..0000000 --- a/third_party/gyp/test/configurations/inheritance/configurations.gyp +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'target_defaults': { - 'configurations': { - 'Base': { - 'abstract': 1, - 'defines': ['BASE'], - }, - 'Common': { - 'abstract': 1, - 'inherit_from': ['Base'], - 'defines': ['COMMON'], - }, - 'Common2': { - 'abstract': 1, - 'defines': ['COMMON2'], - }, - 'Debug': { - 'inherit_from': ['Common', 'Common2'], - 'defines': ['DEBUG'], - }, - 'Release': { - 'inherit_from': ['Common', 'Common2'], - 'defines': ['RELEASE'], - }, - }, - }, - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'executable', - 'sources': [ - 'configurations.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/configurations/inheritance/gyptest-inheritance.py b/third_party/gyp/test/configurations/inheritance/gyptest-inheritance.py deleted file mode 100644 index 22c73a3..0000000 --- a/third_party/gyp/test/configurations/inheritance/gyptest-inheritance.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable in three different configurations. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('configurations.gyp') - -test.set_configuration('Release') -test.build('configurations.gyp') -test.run_built_executable('configurations', - stdout=('Base configuration\n' - 'Common configuration\n' - 'Common2 configuration\n' - 'Release configuration\n')) - -test.set_configuration('Debug') -test.build('configurations.gyp') -test.run_built_executable('configurations', - stdout=('Base configuration\n' - 'Common configuration\n' - 'Common2 configuration\n' - 'Debug configuration\n')) - -test.pass_test() diff --git a/third_party/gyp/test/configurations/invalid/actions.gyp b/third_party/gyp/test/configurations/invalid/actions.gyp deleted file mode 100644 index a6e4208..0000000 --- a/third_party/gyp/test/configurations/invalid/actions.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'none', - 'configurations': { - 'Debug': { - 'actions': [ - ], - }, - } - }, - ], -} diff --git a/third_party/gyp/test/configurations/invalid/all_dependent_settings.gyp b/third_party/gyp/test/configurations/invalid/all_dependent_settings.gyp deleted file mode 100644 index b16a245..0000000 --- a/third_party/gyp/test/configurations/invalid/all_dependent_settings.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'none', - 'configurations': { - 'Debug': { - 'all_dependent_settings': [ - ], - }, - } - }, - ], -} diff --git a/third_party/gyp/test/configurations/invalid/configurations.gyp b/third_party/gyp/test/configurations/invalid/configurations.gyp deleted file mode 100644 index 2cfc960..0000000 --- a/third_party/gyp/test/configurations/invalid/configurations.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'none', - 'configurations': { - 'Debug': { - 'configurations': [ - ], - }, - } - }, - ], -} diff --git a/third_party/gyp/test/configurations/invalid/dependencies.gyp b/third_party/gyp/test/configurations/invalid/dependencies.gyp deleted file mode 100644 index 74633f3..0000000 --- a/third_party/gyp/test/configurations/invalid/dependencies.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'none', - 'configurations': { - 'Debug': { - 'dependencies': [ - ], - }, - } - }, - ], -} diff --git a/third_party/gyp/test/configurations/invalid/direct_dependent_settings.gyp b/third_party/gyp/test/configurations/invalid/direct_dependent_settings.gyp deleted file mode 100644 index 8a0f2e9..0000000 --- a/third_party/gyp/test/configurations/invalid/direct_dependent_settings.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'none', - 'configurations': { - 'Debug': { - 'direct_dependent_settings': [ - ], - }, - } - }, - ], -} diff --git a/third_party/gyp/test/configurations/invalid/gyptest-configurations.py b/third_party/gyp/test/configurations/invalid/gyptest-configurations.py deleted file mode 100644 index 0b33bb3..0000000 --- a/third_party/gyp/test/configurations/invalid/gyptest-configurations.py +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable in three different configurations. -""" - -import TestGyp - -# Keys that do not belong inside a configuration dictionary. -invalid_configuration_keys = [ - 'actions', - 'all_dependent_settings', - 'configurations', - 'dependencies', - 'direct_dependent_settings', - 'libraries', - 'link_settings', - 'sources', - 'target_name', - 'type', -] - -test = TestGyp.TestGyp() - -for test_key in invalid_configuration_keys: - test.run_gyp('%s.gyp' % test_key, status=1, stderr=None) - expect = ['%s not allowed in the Debug configuration, found in target ' - '%s.gyp:configurations#target' % (test_key, test_key)] - test.must_contain_all_lines(test.stderr(), expect) - -test.pass_test() diff --git a/third_party/gyp/test/configurations/invalid/libraries.gyp b/third_party/gyp/test/configurations/invalid/libraries.gyp deleted file mode 100644 index c4014ed..0000000 --- a/third_party/gyp/test/configurations/invalid/libraries.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'none', - 'configurations': { - 'Debug': { - 'libraries': [ - ], - }, - } - }, - ], -} diff --git a/third_party/gyp/test/configurations/invalid/link_settings.gyp b/third_party/gyp/test/configurations/invalid/link_settings.gyp deleted file mode 100644 index 2f0e1c4..0000000 --- a/third_party/gyp/test/configurations/invalid/link_settings.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'none', - 'configurations': { - 'Debug': { - 'link_settings': [ - ], - }, - } - }, - ], -} diff --git a/third_party/gyp/test/configurations/invalid/sources.gyp b/third_party/gyp/test/configurations/invalid/sources.gyp deleted file mode 100644 index b38cca0..0000000 --- a/third_party/gyp/test/configurations/invalid/sources.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'none', - 'configurations': { - 'Debug': { - 'sources': [ - ], - }, - } - }, - ], -} diff --git a/third_party/gyp/test/configurations/invalid/target_name.gyp b/third_party/gyp/test/configurations/invalid/target_name.gyp deleted file mode 100644 index 83baad9..0000000 --- a/third_party/gyp/test/configurations/invalid/target_name.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2010 Google Inc. All rights reserved. 
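Each of the 'invalid' .gyp files above and below nests one target-level key inside a configuration, and gyptest-configurations.py expects gyp to reject it with a message naming the key, the configuration and the target. The rule being tested can be sketched like this (an illustration only, not gyp's actual implementation):

    # Mirrors invalid_configuration_keys in the deleted gyptest-configurations.py.
    INVALID_CONFIGURATION_KEYS = [
        'actions', 'all_dependent_settings', 'configurations', 'dependencies',
        'direct_dependent_settings', 'libraries', 'link_settings', 'sources',
        'target_name', 'type',
    ]

    def check_configuration(config_name, config_dict, target_name):
        # Hypothetical helper; raises on the first disallowed key it finds.
        for key in INVALID_CONFIGURATION_KEYS:
            if key in config_dict:
                raise ValueError('%s not allowed in the %s configuration, '
                                 'found in target %s' %
                                 (key, config_name, target_name))
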
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'none', - 'configurations': { - 'Debug': { - 'target_name': [ - ], - }, - } - }, - ], -} diff --git a/third_party/gyp/test/configurations/invalid/type.gyp b/third_party/gyp/test/configurations/invalid/type.gyp deleted file mode 100644 index bc55898..0000000 --- a/third_party/gyp/test/configurations/invalid/type.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'none', - 'configurations': { - 'Debug': { - 'type': [ - ], - }, - } - }, - ], -} diff --git a/third_party/gyp/test/configurations/target_platform/configurations.gyp b/third_party/gyp/test/configurations/target_platform/configurations.gyp deleted file mode 100644 index d15429f..0000000 --- a/third_party/gyp/test/configurations/target_platform/configurations.gyp +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'target_defaults': { - 'configurations': { - 'Debug_Win32': { - 'msvs_configuration_platform': 'Win32', - }, - 'Debug_x64': { - 'msvs_configuration_platform': 'x64', - }, - }, - }, - 'targets': [ - { - 'target_name': 'left', - 'type': 'static_library', - 'sources': [ - 'left.c', - ], - 'configurations': { - 'Debug_Win32': { - 'msvs_target_platform': 'x64', - }, - }, - }, - { - 'target_name': 'right', - 'type': 'static_library', - 'sources': [ - 'right.c', - ], - }, - { - 'target_name': 'front_left', - 'type': 'executable', - 'dependencies': ['left'], - 'sources': [ - 'front.c', - ], - 'configurations': { - 'Debug_Win32': { - 'msvs_target_platform': 'x64', - }, - }, - }, - { - 'target_name': 'front_right', - 'type': 'executable', - 'dependencies': ['right'], - 'sources': [ - 'front.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/configurations/target_platform/front.c b/third_party/gyp/test/configurations/target_platform/front.c deleted file mode 100644 index 12b1d0a..0000000 --- a/third_party/gyp/test/configurations/target_platform/front.c +++ /dev/null @@ -1,8 +0,0 @@ -#include - -const char *message(void); - -int main(int argc, char *argv[]) { - printf("%s\n", message()); - return 0; -} diff --git a/third_party/gyp/test/configurations/target_platform/gyptest-target_platform.py b/third_party/gyp/test/configurations/target_platform/gyptest-target_platform.py deleted file mode 100644 index ae4e9e5..0000000 --- a/third_party/gyp/test/configurations/target_platform/gyptest-target_platform.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Tests the msvs specific msvs_target_platform option. -""" - -import TestGyp -import TestCommon - - -def RunX64(exe, stdout): - try: - test.run_built_executable(exe, stdout=stdout) - except WindowsError, e: - # Assume the exe is 64-bit if it can't load on 32-bit systems. - # Both versions of the error are required because different versions - # of python seem to return different errors for invalid exe type. 
- if e.errno != 193 and '[Error 193]' not in str(e): - raise - - -test = TestGyp.TestGyp(formats=['msvs']) - -test.run_gyp('configurations.gyp') - -test.set_configuration('Debug|x64') -test.build('configurations.gyp', rebuild=True) -RunX64('front_left', stdout=('left\n')) -RunX64('front_right', stdout=('right\n')) - -test.set_configuration('Debug|Win32') -test.build('configurations.gyp', rebuild=True) -RunX64('front_left', stdout=('left\n')) -test.run_built_executable('front_right', stdout=('right\n')) - -test.pass_test() diff --git a/third_party/gyp/test/configurations/target_platform/left.c b/third_party/gyp/test/configurations/target_platform/left.c deleted file mode 100644 index 1ce2ea1..0000000 --- a/third_party/gyp/test/configurations/target_platform/left.c +++ /dev/null @@ -1,3 +0,0 @@ -const char *message(void) { - return "left"; -} diff --git a/third_party/gyp/test/configurations/target_platform/right.c b/third_party/gyp/test/configurations/target_platform/right.c deleted file mode 100644 index b157849..0000000 --- a/third_party/gyp/test/configurations/target_platform/right.c +++ /dev/null @@ -1,3 +0,0 @@ -const char *message(void) { - return "right"; -} diff --git a/third_party/gyp/test/configurations/x64/configurations.c b/third_party/gyp/test/configurations/x64/configurations.c deleted file mode 100644 index 72c97e3..0000000 --- a/third_party/gyp/test/configurations/x64/configurations.c +++ /dev/null @@ -1,12 +0,0 @@ -#include - -int main(int argc, char *argv[]) { - if (sizeof(void*) == 4) { - printf("Running Win32\n"); - } else if (sizeof(void*) == 8) { - printf("Running x64\n"); - } else { - printf("Unexpected platform\n"); - } - return 0; -} diff --git a/third_party/gyp/test/configurations/x64/configurations.gyp b/third_party/gyp/test/configurations/x64/configurations.gyp deleted file mode 100644 index 06ffa37..0000000 --- a/third_party/gyp/test/configurations/x64/configurations.gyp +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'target_defaults': { - 'configurations': { - 'Debug': { - 'msvs_configuration_platform': 'Win32', - }, - 'Debug_x64': { - 'inherit_from': ['Debug'], - 'msvs_configuration_platform': 'x64', - }, - }, - }, - 'targets': [ - { - 'target_name': 'configurations', - 'type': 'executable', - 'sources': [ - 'configurations.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/configurations/x64/gyptest-x86.py b/third_party/gyp/test/configurations/x64/gyptest-x86.py deleted file mode 100644 index 254ea6f..0000000 --- a/third_party/gyp/test/configurations/x64/gyptest-x86.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable in three different configurations. -""" - -import TestGyp - -test = TestGyp.TestGyp(formats=['msvs']) - -test.run_gyp('configurations.gyp') - -for platform in ['Win32', 'x64']: - test.set_configuration('Debug|%s' % platform) - test.build('configurations.gyp', rebuild=True) - try: - test.run_built_executable('configurations', - stdout=('Running %s\n' % platform)) - except WindowsError, e: - # Assume the exe is 64-bit if it can't load on 32-bit systems. 
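The target_platform test above separates two msvs keys: msvs_configuration_platform names the platform a configuration normally builds for, and msvs_target_platform on an individual target overrides it. For the deleted configurations.gyp that works out to (a summary of the deleted data, not new settings):

    # Debug|Win32:
    #   left, front_left    -> built as x64 ('msvs_target_platform': 'x64')
    #   right, front_right  -> built as Win32
    # Debug|x64:
    #   all four targets    -> built as x64
    # Hence gyptest-target_platform.py runs front_left through RunX64 in both
    # configurations, and front_right directly only in the Win32 one.
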
- if platform == 'x64' and (e.errno == 193 or '[Error 193]' in str(e)): - continue - raise - -test.pass_test() diff --git a/third_party/gyp/test/copies/gyptest-all.py b/third_party/gyp/test/copies/gyptest-all.py deleted file mode 100644 index 8542ab7..0000000 --- a/third_party/gyp/test/copies/gyptest-all.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies file copies using an explicit build target of 'all'. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('copies.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('copies.gyp', test.ALL, chdir='relocate/src') - -test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n') - -test.built_file_must_match('copies-out/file2', - 'file2 contents\n', - chdir='relocate/src') - -test.built_file_must_match('copies-out/directory/file3', - 'file3 contents\n', - chdir='relocate/src') -test.built_file_must_match('copies-out/directory/file4', - 'file4 contents\n', - chdir='relocate/src') -test.built_file_must_match('copies-out/directory/subdir/file5', - 'file5 contents\n', - chdir='relocate/src') -test.built_file_must_match('copies-out/subdir/file6', - 'file6 contents\n', - chdir='relocate/src') - -test.pass_test() diff --git a/third_party/gyp/test/copies/gyptest-default.py b/third_party/gyp/test/copies/gyptest-default.py deleted file mode 100644 index a5d1bf9..0000000 --- a/third_party/gyp/test/copies/gyptest-default.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies file copies using the build tool default. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('copies.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('copies.gyp', chdir='relocate/src') - -test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n') - -test.built_file_must_match('copies-out/file2', - 'file2 contents\n', - chdir='relocate/src') - -test.built_file_must_match('copies-out/directory/file3', - 'file3 contents\n', - chdir='relocate/src') -test.built_file_must_match('copies-out/directory/file4', - 'file4 contents\n', - chdir='relocate/src') -test.built_file_must_match('copies-out/directory/subdir/file5', - 'file5 contents\n', - chdir='relocate/src') -test.built_file_must_match('copies-out/subdir/file6', - 'file6 contents\n', - chdir='relocate/src') - -test.pass_test() diff --git a/third_party/gyp/test/copies/src/copies.gyp b/third_party/gyp/test/copies/src/copies.gyp deleted file mode 100644 index ce2e0ca..0000000 --- a/third_party/gyp/test/copies/src/copies.gyp +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'copies1', - 'type': 'none', - 'copies': [ - { - 'destination': 'copies-out', - 'files': [ - 'file1', - ], - }, - ], - }, - { - 'target_name': 'copies2', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-out', - 'files': [ - 'file2', - ], - }, - ], - }, - # Copy a directory tree. 
- { - 'target_name': 'copies_recursive', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-out', - 'files': [ - 'directory/', - ], - }, - ], - }, - # Copy a directory from deeper in the tree (this should not reproduce the - # entire directory path in the destination, only the final directory). - { - 'target_name': 'copies_recursive_depth', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-out', - 'files': [ - 'parentdir/subdir/', - ], - }, - ], - }, - # Verify that a null 'files' list doesn't gag the generators. - { - 'target_name': 'copies_null', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-null', - 'files': [], - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/copies/src/directory/file3 b/third_party/gyp/test/copies/src/directory/file3 deleted file mode 100644 index 43f16f3..0000000 --- a/third_party/gyp/test/copies/src/directory/file3 +++ /dev/null @@ -1 +0,0 @@ -file3 contents diff --git a/third_party/gyp/test/copies/src/directory/file4 b/third_party/gyp/test/copies/src/directory/file4 deleted file mode 100644 index 5f7270a..0000000 --- a/third_party/gyp/test/copies/src/directory/file4 +++ /dev/null @@ -1 +0,0 @@ -file4 contents diff --git a/third_party/gyp/test/copies/src/directory/subdir/file5 b/third_party/gyp/test/copies/src/directory/subdir/file5 deleted file mode 100644 index 41f4718..0000000 --- a/third_party/gyp/test/copies/src/directory/subdir/file5 +++ /dev/null @@ -1 +0,0 @@ -file5 contents diff --git a/third_party/gyp/test/copies/src/file1 b/third_party/gyp/test/copies/src/file1 deleted file mode 100644 index 84d55c5..0000000 --- a/third_party/gyp/test/copies/src/file1 +++ /dev/null @@ -1 +0,0 @@ -file1 contents diff --git a/third_party/gyp/test/copies/src/file2 b/third_party/gyp/test/copies/src/file2 deleted file mode 100644 index af1b8ae..0000000 --- a/third_party/gyp/test/copies/src/file2 +++ /dev/null @@ -1 +0,0 @@ -file2 contents diff --git a/third_party/gyp/test/copies/src/parentdir/subdir/file6 b/third_party/gyp/test/copies/src/parentdir/subdir/file6 deleted file mode 100644 index f5d5757..0000000 --- a/third_party/gyp/test/copies/src/parentdir/subdir/file6 +++ /dev/null @@ -1 +0,0 @@ -file6 contents diff --git a/third_party/gyp/test/copies-link/gyptest-copies-link.py b/third_party/gyp/test/copies-link/gyptest-copies-link.py deleted file mode 100644 index fe7b602..0000000 --- a/third_party/gyp/test/copies-link/gyptest-copies-link.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies file copies using the build tool default. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('copies-link.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('copies-link.gyp', chdir='relocate/src') - -test.pass_test() diff --git a/third_party/gyp/test/copies-link/src/copies-link.gyp b/third_party/gyp/test/copies-link/src/copies-link.gyp deleted file mode 100644 index 9d2530a..0000000 --- a/third_party/gyp/test/copies-link/src/copies-link.gyp +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
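The copies targets above define the trailing-slash semantics the tests rely on: a plain file entry is copied as-is, a 'directory/' entry is copied recursively, and only the final directory component is recreated under the destination. For the deleted test data that means (the same paths the gyptest-all.py and gyptest-default.py assertions check):

    # 'file1'             -> copies-out/file1            (relative destination)
    # 'file2'             -> <(PRODUCT_DIR)/copies-out/file2
    # 'directory/'        -> <(PRODUCT_DIR)/copies-out/directory/{file3, file4, subdir/file5}
    # 'parentdir/subdir/' -> <(PRODUCT_DIR)/copies-out/subdir/file6   (parentdir not recreated)
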
- -{ - 'targets': [ - { - 'target_name': 'func1', - 'type': 'static_library', - 'sources': ['func1.c'], - }, - { - 'target_name': 'clone_func1', - 'type': 'none', - 'dependencies': ['func1'], - 'actions': [ - { - 'action_name': 'cloning library', - 'inputs': [ - '<(LIB_DIR)/<(STATIC_LIB_PREFIX)func1<(STATIC_LIB_SUFFIX)' - ], - 'outputs': ['<(PRODUCT_DIR)/alternate/' - '<(STATIC_LIB_PREFIX)cloned<(STATIC_LIB_SUFFIX)'], - 'destination': '<(PRODUCT_DIR)', - 'action': ['python', 'copy.py', '<@(_inputs)', '<@(_outputs)'], - 'msvs_cygwin_shell': 0, - }, - ], - }, - { - 'target_name': 'copy_cloned', - 'type': 'none', - 'dependencies': ['clone_func1'], - 'copies': [ - { - 'destination': '<(LIB_DIR)', - 'files': [ - '<(PRODUCT_DIR)/alternate/' - '<(STATIC_LIB_PREFIX)cloned<(STATIC_LIB_SUFFIX)', - ], - }, - ], - }, - { - 'target_name': 'use_cloned', - 'type': 'executable', - 'sources': ['main.c'], - 'dependencies': ['copy_cloned'], - 'link_settings': { - 'conditions': [ - ['OS=="win"', { - 'libraries': ['-l"<(LIB_DIR)/cloned.lib"'], - }, { - 'libraries': ['-lcloned'], - 'ldflags': ['-L <(LIB_DIR)'], - }], - ], - }, - }, - ], -} diff --git a/third_party/gyp/test/copies-link/src/copy.py b/third_party/gyp/test/copies-link/src/copy.py deleted file mode 100644 index a1dd871..0000000 --- a/third_party/gyp/test/copies-link/src/copy.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import shutil -import sys - - -def main(argv): - if len(argv) != 3: - print 'USAGE: copy.py ' - return 1 - - shutil.copy(argv[1], argv[2]) - return 0 - - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff --git a/third_party/gyp/test/copies-link/src/func1.c b/third_party/gyp/test/copies-link/src/func1.c deleted file mode 100644 index 56fd2f0..0000000 --- a/third_party/gyp/test/copies-link/src/func1.c +++ /dev/null @@ -1,9 +0,0 @@ -#include - -extern void func1(void); - -int main(int argc, char *argv[]) { - printf("hello from link1\n"); - func1(); - return 0; -} diff --git a/third_party/gyp/test/copies-link/src/main.c b/third_party/gyp/test/copies-link/src/main.c deleted file mode 100644 index cceccdd..0000000 --- a/third_party/gyp/test/copies-link/src/main.c +++ /dev/null @@ -1,5 +0,0 @@ -#include - -void func1(void) { - printf("hello from func1\n"); -} diff --git a/third_party/gyp/test/defines/defines-env.gyp b/third_party/gyp/test/defines/defines-env.gyp deleted file mode 100644 index 1781546..0000000 --- a/third_party/gyp/test/defines/defines-env.gyp +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'variables': { - 'value%': '5', - }, - 'targets': [ - { - 'target_name': 'defines', - 'type': 'executable', - 'sources': [ - 'defines.c', - ], - 'defines': [ - 'VALUE=<(value)', - ], - }, - ], -} - diff --git a/third_party/gyp/test/defines/defines.c b/third_party/gyp/test/defines/defines.c deleted file mode 100644 index 33657ac..0000000 --- a/third_party/gyp/test/defines/defines.c +++ /dev/null @@ -1,14 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. 
*/ - -#include - -int main(int argc, char *argv[]) -{ -#ifdef FOO - printf("FOO is defined\n"); -#endif - printf("VALUE is %d\n", VALUE); - return 0; -} diff --git a/third_party/gyp/test/defines/defines.gyp b/third_party/gyp/test/defines/defines.gyp deleted file mode 100644 index 3db66e5..0000000 --- a/third_party/gyp/test/defines/defines.gyp +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'defines', - 'type': 'executable', - 'sources': [ - 'defines.c', - ], - 'defines': [ - 'FOO', - 'VALUE=1', - ], - }, - ], - 'conditions': [ - ['OS=="fakeos"', { - 'targets': [ - { - 'target_name': 'fakeosprogram', - 'type': 'executable', - 'sources': [ - 'defines.c', - ], - 'defines': [ - 'FOO', - 'VALUE=1', - ], - }, - ], - }], - ], -} diff --git a/third_party/gyp/test/defines/gyptest-define-override.py b/third_party/gyp/test/defines/gyptest-define-override.py deleted file mode 100644 index 82e325a..0000000 --- a/third_party/gyp/test/defines/gyptest-define-override.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that a default gyp define can be overridden. -""" - -import os -import TestGyp - -test = TestGyp.TestGyp() - -# Command-line define -test.run_gyp('defines.gyp', '-D', 'OS=fakeos') -test.build('defines.gyp') -test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE) -# Clean up the exe so subsequent tests don't find an old exe. -os.remove(test.built_file_path('fakeosprogram', type=test.EXECUTABLE)) - -# Without "OS" override, fokeosprogram shouldn't be built. -test.run_gyp('defines.gyp') -test.build('defines.gyp') -test.built_file_must_not_exist('fakeosprogram', type=test.EXECUTABLE) - -# Environment define -os.environ['GYP_DEFINES'] = 'OS=fakeos' -test.run_gyp('defines.gyp') -test.build('defines.gyp') -test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE) - -test.pass_test() diff --git a/third_party/gyp/test/defines/gyptest-defines-env-regyp.py b/third_party/gyp/test/defines/gyptest-defines-env-regyp.py deleted file mode 100644 index 70c9ba7..0000000 --- a/third_party/gyp/test/defines/gyptest-defines-env-regyp.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable with C++ define specified by a gyp define, and -the use of the environment during regeneration when the gyp file changes. -""" - -import os -import TestGyp - -# Regenerating build files when a gyp file changes is currently only supported -# by the make generator. -test = TestGyp.TestGyp(formats=['make']) - -try: - os.environ['GYP_DEFINES'] = 'value=50' - test.run_gyp('defines.gyp') -finally: - # We clear the environ after calling gyp. When the auto-regeneration happens, - # the same define should be reused anyway. Reset to empty string first in - # case the platform doesn't support unsetenv. 
- os.environ['GYP_DEFINES'] = '' - del os.environ['GYP_DEFINES'] - -test.build('defines.gyp') - -expect = """\ -FOO is defined -VALUE is 1 -""" -test.run_built_executable('defines', stdout=expect) - -# Sleep so that the changed gyp file will have a newer timestamp than the -# previously generated build files. -test.sleep() -test.write('defines.gyp', test.read('defines-env.gyp')) - -test.build('defines.gyp', test.ALL) - -expect = """\ -VALUE is 50 -""" -test.run_built_executable('defines', stdout=expect) - -test.pass_test() diff --git a/third_party/gyp/test/defines/gyptest-defines-env.py b/third_party/gyp/test/defines/gyptest-defines-env.py deleted file mode 100644 index 6b4e717..0000000 --- a/third_party/gyp/test/defines/gyptest-defines-env.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable with C++ define specified by a gyp define. -""" - -import os -import TestGyp - -test = TestGyp.TestGyp() - -# With the value only given in environment, it should be used. -try: - os.environ['GYP_DEFINES'] = 'value=10' - test.run_gyp('defines-env.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.build('defines-env.gyp') - -expect = """\ -VALUE is 10 -""" -test.run_built_executable('defines', stdout=expect) - - -# With the value given in both command line and environment, -# command line should take precedence. -try: - os.environ['GYP_DEFINES'] = 'value=20' - test.run_gyp('defines-env.gyp', '-Dvalue=25') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines.c') -test.build('defines-env.gyp') - -expect = """\ -VALUE is 25 -""" -test.run_built_executable('defines', stdout=expect) - - -# With the value only given in environment, it should be ignored if -# --ignore-environment is specified. -try: - os.environ['GYP_DEFINES'] = 'value=30' - test.run_gyp('defines-env.gyp', '--ignore-environment') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines.c') -test.build('defines-env.gyp') - -expect = """\ -VALUE is 5 -""" -test.run_built_executable('defines', stdout=expect) - - -# With the value given in both command line and environment, and -# --ignore-environment also specified, command line should still be used. -try: - os.environ['GYP_DEFINES'] = 'value=40' - test.run_gyp('defines-env.gyp', '--ignore-environment', '-Dvalue=45') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines.c') -test.build('defines-env.gyp') - -expect = """\ -VALUE is 45 -""" -test.run_built_executable('defines', stdout=expect) - - -test.pass_test() diff --git a/third_party/gyp/test/defines/gyptest-defines.py b/third_party/gyp/test/defines/gyptest-defines.py deleted file mode 100644 index a21a617..0000000 --- a/third_party/gyp/test/defines/gyptest-defines.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable with C++ defines. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('defines.gyp') - -test.build('defines.gyp') - -expect = """\ -FOO is defined -VALUE is 1 -""" -test.run_built_executable('defines', stdout=expect) - -test.pass_test() diff --git a/third_party/gyp/test/defines-escaping/defines-escaping.c b/third_party/gyp/test/defines-escaping/defines-escaping.c deleted file mode 100644 index 4407572..0000000 --- a/third_party/gyp/test/defines-escaping/defines-escaping.c +++ /dev/null @@ -1,11 +0,0 @@ -/* Copyright (c) 2010 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -#include - -int main(int argc, char *argv[]) -{ - printf(TEST_FORMAT, TEST_ARGS); - return 0; -} diff --git a/third_party/gyp/test/defines-escaping/defines-escaping.gyp b/third_party/gyp/test/defines-escaping/defines-escaping.gyp deleted file mode 100644 index 6f0f3fd..0000000 --- a/third_party/gyp/test/defines-escaping/defines-escaping.gyp +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'defines_escaping', - 'type': 'executable', - 'sources': [ - 'defines-escaping.c', - ], - 'defines': [ - 'TEST_FORMAT="<(test_format)"', - 'TEST_ARGS=<(test_args)', - ], - }, - ], -} diff --git a/third_party/gyp/test/defines-escaping/gyptest-defines-escaping.py b/third_party/gyp/test/defines-escaping/gyptest-defines-escaping.py deleted file mode 100644 index db0e592..0000000 --- a/third_party/gyp/test/defines-escaping/gyptest-defines-escaping.py +++ /dev/null @@ -1,163 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2010 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies build of an executable with C++ define specified by a gyp define using -various special characters such as quotes, commas, etc. -""" - -import os -import TestGyp - -test = TestGyp.TestGyp() - -# Tests string literals, percents, and backslash escapes. -try: - os.environ['GYP_DEFINES'] = \ - """test_format='%s\\n' test_args='"Simple test of %s with a literal"'""" - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.build('defines-escaping.gyp') - -expect = """\ -Simple test of %s with a literal -""" -test.run_built_executable('defines_escaping', stdout=expect) - - -# Test multiple comma-and-space-separated string literals. -try: - os.environ['GYP_DEFINES'] = \ - """test_format='%s and %s\\n' test_args='"foo", "bar"'""" - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines-escaping.c') -test.build('defines-escaping.gyp') - -expect = """\ -foo and bar -""" -test.run_built_executable('defines_escaping', stdout=expect) - - -# Test string literals containing quotes. -try: - os.environ['GYP_DEFINES'] = \ - ("""test_format='%s %s %s %s %s\\n' """ + - """test_args='"\\"These,\\"",""" + - """ "\\"words,\\"",""" - """ "\\"are,\\"",""" + - """ "\\"in,\\"",""" + - """ "\\"quotes.\\""'""") - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines-escaping.c') -test.build('defines-escaping.gyp') - -expect = """\ -"These," "words," "are," "in," "quotes." 
-""" -test.run_built_executable('defines_escaping', stdout=expect) - - -# Test string literals containing single quotes. -try: - os.environ['GYP_DEFINES'] = \ - ("""test_format='%s %s %s %s %s\\n' """ + - """test_args="\\"'These,'\\",""" + - """ \\"'words,'\\",""" - """ \\"'are,'\\",""" + - """ \\"'in,'\\",""" + - """ \\"'quotes.'\\"" """) - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines-escaping.c') -test.build('defines-escaping.gyp') - -expect = """\ -'These,' 'words,' 'are,' 'in,' 'quotes.' -""" -test.run_built_executable('defines_escaping', stdout=expect) - - -# Test string literals containing different numbers of backslashes before quotes -# (to exercise Windows' quoting behaviour). -try: - os.environ['GYP_DEFINES'] = \ - ("""test_format='%s\\n%s\\n%s\\n' """ + - """test_args='"\\\\\\"1 visible slash\\\\\\"",""" + - """ "\\\\\\\\\\"2 visible slashes\\\\\\\\\\"",""" - """ "\\\\\\\\\\\\\\"3 visible slashes\\\\\\\\\\\\\\""'""") - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines-escaping.c') -test.build('defines-escaping.gyp') - -expect = """\ -\\"1 visible slash\\" -\\\\"2 visible slashes\\\\" -\\\\\\"3 visible slashes\\\\\\" -""" -test.run_built_executable('defines_escaping', stdout=expect) - - -# Test that various scary sequences are passed unfettered. -try: - os.environ['GYP_DEFINES'] = \ - ("""test_format='%s\\n' """ + - """test_args='"%PATH%, $foo, " `foo`;"'""") - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines-escaping.c') -test.build('defines-escaping.gyp') - -expect = """\ -%PATH%, $foo, " `foo`; -""" -test.run_built_executable('defines_escaping', stdout=expect) - - -# Test commas and semi-colons preceded by backslashes (to exercise Windows' -# quoting behaviour). -try: - os.environ['GYP_DEFINES'] = \ - ("""test_format='%s\\n%s\\n' """ + - """test_args='"\\\\, \\\\\\\\;",""" + - # Same thing again, but enclosed in visible quotes. - """ "\\"\\\\, \\\\\\\\;\\""'""") - test.run_gyp('defines-escaping.gyp') -finally: - del os.environ['GYP_DEFINES'] - -test.sleep() -test.touch('defines-escaping.c') -test.build('defines-escaping.gyp') - -expect = """\ -\\, \\\\; -"\\, \\\\;" -""" -test.run_built_executable('defines_escaping', stdout=expect) - -# We deliberately do not test having an odd number of quotes in a string -# literal because that isn't feasible in MSVS. diff --git a/third_party/gyp/test/dependencies/a.c b/third_party/gyp/test/dependencies/a.c deleted file mode 100755 index 3bba111..0000000 --- a/third_party/gyp/test/dependencies/a.c +++ /dev/null @@ -1,9 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -extern int funcB(); - -int funcA() { - return funcB(); -} diff --git a/third_party/gyp/test/dependencies/b/b.c b/third_party/gyp/test/dependencies/b/b.c deleted file mode 100755 index b5e771b..0000000 --- a/third_party/gyp/test/dependencies/b/b.c +++ /dev/null @@ -1,3 +0,0 @@ -int funcB() { - return 2; -} diff --git a/third_party/gyp/test/dependencies/b/b.gyp b/third_party/gyp/test/dependencies/b/b.gyp deleted file mode 100755 index f09e1ff..0000000 --- a/third_party/gyp/test/dependencies/b/b.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'b', - 'type': 'static_library', - 'sources': [ - 'b.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/dependencies/c/c.c b/third_party/gyp/test/dependencies/c/c.c deleted file mode 100644 index 4949daf..0000000 --- a/third_party/gyp/test/dependencies/c/c.c +++ /dev/null @@ -1,4 +0,0 @@ -int funcC() { - return 3 - // Intentional syntax error. This file should never be compiled, so this - // shouldn't be a problem. diff --git a/third_party/gyp/test/dependencies/c/c.gyp b/third_party/gyp/test/dependencies/c/c.gyp deleted file mode 100644 index eabebea..0000000 --- a/third_party/gyp/test/dependencies/c/c.gyp +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'c_unused', - 'type': 'static_library', - 'sources': [ - 'c.c', - ], - }, - { - 'target_name': 'd', - 'type': 'static_library', - 'sources': [ - 'd.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/dependencies/c/d.c b/third_party/gyp/test/dependencies/c/d.c deleted file mode 100644 index 05465fc..0000000 --- a/third_party/gyp/test/dependencies/c/d.c +++ /dev/null @@ -1,3 +0,0 @@ -int funcD() { - return 4; -} diff --git a/third_party/gyp/test/dependencies/extra_targets.gyp b/third_party/gyp/test/dependencies/extra_targets.gyp deleted file mode 100644 index c1a26de..0000000 --- a/third_party/gyp/test/dependencies/extra_targets.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'a', - 'type': 'static_library', - 'sources': [ - 'a.c', - ], - # This only depends on the "d" target; other targets in c.gyp - # should not become part of the build (unlike with 'c/c.gyp:*'). - 'dependencies': ['c/c.gyp:d'], - }, - ], -} diff --git a/third_party/gyp/test/dependencies/gyptest-extra-targets.py b/third_party/gyp/test/dependencies/gyptest-extra-targets.py deleted file mode 100644 index 3752f74..0000000 --- a/third_party/gyp/test/dependencies/gyptest-extra-targets.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verify that dependencies don't pull unused targets into the build. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('extra_targets.gyp') - -# This should fail if it tries to build 'c_unused' since 'c/c.c' has a syntax -# error and won't compile. -test.build('extra_targets.gyp', test.ALL) - -test.pass_test() diff --git a/third_party/gyp/test/dependencies/gyptest-lib-only.py b/third_party/gyp/test/dependencies/gyptest-lib-only.py deleted file mode 100755 index d90d88f..0000000 --- a/third_party/gyp/test/dependencies/gyptest-lib-only.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verify that a link time only dependency will get pulled into the set of built -targets, even if no executable uses it. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('lib_only.gyp') - -test.build('lib_only.gyp', test.ALL) - -# Make doesn't put static libs in a common 'lib' directory, like it does with -# shared libs, so check in the obj path corresponding to the source path. -test.built_file_must_exist('a', type=test.STATIC_LIB, libdir='obj.target') - -# TODO(bradnelson/mark): -# On linux and windows a library target will at least pull its link dependencies -# into the generated sln/_main.scons, since not doing so confuses users. -# This is not currently implemented on mac, which has the opposite behavior. -if test.format == 'xcode': - test.built_file_must_not_exist('b', type=test.STATIC_LIB) -else: - test.built_file_must_exist('b', type=test.STATIC_LIB, libdir='obj.target/b') - -test.pass_test() diff --git a/third_party/gyp/test/dependencies/lib_only.gyp b/third_party/gyp/test/dependencies/lib_only.gyp deleted file mode 100755 index f6c84de..0000000 --- a/third_party/gyp/test/dependencies/lib_only.gyp +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'a', - 'type': 'static_library', - 'sources': [ - 'a.c', - ], - 'dependencies': ['b/b.gyp:b'], - }, - ], -} diff --git a/third_party/gyp/test/dependency-copy/gyptest-copy.py b/third_party/gyp/test/dependency-copy/gyptest-copy.py deleted file mode 100644 index 5ba7c73..0000000 --- a/third_party/gyp/test/dependency-copy/gyptest-copy.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies dependencies do the copy step. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('copies.gyp', chdir='src') - -test.build('copies.gyp', 'proj2', chdir='src') - -test.run_built_executable('proj1', - chdir='src', - stdout="Hello from file1.c\n") -test.run_built_executable('proj2', - chdir='src', - stdout="Hello from file2.c\n") - -test.pass_test() diff --git a/third_party/gyp/test/dependency-copy/src/copies.gyp b/third_party/gyp/test/dependency-copy/src/copies.gyp deleted file mode 100644 index 4176b18..0000000 --- a/third_party/gyp/test/dependency-copy/src/copies.gyp +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'proj1', - 'type': 'executable', - 'sources': [ - 'file1.c', - ], - }, - { - 'target_name': 'proj2', - 'type': 'executable', - 'sources': [ - 'file2.c', - ], - 'dependencies': [ - 'proj1', - ] - }, - ], -} diff --git a/third_party/gyp/test/dependency-copy/src/file1.c b/third_party/gyp/test/dependency-copy/src/file1.c deleted file mode 100644 index 3caf5d6..0000000 --- a/third_party/gyp/test/dependency-copy/src/file1.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from file1.c\n"); - return 0; -} diff --git a/third_party/gyp/test/dependency-copy/src/file2.c b/third_party/gyp/test/dependency-copy/src/file2.c deleted file mode 100644 index ed45cc0..0000000 --- a/third_party/gyp/test/dependency-copy/src/file2.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from file2.c\n"); - return 0; -} diff --git a/third_party/gyp/test/generator-output/actions/actions.gyp b/third_party/gyp/test/generator-output/actions/actions.gyp deleted file mode 100644 index dded59a..0000000 --- a/third_party/gyp/test/generator-output/actions/actions.gyp +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'pull_in_all_actions', - 'type': 'none', - 'dependencies': [ - 'subdir1/executable.gyp:*', - 'subdir2/none.gyp:*', - ], - }, - ], -} diff --git a/third_party/gyp/test/generator-output/actions/build/README.txt b/third_party/gyp/test/generator-output/actions/build/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/third_party/gyp/test/generator-output/actions/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/actions/subdir1/actions-out/README.txt b/third_party/gyp/test/generator-output/actions/subdir1/actions-out/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/third_party/gyp/test/generator-output/actions/subdir1/actions-out/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/actions/subdir1/build/README.txt b/third_party/gyp/test/generator-output/actions/subdir1/build/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/third_party/gyp/test/generator-output/actions/subdir1/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/actions/subdir1/executable.gyp b/third_party/gyp/test/generator-output/actions/subdir1/executable.gyp deleted file mode 100644 index 6bdd60a..0000000 --- a/third_party/gyp/test/generator-output/actions/subdir1/executable.gyp +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright (c) 2009 Google Inc. 
All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'program.c', - ], - 'actions': [ - { - 'action_name': 'make-prog1', - 'inputs': [ - 'make-prog1.py', - ], - 'outputs': [ - '<(INTERMEDIATE_DIR)/prog1.c', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - { - 'action_name': 'make-prog2', - 'inputs': [ - 'make-prog2.py', - ], - 'outputs': [ - 'actions-out/prog2.c', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/generator-output/actions/subdir1/make-prog1.py b/third_party/gyp/test/generator-output/actions/subdir1/make-prog1.py deleted file mode 100644 index 7ea1d8a..0000000 --- a/third_party/gyp/test/generator-output/actions/subdir1/make-prog1.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = r""" -#include - -void prog1(void) -{ - printf("Hello from make-prog1.py\n"); -} -""" - -open(sys.argv[1], 'w').write(contents) - -sys.exit(0) diff --git a/third_party/gyp/test/generator-output/actions/subdir1/make-prog2.py b/third_party/gyp/test/generator-output/actions/subdir1/make-prog2.py deleted file mode 100644 index 0bfe497..0000000 --- a/third_party/gyp/test/generator-output/actions/subdir1/make-prog2.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = r""" -#include - -void prog2(void) -{ - printf("Hello from make-prog2.py\n"); -} -""" - -open(sys.argv[1], 'w').write(contents) - -sys.exit(0) diff --git a/third_party/gyp/test/generator-output/actions/subdir1/program.c b/third_party/gyp/test/generator-output/actions/subdir1/program.c deleted file mode 100644 index d5f661d..0000000 --- a/third_party/gyp/test/generator-output/actions/subdir1/program.c +++ /dev/null @@ -1,12 +0,0 @@ -#include - -extern void prog1(void); -extern void prog2(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from program.c\n"); - prog1(); - prog2(); - return 0; -} diff --git a/third_party/gyp/test/generator-output/actions/subdir2/actions-out/README.txt b/third_party/gyp/test/generator-output/actions/subdir2/actions-out/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/third_party/gyp/test/generator-output/actions/subdir2/actions-out/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. 
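The deleted actions/subdir1/executable.gyp above drives two generated C sources through GYP 'actions' entries whose 'action' command lines use the placeholders <(_inputs) and <@(_outputs). As a rough illustration of that substitution (the expand() helper below is invented for this note; real GYP generators additionally rebase paths and track dependencies), a minimal sketch in Python:

def expand(action_entry):
    # Replace the <(_inputs)/<@(_outputs) placeholders in the action's
    # command with the action's own 'inputs' and 'outputs' lists.
    cmd = []
    for arg in action_entry['action']:
        if arg in ('<(_inputs)', '<@(_inputs)'):
            cmd.extend(action_entry['inputs'])
        elif arg in ('<(_outputs)', '<@(_outputs)'):
            cmd.extend(action_entry['outputs'])
        else:
            cmd.append(arg)
    return cmd

# Mirrors the 'make-prog1' action from the deleted executable.gyp; the
# intermediate-directory path is written literally instead of <(INTERMEDIATE_DIR).
make_prog1 = {
    'action_name': 'make-prog1',
    'inputs': ['make-prog1.py'],
    'outputs': ['INTERMEDIATE_DIR/prog1.c'],
    'action': ['python', '<(_inputs)', '<@(_outputs)'],
}

print(expand(make_prog1))  # ['python', 'make-prog1.py', 'INTERMEDIATE_DIR/prog1.c']

With 'process_outputs_as_sources': 1, the generated prog1.c and prog2.c are then compiled into the 'program' target alongside program.c, which is what gyptest-actions.py later verifies.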
diff --git a/third_party/gyp/test/generator-output/actions/subdir2/build/README.txt b/third_party/gyp/test/generator-output/actions/subdir2/build/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/third_party/gyp/test/generator-output/actions/subdir2/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/actions/subdir2/make-file.py b/third_party/gyp/test/generator-output/actions/subdir2/make-file.py deleted file mode 100644 index fff0653..0000000 --- a/third_party/gyp/test/generator-output/actions/subdir2/make-file.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = "Hello from make-file.py\n" - -open(sys.argv[1], 'wb').write(contents) diff --git a/third_party/gyp/test/generator-output/actions/subdir2/none.gyp b/third_party/gyp/test/generator-output/actions/subdir2/none.gyp deleted file mode 100644 index f98f527..0000000 --- a/third_party/gyp/test/generator-output/actions/subdir2/none.gyp +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'file', - 'type': 'none', - 'msvs_cygwin_shell': 0, - 'actions': [ - { - 'action_name': 'make-file', - 'inputs': [ - 'make-file.py', - ], - 'outputs': [ - 'actions-out/file.out', - # TODO: enhance testing infrastructure to test this - # without having to hard-code the intermediate dir paths. - #'<(INTERMEDIATE_DIR)/file.out', - ], - 'action': [ - 'python', '<(_inputs)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - } - ], - }, - ], -} diff --git a/third_party/gyp/test/generator-output/copies/build/README.txt b/third_party/gyp/test/generator-output/copies/build/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/third_party/gyp/test/generator-output/copies/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/copies/copies-out/README.txt b/third_party/gyp/test/generator-output/copies/copies-out/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/third_party/gyp/test/generator-output/copies/copies-out/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/copies/copies.gyp b/third_party/gyp/test/generator-output/copies/copies.gyp deleted file mode 100644 index 479a3d9..0000000 --- a/third_party/gyp/test/generator-output/copies/copies.gyp +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'pull_in_subdir', - 'type': 'none', - 'dependencies': [ - 'subdir/subdir.gyp:*', - ], - }, - { - 'target_name': 'copies1', - 'type': 'none', - 'copies': [ - { - 'destination': 'copies-out', - 'files': [ - 'file1', - ], - }, - ], - }, - { - 'target_name': 'copies2', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-out', - 'files': [ - 'file2', - ], - }, - ], - }, - # Verify that a null 'files' list doesn't gag the generators. - { - 'target_name': 'copies_null', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-null', - 'files': [], - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/generator-output/copies/file1 b/third_party/gyp/test/generator-output/copies/file1 deleted file mode 100644 index 84d55c5..0000000 --- a/third_party/gyp/test/generator-output/copies/file1 +++ /dev/null @@ -1 +0,0 @@ -file1 contents diff --git a/third_party/gyp/test/generator-output/copies/file2 b/third_party/gyp/test/generator-output/copies/file2 deleted file mode 100644 index af1b8ae..0000000 --- a/third_party/gyp/test/generator-output/copies/file2 +++ /dev/null @@ -1 +0,0 @@ -file2 contents diff --git a/third_party/gyp/test/generator-output/copies/subdir/build/README.txt b/third_party/gyp/test/generator-output/copies/subdir/build/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/third_party/gyp/test/generator-output/copies/subdir/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/copies/subdir/copies-out/README.txt b/third_party/gyp/test/generator-output/copies/subdir/copies-out/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/third_party/gyp/test/generator-output/copies/subdir/copies-out/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/copies/subdir/file3 b/third_party/gyp/test/generator-output/copies/subdir/file3 deleted file mode 100644 index 43f16f3..0000000 --- a/third_party/gyp/test/generator-output/copies/subdir/file3 +++ /dev/null @@ -1 +0,0 @@ -file3 contents diff --git a/third_party/gyp/test/generator-output/copies/subdir/file4 b/third_party/gyp/test/generator-output/copies/subdir/file4 deleted file mode 100644 index 5f7270a..0000000 --- a/third_party/gyp/test/generator-output/copies/subdir/file4 +++ /dev/null @@ -1 +0,0 @@ -file4 contents diff --git a/third_party/gyp/test/generator-output/copies/subdir/subdir.gyp b/third_party/gyp/test/generator-output/copies/subdir/subdir.gyp deleted file mode 100644 index af031d2..0000000 --- a/third_party/gyp/test/generator-output/copies/subdir/subdir.gyp +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'copies3', - 'type': 'none', - 'copies': [ - { - 'destination': 'copies-out', - 'files': [ - 'file3', - ], - }, - ], - }, - { - 'target_name': 'copies4', - 'type': 'none', - 'copies': [ - { - 'destination': '<(PRODUCT_DIR)/copies-out', - 'files': [ - 'file4', - ], - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/generator-output/gyptest-actions.py b/third_party/gyp/test/generator-output/gyptest-actions.py deleted file mode 100644 index 73ac5ae..0000000 --- a/third_party/gyp/test/generator-output/gyptest-actions.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies --generator-output= behavior when using actions. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -# All the generated files should go under 'gypfiles'. The source directory -# ('actions') should be untouched. -test.writable(test.workpath('actions'), False) -test.run_gyp('actions.gyp', - '--generator-output=' + test.workpath('gypfiles'), - chdir='actions') - -test.writable(test.workpath('actions'), True) - -test.relocate('actions', 'relocate/actions') -test.relocate('gypfiles', 'relocate/gypfiles') - -test.writable(test.workpath('relocate/actions'), False) - -# Some of the action outputs use "pure" relative paths (i.e. without prefixes -# like <(INTERMEDIATE_DIR) or <(PROGRAM_DIR)). Even though we are building under -# 'gypfiles', such outputs will still be created relative to the original .gyp -# sources. Projects probably wouldn't normally do this, since it kind of defeats -# the purpose of '--generator-output', but it is supported behaviour. -test.writable(test.workpath('relocate/actions/build'), True) -test.writable(test.workpath('relocate/actions/subdir1/build'), True) -test.writable(test.workpath('relocate/actions/subdir1/actions-out'), True) -test.writable(test.workpath('relocate/actions/subdir2/build'), True) -test.writable(test.workpath('relocate/actions/subdir2/actions-out'), True) - -test.build('actions.gyp', test.ALL, chdir='relocate/gypfiles') - -expect = """\ -Hello from program.c -Hello from make-prog1.py -Hello from make-prog2.py -""" - -if test.format == 'xcode': - chdir = 'relocate/actions/subdir1' -else: - chdir = 'relocate/gypfiles' -test.run_built_executable('program', chdir=chdir, stdout=expect) - -test.must_match('relocate/actions/subdir2/actions-out/file.out', - "Hello from make-file.py\n") - -test.pass_test() diff --git a/third_party/gyp/test/generator-output/gyptest-copies.py b/third_party/gyp/test/generator-output/gyptest-copies.py deleted file mode 100644 index 414b7c3..0000000 --- a/third_party/gyp/test/generator-output/gyptest-copies.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies file copies using an explicit build target of 'all'. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.writable(test.workpath('copies'), False) - -test.run_gyp('copies.gyp', - '--generator-output=' + test.workpath('gypfiles'), - chdir='copies') - -test.writable(test.workpath('copies'), True) - -test.relocate('copies', 'relocate/copies') -test.relocate('gypfiles', 'relocate/gypfiles') - -test.writable(test.workpath('relocate/copies'), False) - -test.writable(test.workpath('relocate/copies/build'), True) -test.writable(test.workpath('relocate/copies/copies-out'), True) -test.writable(test.workpath('relocate/copies/subdir/build'), True) -test.writable(test.workpath('relocate/copies/subdir/copies-out'), True) - -test.build('copies.gyp', test.ALL, chdir='relocate/gypfiles') - -test.must_match(['relocate', 'copies', 'copies-out', 'file1'], - "file1 contents\n") - -if test.format == 'xcode': - chdir = 'relocate/copies/build' -elif test.format == 'make': - chdir = 'relocate/gypfiles/out' -else: - chdir = 'relocate/gypfiles' -test.must_match([chdir, 'Default', 'copies-out', 'file2'], "file2 contents\n") - -test.must_match(['relocate', 'copies', 'subdir', 'copies-out', 'file3'], - "file3 contents\n") - -if test.format == 'xcode': - chdir = 'relocate/copies/subdir/build' -elif test.format == 'make': - chdir = 'relocate/gypfiles/out' -else: - chdir = 'relocate/gypfiles' -test.must_match([chdir, 'Default', 'copies-out', 'file4'], "file4 contents\n") - -test.pass_test() diff --git a/third_party/gyp/test/generator-output/gyptest-relocate.py b/third_party/gyp/test/generator-output/gyptest-relocate.py deleted file mode 100644 index dd1c2bd..0000000 --- a/third_party/gyp/test/generator-output/gyptest-relocate.py +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that a project hierarchy created with the --generator-output= -option can be built even when it's relocated to a different path. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.writable(test.workpath('src'), False) - -test.run_gyp('prog1.gyp', - '-Dset_symroot=1', - '--generator-output=' + test.workpath('gypfiles'), - chdir='src') - -test.writable(test.workpath('src'), True) - -test.relocate('src', 'relocate/src') -test.relocate('gypfiles', 'relocate/gypfiles') - -test.writable(test.workpath('relocate/src'), False) - -test.writable(test.workpath('relocate/src/build'), True) -test.writable(test.workpath('relocate/src/subdir2/build'), True) -test.writable(test.workpath('relocate/src/subdir3/build'), True) - -test.build('prog1.gyp', test.ALL, chdir='relocate/gypfiles') - -chdir = 'relocate/gypfiles' - -expect = """\ -Hello from %s -Hello from inc.h -Hello from inc1/include1.h -Hello from inc2/include2.h -Hello from inc3/include3.h -Hello from subdir2/deeper/deeper.h -""" - -if test.format == 'xcode': - chdir = 'relocate/src' -test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c') - -if test.format == 'xcode': - chdir = 'relocate/src/subdir2' -test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c') - -if test.format == 'xcode': - chdir = 'relocate/src/subdir3' -test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c') - -test.pass_test() diff --git a/third_party/gyp/test/generator-output/gyptest-rules.py b/third_party/gyp/test/generator-output/gyptest-rules.py deleted file mode 100644 index 05b674f..0000000 --- a/third_party/gyp/test/generator-output/gyptest-rules.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies --generator-output= behavior when using rules. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.writable(test.workpath('rules'), False) - -test.run_gyp('rules.gyp', - '--generator-output=' + test.workpath('gypfiles'), - chdir='rules') - -test.writable(test.workpath('rules'), True) - -test.relocate('rules', 'relocate/rules') -test.relocate('gypfiles', 'relocate/gypfiles') - -test.writable(test.workpath('relocate/rules'), False) - -test.writable(test.workpath('relocate/rules/build'), True) -test.writable(test.workpath('relocate/rules/subdir1/build'), True) -test.writable(test.workpath('relocate/rules/subdir2/build'), True) -test.writable(test.workpath('relocate/rules/subdir2/rules-out'), True) - -test.build('rules.gyp', test.ALL, chdir='relocate/gypfiles') - -expect = """\ -Hello from program.c -Hello from function1.in1 -Hello from function2.in1 -Hello from define3.in0 -Hello from define4.in0 -""" - -if test.format == 'xcode': - chdir = 'relocate/rules/subdir1' -else: - chdir = 'relocate/gypfiles' -test.run_built_executable('program', chdir=chdir, stdout=expect) - -test.must_match('relocate/rules/subdir2/rules-out/file1.out', - "Hello from file1.in0\n") -test.must_match('relocate/rules/subdir2/rules-out/file2.out', - "Hello from file2.in0\n") -test.must_match('relocate/rules/subdir2/rules-out/file3.out', - "Hello from file3.in1\n") -test.must_match('relocate/rules/subdir2/rules-out/file4.out', - "Hello from file4.in1\n") - -test.pass_test() diff --git a/third_party/gyp/test/generator-output/gyptest-subdir2-deep.py b/third_party/gyp/test/generator-output/gyptest-subdir2-deep.py deleted file mode 100644 index ea1b472..0000000 --- a/third_party/gyp/test/generator-output/gyptest-subdir2-deep.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a target from a .gyp file a few subdirectories -deep when the --generator-output= option is used to put the build -configuration files in a separate directory tree. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.writable(test.workpath('src'), False) - -test.writable(test.workpath('src/subdir2/deeper/build'), True) - -test.run_gyp('deeper.gyp', - '-Dset_symroot=1', - '--generator-output=' + test.workpath('gypfiles'), - chdir='src/subdir2/deeper') - -test.build('deeper.gyp', test.ALL, chdir='gypfiles') - -chdir = 'gypfiles' - -if test.format == 'xcode': - chdir = 'src/subdir2/deeper' -test.run_built_executable('deeper', - chdir=chdir, - stdout="Hello from deeper.c\n") - -test.pass_test() diff --git a/third_party/gyp/test/generator-output/gyptest-top-all.py b/third_party/gyp/test/generator-output/gyptest-top-all.py deleted file mode 100644 index 902ceb2..0000000 --- a/third_party/gyp/test/generator-output/gyptest-top-all.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a project hierarchy created when the --generator-output= -option is used to put the build configuration files in a separate -directory tree. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.writable(test.workpath('src'), False) - -test.run_gyp('prog1.gyp', - '-Dset_symroot=1', - '--generator-output=' + test.workpath('gypfiles'), - chdir='src') - -test.writable(test.workpath('src/build'), True) -test.writable(test.workpath('src/subdir2/build'), True) -test.writable(test.workpath('src/subdir3/build'), True) - -test.build('prog1.gyp', test.ALL, chdir='gypfiles') - -chdir = 'gypfiles' - -expect = """\ -Hello from %s -Hello from inc.h -Hello from inc1/include1.h -Hello from inc2/include2.h -Hello from inc3/include3.h -Hello from subdir2/deeper/deeper.h -""" - -if test.format == 'xcode': - chdir = 'src' -test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c') - -if test.format == 'xcode': - chdir = 'src/subdir2' -test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c') - -if test.format == 'xcode': - chdir = 'src/subdir3' -test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c') - -test.pass_test() diff --git a/third_party/gyp/test/generator-output/rules/build/README.txt b/third_party/gyp/test/generator-output/rules/build/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/third_party/gyp/test/generator-output/rules/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/rules/copy-file.py b/third_party/gyp/test/generator-output/rules/copy-file.py deleted file mode 100644 index 938c336..0000000 --- a/third_party/gyp/test/generator-output/rules/copy-file.py +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -contents = open(sys.argv[1], 'r').read() -open(sys.argv[2], 'wb').write(contents) - -sys.exit(0) diff --git a/third_party/gyp/test/generator-output/rules/rules.gyp b/third_party/gyp/test/generator-output/rules/rules.gyp deleted file mode 100644 index dded59a..0000000 --- a/third_party/gyp/test/generator-output/rules/rules.gyp +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'pull_in_all_actions', - 'type': 'none', - 'dependencies': [ - 'subdir1/executable.gyp:*', - 'subdir2/none.gyp:*', - ], - }, - ], -} diff --git a/third_party/gyp/test/generator-output/rules/subdir1/build/README.txt b/third_party/gyp/test/generator-output/rules/subdir1/build/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir1/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. 
diff --git a/third_party/gyp/test/generator-output/rules/subdir1/define3.in0 b/third_party/gyp/test/generator-output/rules/subdir1/define3.in0 deleted file mode 100644 index cc29c64..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir1/define3.in0 +++ /dev/null @@ -1 +0,0 @@ -#define STRING3 "Hello from define3.in0\n" diff --git a/third_party/gyp/test/generator-output/rules/subdir1/define4.in0 b/third_party/gyp/test/generator-output/rules/subdir1/define4.in0 deleted file mode 100644 index c9b0467..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir1/define4.in0 +++ /dev/null @@ -1 +0,0 @@ -#define STRING4 "Hello from define4.in0\n" diff --git a/third_party/gyp/test/generator-output/rules/subdir1/executable.gyp b/third_party/gyp/test/generator-output/rules/subdir1/executable.gyp deleted file mode 100644 index 2fd89a0..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir1/executable.gyp +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'program.c', - 'function1.in1', - 'function2.in1', - 'define3.in0', - 'define4.in0', - ], - 'include_dirs': [ - '<(INTERMEDIATE_DIR)', - ], - 'rules': [ - { - 'rule_name': 'copy_file_0', - 'extension': 'in0', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - # TODO: fix SCons and Make to support generated files not - # in a variable-named path like <(INTERMEDIATE_DIR) - #'<(RULE_INPUT_ROOT).c', - '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 0, - }, - { - 'rule_name': 'copy_file_1', - 'extension': 'in1', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - # TODO: fix SCons and Make to support generated files not - # in a variable-named path like <(INTERMEDIATE_DIR) - #'<(RULE_INPUT_ROOT).c', - '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/generator-output/rules/subdir1/function1.in1 b/third_party/gyp/test/generator-output/rules/subdir1/function1.in1 deleted file mode 100644 index 545e7ca..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir1/function1.in1 +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void function1(void) -{ - printf("Hello from function1.in1\n"); -} diff --git a/third_party/gyp/test/generator-output/rules/subdir1/function2.in1 b/third_party/gyp/test/generator-output/rules/subdir1/function2.in1 deleted file mode 100644 index 6bad43f..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir1/function2.in1 +++ /dev/null @@ -1,6 +0,0 @@ -#include - -void function2(void) -{ - printf("Hello from function2.in1\n"); -} diff --git a/third_party/gyp/test/generator-output/rules/subdir1/program.c b/third_party/gyp/test/generator-output/rules/subdir1/program.c deleted file mode 100644 index 27fd31e..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir1/program.c +++ /dev/null @@ -1,18 +0,0 @@ -#include -#include "define3.h" -#include "define4.h" - -extern void function1(void); -extern void function2(void); -extern void function3(void); -extern void function4(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from 
program.c\n"); - function1(); - function2(); - printf("%s", STRING3); - printf("%s", STRING4); - return 0; -} diff --git a/third_party/gyp/test/generator-output/rules/subdir2/build/README.txt b/third_party/gyp/test/generator-output/rules/subdir2/build/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir2/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/rules/subdir2/file1.in0 b/third_party/gyp/test/generator-output/rules/subdir2/file1.in0 deleted file mode 100644 index 7aca64f..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir2/file1.in0 +++ /dev/null @@ -1 +0,0 @@ -Hello from file1.in0 diff --git a/third_party/gyp/test/generator-output/rules/subdir2/file2.in0 b/third_party/gyp/test/generator-output/rules/subdir2/file2.in0 deleted file mode 100644 index 80a281a..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir2/file2.in0 +++ /dev/null @@ -1 +0,0 @@ -Hello from file2.in0 diff --git a/third_party/gyp/test/generator-output/rules/subdir2/file3.in1 b/third_party/gyp/test/generator-output/rules/subdir2/file3.in1 deleted file mode 100644 index 60ae2e7..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir2/file3.in1 +++ /dev/null @@ -1 +0,0 @@ -Hello from file3.in1 diff --git a/third_party/gyp/test/generator-output/rules/subdir2/file4.in1 b/third_party/gyp/test/generator-output/rules/subdir2/file4.in1 deleted file mode 100644 index 5a3c307..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir2/file4.in1 +++ /dev/null @@ -1 +0,0 @@ -Hello from file4.in1 diff --git a/third_party/gyp/test/generator-output/rules/subdir2/none.gyp b/third_party/gyp/test/generator-output/rules/subdir2/none.gyp deleted file mode 100644 index 664cbd9..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir2/none.gyp +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'files', - 'type': 'none', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'file1.in0', - 'file2.in0', - 'file3.in1', - 'file4.in1', - ], - 'rules': [ - { - 'rule_name': 'copy_file_0', - 'extension': 'in0', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - 'rules-out/<(RULE_INPUT_ROOT).out', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 0, - }, - { - 'rule_name': 'copy_file_1', - 'extension': 'in1', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - 'rules-out/<(RULE_INPUT_ROOT).out', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/generator-output/rules/subdir2/rules-out/README.txt b/third_party/gyp/test/generator-output/rules/subdir2/rules-out/README.txt deleted file mode 100644 index 1b052c9..0000000 --- a/third_party/gyp/test/generator-output/rules/subdir2/rules-out/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/src/build/README.txt b/third_party/gyp/test/generator-output/src/build/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/third_party/gyp/test/generator-output/src/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/src/inc.h b/third_party/gyp/test/generator-output/src/inc.h deleted file mode 100644 index 57aa1a5..0000000 --- a/third_party/gyp/test/generator-output/src/inc.h +++ /dev/null @@ -1 +0,0 @@ -#define INC_STRING "inc.h" diff --git a/third_party/gyp/test/generator-output/src/inc1/include1.h b/third_party/gyp/test/generator-output/src/inc1/include1.h deleted file mode 100644 index 1d59065..0000000 --- a/third_party/gyp/test/generator-output/src/inc1/include1.h +++ /dev/null @@ -1 +0,0 @@ -#define INCLUDE1_STRING "inc1/include1.h" diff --git a/third_party/gyp/test/generator-output/src/prog1.c b/third_party/gyp/test/generator-output/src/prog1.c deleted file mode 100644 index 656f81d..0000000 --- a/third_party/gyp/test/generator-output/src/prog1.c +++ /dev/null @@ -1,18 +0,0 @@ -#include - -#include "inc.h" -#include "include1.h" -#include "include2.h" -#include "include3.h" -#include "deeper.h" - -int main(int argc, char *argv[]) -{ - printf("Hello from prog1.c\n"); - printf("Hello from %s\n", INC_STRING); - printf("Hello from %s\n", INCLUDE1_STRING); - printf("Hello from %s\n", INCLUDE2_STRING); - printf("Hello from %s\n", INCLUDE3_STRING); - printf("Hello from %s\n", DEEPER_STRING); - return 0; -} diff --git a/third_party/gyp/test/generator-output/src/prog1.gyp b/third_party/gyp/test/generator-output/src/prog1.gyp deleted file mode 100644 index d50e6fb..0000000 --- a/third_party/gyp/test/generator-output/src/prog1.gyp +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'includes': [ - 'symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'prog1', - 'type': 'executable', - 'dependencies': [ - 'subdir2/prog2.gyp:prog2', - ], - 'include_dirs': [ - '.', - 'inc1', - 'subdir2/inc2', - 'subdir3/inc3', - 'subdir2/deeper', - ], - 'sources': [ - 'prog1.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/generator-output/src/subdir2/build/README.txt b/third_party/gyp/test/generator-output/src/subdir2/build/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/third_party/gyp/test/generator-output/src/subdir2/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/src/subdir2/deeper/build/README.txt b/third_party/gyp/test/generator-output/src/subdir2/deeper/build/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/third_party/gyp/test/generator-output/src/subdir2/deeper/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. diff --git a/third_party/gyp/test/generator-output/src/subdir2/deeper/deeper.c b/third_party/gyp/test/generator-output/src/subdir2/deeper/deeper.c deleted file mode 100644 index 56c49d1..0000000 --- a/third_party/gyp/test/generator-output/src/subdir2/deeper/deeper.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from deeper.c\n"); - return 0; -} diff --git a/third_party/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp b/third_party/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp deleted file mode 100644 index 8648770..0000000 --- a/third_party/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'includes': [ - '../../symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'deeper', - 'type': 'executable', - 'sources': [ - 'deeper.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/generator-output/src/subdir2/deeper/deeper.h b/third_party/gyp/test/generator-output/src/subdir2/deeper/deeper.h deleted file mode 100644 index f6484a0..0000000 --- a/third_party/gyp/test/generator-output/src/subdir2/deeper/deeper.h +++ /dev/null @@ -1 +0,0 @@ -#define DEEPER_STRING "subdir2/deeper/deeper.h" diff --git a/third_party/gyp/test/generator-output/src/subdir2/inc2/include2.h b/third_party/gyp/test/generator-output/src/subdir2/inc2/include2.h deleted file mode 100644 index 1ccfa5d..0000000 --- a/third_party/gyp/test/generator-output/src/subdir2/inc2/include2.h +++ /dev/null @@ -1 +0,0 @@ -#define INCLUDE2_STRING "inc2/include2.h" diff --git a/third_party/gyp/test/generator-output/src/subdir2/prog2.c b/third_party/gyp/test/generator-output/src/subdir2/prog2.c deleted file mode 100644 index 38d6c84..0000000 --- a/third_party/gyp/test/generator-output/src/subdir2/prog2.c +++ /dev/null @@ -1,18 +0,0 @@ -#include - -#include "inc.h" -#include "include1.h" -#include "include2.h" -#include "include3.h" -#include "deeper.h" - -int main(int argc, char *argv[]) -{ - printf("Hello from prog2.c\n"); - printf("Hello from %s\n", INC_STRING); - printf("Hello from %s\n", INCLUDE1_STRING); - printf("Hello from %s\n", INCLUDE2_STRING); - printf("Hello from %s\n", INCLUDE3_STRING); - printf("Hello from %s\n", DEEPER_STRING); - return 0; -} diff --git a/third_party/gyp/test/generator-output/src/subdir2/prog2.gyp b/third_party/gyp/test/generator-output/src/subdir2/prog2.gyp deleted file mode 100644 index 7176ed8..0000000 --- a/third_party/gyp/test/generator-output/src/subdir2/prog2.gyp +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'prog2', - 'type': 'executable', - 'include_dirs': [ - '..', - '../inc1', - 'inc2', - '../subdir3/inc3', - 'deeper', - ], - 'dependencies': [ - '../subdir3/prog3.gyp:prog3', - ], - 'sources': [ - 'prog2.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/generator-output/src/subdir3/build/README.txt b/third_party/gyp/test/generator-output/src/subdir3/build/README.txt deleted file mode 100644 index 90ef886..0000000 --- a/third_party/gyp/test/generator-output/src/subdir3/build/README.txt +++ /dev/null @@ -1,4 +0,0 @@ -A place-holder for this Xcode build output directory, so that the -test script can verify that .xcodeproj files are not created in -their normal location by making the src/ read-only, and then -selectively making this build directory writable. 
diff --git a/third_party/gyp/test/generator-output/src/subdir3/inc3/include3.h b/third_party/gyp/test/generator-output/src/subdir3/inc3/include3.h deleted file mode 100644 index bf53bf1..0000000 --- a/third_party/gyp/test/generator-output/src/subdir3/inc3/include3.h +++ /dev/null @@ -1 +0,0 @@ -#define INCLUDE3_STRING "inc3/include3.h" diff --git a/third_party/gyp/test/generator-output/src/subdir3/prog3.c b/third_party/gyp/test/generator-output/src/subdir3/prog3.c deleted file mode 100644 index 7848b45..0000000 --- a/third_party/gyp/test/generator-output/src/subdir3/prog3.c +++ /dev/null @@ -1,18 +0,0 @@ -#include - -#include "inc.h" -#include "include1.h" -#include "include2.h" -#include "include3.h" -#include "deeper.h" - -int main(int argc, char *argv[]) -{ - printf("Hello from prog3.c\n"); - printf("Hello from %s\n", INC_STRING); - printf("Hello from %s\n", INCLUDE1_STRING); - printf("Hello from %s\n", INCLUDE2_STRING); - printf("Hello from %s\n", INCLUDE3_STRING); - printf("Hello from %s\n", DEEPER_STRING); - return 0; -} diff --git a/third_party/gyp/test/generator-output/src/subdir3/prog3.gyp b/third_party/gyp/test/generator-output/src/subdir3/prog3.gyp deleted file mode 100644 index 46c5e00..0000000 --- a/third_party/gyp/test/generator-output/src/subdir3/prog3.gyp +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'prog3', - 'type': 'executable', - 'include_dirs': [ - '..', - '../inc1', - '../subdir2/inc2', - 'inc3', - '../subdir2/deeper', - ], - 'sources': [ - 'prog3.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/generator-output/src/symroot.gypi b/third_party/gyp/test/generator-output/src/symroot.gypi deleted file mode 100644 index 5199164..0000000 --- a/third_party/gyp/test/generator-output/src/symroot.gypi +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'variables': { - 'set_symroot%': 0, - }, - 'conditions': [ - ['set_symroot == 1', { - 'xcode_settings': { - 'SYMROOT': '<(DEPTH)/build', - }, - }], - ], -} diff --git a/third_party/gyp/test/hello/gyptest-all.py b/third_party/gyp/test/hello/gyptest-all.py deleted file mode 100644 index 9ecff55..0000000 --- a/third_party/gyp/test/hello/gyptest-all.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simplest-possible build of a "Hello, world!" program -using an explicit build target of 'all'. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('hello.gyp') - -test.build('hello.gyp', test.ALL) - -test.run_built_executable('hello', stdout="Hello, world!\n") - -test.up_to_date('hello.gyp', test.ALL) - -test.pass_test() diff --git a/third_party/gyp/test/hello/gyptest-default.py b/third_party/gyp/test/hello/gyptest-default.py deleted file mode 100644 index 76fffb3..0000000 --- a/third_party/gyp/test/hello/gyptest-default.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -""" -Verifies simplest-possible build of a "Hello, world!" program -using the default build target. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('hello.gyp') - -test.build('hello.gyp') - -test.run_built_executable('hello', stdout="Hello, world!\n") - -test.up_to_date('hello.gyp', test.DEFAULT) - -test.pass_test() diff --git a/third_party/gyp/test/hello/gyptest-disable-regyp.py b/third_party/gyp/test/hello/gyptest-disable-regyp.py deleted file mode 100644 index 1e4b306..0000000 --- a/third_party/gyp/test/hello/gyptest-disable-regyp.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that Makefiles don't get rebuilt when a source gyp file changes and -the disable_regeneration generator flag is set. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('hello.gyp', '-Gauto_regeneration=0') - -test.build('hello.gyp', test.ALL) - -test.run_built_executable('hello', stdout="Hello, world!\n") - -# Sleep so that the changed gyp file will have a newer timestamp than the -# previously generated build files. -test.sleep() -test.write('hello.gyp', test.read('hello2.gyp')) - -test.build('hello.gyp', test.ALL) - -# Should still be the old executable, as regeneration was disabled. -test.run_built_executable('hello', stdout="Hello, world!\n") - -test.pass_test() diff --git a/third_party/gyp/test/hello/gyptest-regyp.py b/third_party/gyp/test/hello/gyptest-regyp.py deleted file mode 100644 index 827c723..0000000 --- a/third_party/gyp/test/hello/gyptest-regyp.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that Makefiles get rebuilt when a source gyp file changes. -""" - -import TestGyp - -# Regenerating build files when a gyp file changes is currently only supported -# by the make generator. -test = TestGyp.TestGyp(formats=['make']) - -test.run_gyp('hello.gyp') - -test.build('hello.gyp', test.ALL) - -test.run_built_executable('hello', stdout="Hello, world!\n") - -# Sleep so that the changed gyp file will have a newer timestamp than the -# previously generated build files. -test.sleep() -test.write('hello.gyp', test.read('hello2.gyp')) - -test.build('hello.gyp', test.ALL) - -test.run_built_executable('hello', stdout="Hello, two!\n") - -test.pass_test() diff --git a/third_party/gyp/test/hello/gyptest-target.py b/third_party/gyp/test/hello/gyptest-target.py deleted file mode 100755 index 2f0a2a3..0000000 --- a/third_party/gyp/test/hello/gyptest-target.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simplest-possible build of a "Hello, world!" program -using an explicit build target of 'hello'. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('hello.gyp') - -test.build('hello.gyp', 'hello') - -test.run_built_executable('hello', stdout="Hello, world!\n") - -test.up_to_date('hello.gyp', 'hello') - -test.pass_test() diff --git a/third_party/gyp/test/hello/hello.c b/third_party/gyp/test/hello/hello.c deleted file mode 100644 index 8dbecc0..0000000 --- a/third_party/gyp/test/hello/hello.c +++ /dev/null @@ -1,11 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -#include - -int main(int argc, char *argv[]) -{ - printf("Hello, world!\n"); - return 0; -} diff --git a/third_party/gyp/test/hello/hello.gyp b/third_party/gyp/test/hello/hello.gyp deleted file mode 100644 index 1974d51..0000000 --- a/third_party/gyp/test/hello/hello.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'hello', - 'type': 'executable', - 'sources': [ - 'hello.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/hello/hello2.c b/third_party/gyp/test/hello/hello2.c deleted file mode 100644 index 19ef3fb..0000000 --- a/third_party/gyp/test/hello/hello2.c +++ /dev/null @@ -1,11 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -#include - -int main(int argc, char *argv[]) -{ - printf("Hello, two!\n"); - return 0; -} diff --git a/third_party/gyp/test/hello/hello2.gyp b/third_party/gyp/test/hello/hello2.gyp deleted file mode 100644 index 25b08ca..0000000 --- a/third_party/gyp/test/hello/hello2.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'hello', - 'type': 'executable', - 'sources': [ - 'hello2.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py b/third_party/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py deleted file mode 100644 index a2b9f30..0000000 --- a/third_party/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies inclusion of $HOME/.gyp/includes.gypi works properly with relocation -and with regeneration. -""" - -import os -import TestGyp - -# Regenerating build files when a gyp file changes is currently only supported -# by the make generator. -test = TestGyp.TestGyp(formats=['make']) - -os.environ['HOME'] = os.path.abspath('home') - -test.run_gyp('all.gyp', chdir='src') - -# After relocating, we should still be able to build (build file shouldn't -# contain relative reference to ~/.gyp/includes.gypi) -test.relocate('src', 'relocate/src') - -test.build('all.gyp', test.ALL, chdir='relocate/src') - -test.run_built_executable('printfoo', - chdir='relocate/src', - stdout="FOO is fromhome\n"); - -# Building should notice any changes to ~/.gyp/includes.gypi and regyp. 
-test.sleep() - -test.write('home/.gyp/include.gypi', test.read('home2/.gyp/include.gypi')) - -test.build('all.gyp', test.ALL, chdir='relocate/src') - -test.run_built_executable('printfoo', - chdir='relocate/src', - stdout="FOO is fromhome2\n"); - -test.pass_test() diff --git a/third_party/gyp/test/home_dot_gyp/gyptest-home-includes.py b/third_party/gyp/test/home_dot_gyp/gyptest-home-includes.py deleted file mode 100644 index 6a0e965..0000000 --- a/third_party/gyp/test/home_dot_gyp/gyptest-home-includes.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies inclusion of $HOME/.gyp/includes.gypi works. -""" - -import os -import TestGyp - -test = TestGyp.TestGyp() - -os.environ['HOME'] = os.path.abspath('home') - -test.run_gyp('all.gyp', chdir='src') - -# After relocating, we should still be able to build (build file shouldn't -# contain relative reference to ~/.gyp/includes.gypi) -test.relocate('src', 'relocate/src') - -test.build('all.gyp', test.ALL, chdir='relocate/src') - -test.run_built_executable('printfoo', - chdir='relocate/src', - stdout="FOO is fromhome\n"); - -test.pass_test() diff --git a/third_party/gyp/test/home_dot_gyp/home/.gyp/include.gypi b/third_party/gyp/test/home_dot_gyp/home/.gyp/include.gypi deleted file mode 100644 index fcfb39b..0000000 --- a/third_party/gyp/test/home_dot_gyp/home/.gyp/include.gypi +++ /dev/null @@ -1,5 +0,0 @@ -{ - 'variables': { - 'foo': '"fromhome"', - }, -} diff --git a/third_party/gyp/test/home_dot_gyp/home2/.gyp/include.gypi b/third_party/gyp/test/home_dot_gyp/home2/.gyp/include.gypi deleted file mode 100644 index f0d84b3..0000000 --- a/third_party/gyp/test/home_dot_gyp/home2/.gyp/include.gypi +++ /dev/null @@ -1,5 +0,0 @@ -{ - 'variables': { - 'foo': '"fromhome2"', - }, -} diff --git a/third_party/gyp/test/home_dot_gyp/src/all.gyp b/third_party/gyp/test/home_dot_gyp/src/all.gyp deleted file mode 100644 index 14b6aea..0000000 --- a/third_party/gyp/test/home_dot_gyp/src/all.gyp +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'variables': { - 'foo%': '"fromdefault"', - }, - 'targets': [ - { - 'target_name': 'printfoo', - 'type': 'executable', - 'sources': [ - 'printfoo.c', - ], - 'defines': [ - 'FOO=<(foo)', - ], - }, - ], -} - diff --git a/third_party/gyp/test/home_dot_gyp/src/printfoo.c b/third_party/gyp/test/home_dot_gyp/src/printfoo.c deleted file mode 100644 index 92d2cba..0000000 --- a/third_party/gyp/test/home_dot_gyp/src/printfoo.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("FOO is %s\n", FOO); - return 0; -} diff --git a/third_party/gyp/test/include_dirs/gyptest-all.py b/third_party/gyp/test/include_dirs/gyptest-all.py deleted file mode 100644 index 7496600..0000000 --- a/third_party/gyp/test/include_dirs/gyptest-all.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies use of include_dirs when using an explicit build target of 'all'. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('includes.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('includes.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from includes.c -Hello from inc.h -Hello from include1.h -Hello from subdir/inc2/include2.h -""" -test.run_built_executable('includes', stdout=expect, chdir='relocate/src') - -if test.format == 'xcode': - chdir='relocate/src/subdir' -else: - chdir='relocate/src' - -expect = """\ -Hello from subdir/subdir_includes.c -Hello from subdir/inc.h -Hello from include1.h -Hello from subdir/inc2/include2.h -""" -test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir) - -test.pass_test() diff --git a/third_party/gyp/test/include_dirs/gyptest-default.py b/third_party/gyp/test/include_dirs/gyptest-default.py deleted file mode 100644 index 467f58d..0000000 --- a/third_party/gyp/test/include_dirs/gyptest-default.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies use of include_dirs when using the default build target. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('includes.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('includes.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from includes.c -Hello from inc.h -Hello from include1.h -Hello from subdir/inc2/include2.h -""" -test.run_built_executable('includes', stdout=expect, chdir='relocate/src') - -if test.format == 'xcode': - chdir='relocate/src/subdir' -else: - chdir='relocate/src' - -expect = """\ -Hello from subdir/subdir_includes.c -Hello from subdir/inc.h -Hello from include1.h -Hello from subdir/inc2/include2.h -""" -test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir) - -test.pass_test() diff --git a/third_party/gyp/test/include_dirs/src/inc.h b/third_party/gyp/test/include_dirs/src/inc.h deleted file mode 100644 index 0398d69..0000000 --- a/third_party/gyp/test/include_dirs/src/inc.h +++ /dev/null @@ -1 +0,0 @@ -#define INC_STRING "inc.h" diff --git a/third_party/gyp/test/include_dirs/src/inc1/include1.h b/third_party/gyp/test/include_dirs/src/inc1/include1.h deleted file mode 100644 index 43356b5..0000000 --- a/third_party/gyp/test/include_dirs/src/inc1/include1.h +++ /dev/null @@ -1 +0,0 @@ -#define INCLUDE1_STRING "include1.h" diff --git a/third_party/gyp/test/include_dirs/src/includes.c b/third_party/gyp/test/include_dirs/src/includes.c deleted file mode 100644 index 756c427..0000000 --- a/third_party/gyp/test/include_dirs/src/includes.c +++ /dev/null @@ -1,14 +0,0 @@ -#include - -#include "inc.h" -#include "include1.h" -#include "include2.h" - -int main(int argc, char *argv[]) -{ - printf("Hello from includes.c\n"); - printf("Hello from %s\n", INC_STRING); - printf("Hello from %s\n", INCLUDE1_STRING); - printf("Hello from %s\n", INCLUDE2_STRING); - return 0; -} diff --git a/third_party/gyp/test/include_dirs/src/includes.gyp b/third_party/gyp/test/include_dirs/src/includes.gyp deleted file mode 100644 index a2a55cc..0000000 --- a/third_party/gyp/test/include_dirs/src/includes.gyp +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'includes', - 'type': 'executable', - 'dependencies': [ - 'subdir/subdir_includes.gyp:subdir_includes', - ], - 'include_dirs': [ - '.', - 'inc1', - 'subdir/inc2', - ], - 'sources': [ - 'includes.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/include_dirs/src/subdir/inc.h b/third_party/gyp/test/include_dirs/src/subdir/inc.h deleted file mode 100644 index 0a68d7b..0000000 --- a/third_party/gyp/test/include_dirs/src/subdir/inc.h +++ /dev/null @@ -1 +0,0 @@ -#define INC_STRING "subdir/inc.h" diff --git a/third_party/gyp/test/include_dirs/src/subdir/inc2/include2.h b/third_party/gyp/test/include_dirs/src/subdir/inc2/include2.h deleted file mode 100644 index 721577e..0000000 --- a/third_party/gyp/test/include_dirs/src/subdir/inc2/include2.h +++ /dev/null @@ -1 +0,0 @@ -#define INCLUDE2_STRING "subdir/inc2/include2.h" diff --git a/third_party/gyp/test/include_dirs/src/subdir/subdir_includes.c b/third_party/gyp/test/include_dirs/src/subdir/subdir_includes.c deleted file mode 100644 index 727f682..0000000 --- a/third_party/gyp/test/include_dirs/src/subdir/subdir_includes.c +++ /dev/null @@ -1,14 +0,0 @@ -#include - -#include "inc.h" -#include "include1.h" -#include "include2.h" - -int main(int argc, char *argv[]) -{ - printf("Hello from subdir/subdir_includes.c\n"); - printf("Hello from %s\n", INC_STRING); - printf("Hello from %s\n", INCLUDE1_STRING); - printf("Hello from %s\n", INCLUDE2_STRING); - return 0; -} diff --git a/third_party/gyp/test/include_dirs/src/subdir/subdir_includes.gyp b/third_party/gyp/test/include_dirs/src/subdir/subdir_includes.gyp deleted file mode 100644 index 257d052..0000000 --- a/third_party/gyp/test/include_dirs/src/subdir/subdir_includes.gyp +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'subdir_includes', - 'type': 'executable', - 'include_dirs': [ - '.', - '../inc1', - 'inc2', - ], - 'sources': [ - 'subdir_includes.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/lib/README.txt b/third_party/gyp/test/lib/README.txt deleted file mode 100644 index b3d7245..0000000 --- a/third_party/gyp/test/lib/README.txt +++ /dev/null @@ -1,17 +0,0 @@ -Supporting modules for GYP testing. - - TestCmd.py - TestCommon.py - - Modules for generic testing of command-line utilities, - specifically including the ability to copy a test configuration - to temporary directories (with default cleanup on exit) as part - of running test scripts that invoke commands, compare actual - against expected output, etc. - - Our copies of these come from the SCons project, - http://www.scons.org/. - - TestGyp.py - - Modules for GYP-specific tests, of course. diff --git a/third_party/gyp/test/lib/TestCmd.py b/third_party/gyp/test/lib/TestCmd.py deleted file mode 100644 index 029c1d0..0000000 --- a/third_party/gyp/test/lib/TestCmd.py +++ /dev/null @@ -1,1591 +0,0 @@ -""" -TestCmd.py: a testing framework for commands and scripts. - -The TestCmd module provides a framework for portable automated testing -of executable commands and scripts (in any language, not just Python), -especially commands and scripts that require file system interaction. 
- -In addition to running tests and evaluating conditions, the TestCmd -module manages and cleans up one or more temporary workspace -directories, and provides methods for creating files and directories in -those workspace directories from in-line data, here-documents), allowing -tests to be completely self-contained. - -A TestCmd environment object is created via the usual invocation: - - import TestCmd - test = TestCmd.TestCmd() - -There are a bunch of keyword arguments available at instantiation: - - test = TestCmd.TestCmd(description = 'string', - program = 'program_or_script_to_test', - interpreter = 'script_interpreter', - workdir = 'prefix', - subdir = 'subdir', - verbose = Boolean, - match = default_match_function, - diff = default_diff_function, - combine = Boolean) - -There are a bunch of methods that let you do different things: - - test.verbose_set(1) - - test.description_set('string') - - test.program_set('program_or_script_to_test') - - test.interpreter_set('script_interpreter') - test.interpreter_set(['script_interpreter', 'arg']) - - test.workdir_set('prefix') - test.workdir_set('') - - test.workpath('file') - test.workpath('subdir', 'file') - - test.subdir('subdir', ...) - - test.rmdir('subdir', ...) - - test.write('file', "contents\n") - test.write(['subdir', 'file'], "contents\n") - - test.read('file') - test.read(['subdir', 'file']) - test.read('file', mode) - test.read(['subdir', 'file'], mode) - - test.writable('dir', 1) - test.writable('dir', None) - - test.preserve(condition, ...) - - test.cleanup(condition) - - test.command_args(program = 'program_or_script_to_run', - interpreter = 'script_interpreter', - arguments = 'arguments to pass to program') - - test.run(program = 'program_or_script_to_run', - interpreter = 'script_interpreter', - arguments = 'arguments to pass to program', - chdir = 'directory_to_chdir_to', - stdin = 'input to feed to the program\n') - universal_newlines = True) - - p = test.start(program = 'program_or_script_to_run', - interpreter = 'script_interpreter', - arguments = 'arguments to pass to program', - universal_newlines = None) - - test.finish(self, p) - - test.pass_test() - test.pass_test(condition) - test.pass_test(condition, function) - - test.fail_test() - test.fail_test(condition) - test.fail_test(condition, function) - test.fail_test(condition, function, skip) - - test.no_result() - test.no_result(condition) - test.no_result(condition, function) - test.no_result(condition, function, skip) - - test.stdout() - test.stdout(run) - - test.stderr() - test.stderr(run) - - test.symlink(target, link) - - test.banner(string) - test.banner(string, width) - - test.diff(actual, expected) - - test.match(actual, expected) - - test.match_exact("actual 1\nactual 2\n", "expected 1\nexpected 2\n") - test.match_exact(["actual 1\n", "actual 2\n"], - ["expected 1\n", "expected 2\n"]) - - test.match_re("actual 1\nactual 2\n", regex_string) - test.match_re(["actual 1\n", "actual 2\n"], list_of_regexes) - - test.match_re_dotall("actual 1\nactual 2\n", regex_string) - test.match_re_dotall(["actual 1\n", "actual 2\n"], list_of_regexes) - - test.tempdir() - test.tempdir('temporary-directory') - - test.sleep() - test.sleep(seconds) - - test.where_is('foo') - test.where_is('foo', 'PATH1:PATH2') - test.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4') - - test.unlink('file') - test.unlink('subdir', 'file') - -The TestCmd module provides pass_test(), fail_test(), and no_result() -unbound functions that report test results for use with the Aegis change -management 
system. These methods terminate the test immediately, -reporting PASSED, FAILED, or NO RESULT respectively, and exiting with -status 0 (success), 1 or 2 respectively. This allows for a distinction -between an actual failed test and a test that could not be properly -evaluated because of an external condition (such as a full file system -or incorrect permissions). - - import TestCmd - - TestCmd.pass_test() - TestCmd.pass_test(condition) - TestCmd.pass_test(condition, function) - - TestCmd.fail_test() - TestCmd.fail_test(condition) - TestCmd.fail_test(condition, function) - TestCmd.fail_test(condition, function, skip) - - TestCmd.no_result() - TestCmd.no_result(condition) - TestCmd.no_result(condition, function) - TestCmd.no_result(condition, function, skip) - -The TestCmd module also provides unbound functions that handle matching -in the same way as the match_*() methods described above. - - import TestCmd - - test = TestCmd.TestCmd(match = TestCmd.match_exact) - - test = TestCmd.TestCmd(match = TestCmd.match_re) - - test = TestCmd.TestCmd(match = TestCmd.match_re_dotall) - -The TestCmd module provides unbound functions that can be used for the -"diff" argument to TestCmd.TestCmd instantiation: - - import TestCmd - - test = TestCmd.TestCmd(match = TestCmd.match_re, - diff = TestCmd.diff_re) - - test = TestCmd.TestCmd(diff = TestCmd.simple_diff) - -The "diff" argument can also be used with standard difflib functions: - - import difflib - - test = TestCmd.TestCmd(diff = difflib.context_diff) - - test = TestCmd.TestCmd(diff = difflib.unified_diff) - -Lastly, the where_is() method also exists in an unbound function -version. - - import TestCmd - - TestCmd.where_is('foo') - TestCmd.where_is('foo', 'PATH1:PATH2') - TestCmd.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4') -""" - -# Copyright 2000-2010 Steven Knight -# This module is free software, and you may redistribute it and/or modify -# it under the same terms as Python itself, so long as this copyright message -# and disclaimer are retained in their original form. -# -# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, -# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF -# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH -# DAMAGE. -# -# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, -# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, -# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
- -__author__ = "Steven Knight " -__revision__ = "TestCmd.py 0.37.D001 2010/01/11 16:55:50 knight" -__version__ = "0.37" - -import errno -import os -import os.path -import re -import shutil -import stat -import string -import sys -import tempfile -import time -import traceback -import types -import UserList - -__all__ = [ - 'diff_re', - 'fail_test', - 'no_result', - 'pass_test', - 'match_exact', - 'match_re', - 'match_re_dotall', - 'python_executable', - 'TestCmd' -] - -try: - import difflib -except ImportError: - __all__.append('simple_diff') - -def is_List(e): - return type(e) is types.ListType \ - or isinstance(e, UserList.UserList) - -try: - from UserString import UserString -except ImportError: - class UserString: - pass - -if hasattr(types, 'UnicodeType'): - def is_String(e): - return type(e) is types.StringType \ - or type(e) is types.UnicodeType \ - or isinstance(e, UserString) -else: - def is_String(e): - return type(e) is types.StringType or isinstance(e, UserString) - -tempfile.template = 'testcmd.' -if os.name in ('posix', 'nt'): - tempfile.template = 'testcmd.' + str(os.getpid()) + '.' -else: - tempfile.template = 'testcmd.' - -re_space = re.compile('\s') - -_Cleanup = [] - -_chain_to_exitfunc = None - -def _clean(): - global _Cleanup - cleanlist = filter(None, _Cleanup) - del _Cleanup[:] - cleanlist.reverse() - for test in cleanlist: - test.cleanup() - if _chain_to_exitfunc: - _chain_to_exitfunc() - -try: - import atexit -except ImportError: - # TODO(1.5): atexit requires python 2.0, so chain sys.exitfunc - try: - _chain_to_exitfunc = sys.exitfunc - except AttributeError: - pass - sys.exitfunc = _clean -else: - atexit.register(_clean) - -try: - zip -except NameError: - def zip(*lists): - result = [] - for i in xrange(min(map(len, lists))): - result.append(tuple(map(lambda l, i=i: l[i], lists))) - return result - -class Collector: - def __init__(self, top): - self.entries = [top] - def __call__(self, arg, dirname, names): - pathjoin = lambda n, d=dirname: os.path.join(d, n) - self.entries.extend(map(pathjoin, names)) - -def _caller(tblist, skip): - string = "" - arr = [] - for file, line, name, text in tblist: - if file[-10:] == "TestCmd.py": - break - arr = [(file, line, name, text)] + arr - atfrom = "at" - for file, line, name, text in arr[skip:]: - if name in ("?", ""): - name = "" - else: - name = " (" + name + ")" - string = string + ("%s line %d of %s%s\n" % (atfrom, line, file, name)) - atfrom = "\tfrom" - return string - -def fail_test(self = None, condition = 1, function = None, skip = 0): - """Cause the test to fail. - - By default, the fail_test() method reports that the test FAILED - and exits with a status of 1. If a condition argument is supplied, - the test fails only if the condition is true. - """ - if not condition: - return - if not function is None: - function() - of = "" - desc = "" - sep = " " - if not self is None: - if self.program: - of = " of " + self.program - sep = "\n\t" - if self.description: - desc = " [" + self.description + "]" - sep = "\n\t" - - at = _caller(traceback.extract_stack(), skip) - sys.stderr.write("FAILED test" + of + desc + sep + at) - - sys.exit(1) - -def no_result(self = None, condition = 1, function = None, skip = 0): - """Causes a test to exit with no valid result. - - By default, the no_result() method reports NO RESULT for the test - and exits with a status of 2. If a condition argument is supplied, - the test fails only if the condition is true. 
- """ - if not condition: - return - if not function is None: - function() - of = "" - desc = "" - sep = " " - if not self is None: - if self.program: - of = " of " + self.program - sep = "\n\t" - if self.description: - desc = " [" + self.description + "]" - sep = "\n\t" - - at = _caller(traceback.extract_stack(), skip) - sys.stderr.write("NO RESULT for test" + of + desc + sep + at) - - sys.exit(2) - -def pass_test(self = None, condition = 1, function = None): - """Causes a test to pass. - - By default, the pass_test() method reports PASSED for the test - and exits with a status of 0. If a condition argument is supplied, - the test passes only if the condition is true. - """ - if not condition: - return - if not function is None: - function() - sys.stderr.write("PASSED\n") - sys.exit(0) - -def match_exact(lines = None, matches = None): - """ - """ - if not is_List(lines): - lines = string.split(lines, "\n") - if not is_List(matches): - matches = string.split(matches, "\n") - if len(lines) != len(matches): - return - for i in range(len(lines)): - if lines[i] != matches[i]: - return - return 1 - -def match_re(lines = None, res = None): - """ - """ - if not is_List(lines): - lines = string.split(lines, "\n") - if not is_List(res): - res = string.split(res, "\n") - if len(lines) != len(res): - return - for i in range(len(lines)): - s = "^" + res[i] + "$" - try: - expr = re.compile(s) - except re.error, e: - msg = "Regular expression error in %s: %s" - raise re.error, msg % (repr(s), e[0]) - if not expr.search(lines[i]): - return - return 1 - -def match_re_dotall(lines = None, res = None): - """ - """ - if not type(lines) is type(""): - lines = string.join(lines, "\n") - if not type(res) is type(""): - res = string.join(res, "\n") - s = "^" + res + "$" - try: - expr = re.compile(s, re.DOTALL) - except re.error, e: - msg = "Regular expression error in %s: %s" - raise re.error, msg % (repr(s), e[0]) - if expr.match(lines): - return 1 - -try: - import difflib -except ImportError: - pass -else: - def simple_diff(a, b, fromfile='', tofile='', - fromfiledate='', tofiledate='', n=3, lineterm='\n'): - """ - A function with the same calling signature as difflib.context_diff - (diff -c) and difflib.unified_diff (diff -u) but which prints - output like the simple, unadorned 'diff" command. - """ - sm = difflib.SequenceMatcher(None, a, b) - def comma(x1, x2): - return x1+1 == x2 and str(x2) or '%s,%s' % (x1+1, x2) - result = [] - for op, a1, a2, b1, b2 in sm.get_opcodes(): - if op == 'delete': - result.append("%sd%d" % (comma(a1, a2), b1)) - result.extend(map(lambda l: '< ' + l, a[a1:a2])) - elif op == 'insert': - result.append("%da%s" % (a1, comma(b1, b2))) - result.extend(map(lambda l: '> ' + l, b[b1:b2])) - elif op == 'replace': - result.append("%sc%s" % (comma(a1, a2), comma(b1, b2))) - result.extend(map(lambda l: '< ' + l, a[a1:a2])) - result.append('---') - result.extend(map(lambda l: '> ' + l, b[b1:b2])) - return result - -def diff_re(a, b, fromfile='', tofile='', - fromfiledate='', tofiledate='', n=3, lineterm='\n'): - """ - A simple "diff" of two sets of lines when the expected lines - are regular expressions. This is a really dumb thing that - just compares each line in turn, so it doesn't look for - chunks of matching lines and the like--but at least it lets - you know exactly which line first didn't compare correctl... 
- """ - result = [] - diff = len(a) - len(b) - if diff < 0: - a = a + ['']*(-diff) - elif diff > 0: - b = b + ['']*diff - i = 0 - for aline, bline in zip(a, b): - s = "^" + aline + "$" - try: - expr = re.compile(s) - except re.error, e: - msg = "Regular expression error in %s: %s" - raise re.error, msg % (repr(s), e[0]) - if not expr.search(bline): - result.append("%sc%s" % (i+1, i+1)) - result.append('< ' + repr(a[i])) - result.append('---') - result.append('> ' + repr(b[i])) - i = i+1 - return result - -if os.name == 'java': - - python_executable = os.path.join(sys.prefix, 'jython') - -else: - - python_executable = sys.executable - -if sys.platform == 'win32': - - default_sleep_seconds = 2 - - def where_is(file, path=None, pathext=None): - if path is None: - path = os.environ['PATH'] - if is_String(path): - path = string.split(path, os.pathsep) - if pathext is None: - pathext = os.environ['PATHEXT'] - if is_String(pathext): - pathext = string.split(pathext, os.pathsep) - for ext in pathext: - if string.lower(ext) == string.lower(file[-len(ext):]): - pathext = [''] - break - for dir in path: - f = os.path.join(dir, file) - for ext in pathext: - fext = f + ext - if os.path.isfile(fext): - return fext - return None - -else: - - def where_is(file, path=None, pathext=None): - if path is None: - path = os.environ['PATH'] - if is_String(path): - path = string.split(path, os.pathsep) - for dir in path: - f = os.path.join(dir, file) - if os.path.isfile(f): - try: - st = os.stat(f) - except OSError: - continue - if stat.S_IMODE(st[stat.ST_MODE]) & 0111: - return f - return None - - default_sleep_seconds = 1 - - - -try: - import subprocess -except ImportError: - # The subprocess module doesn't exist in this version of Python, - # so we're going to cobble up something that looks just enough - # like its API for our purposes below. - import new - - subprocess = new.module('subprocess') - - subprocess.PIPE = 'PIPE' - subprocess.STDOUT = 'STDOUT' - subprocess.mswindows = (sys.platform == 'win32') - - try: - import popen2 - popen2.Popen3 - except AttributeError: - class Popen3: - universal_newlines = 1 - def __init__(self, command, **kw): - if sys.platform == 'win32' and command[0] == '"': - command = '"' + command + '"' - (stdin, stdout, stderr) = os.popen3(' ' + command) - self.stdin = stdin - self.stdout = stdout - self.stderr = stderr - def close_output(self): - self.stdout.close() - self.resultcode = self.stderr.close() - def wait(self): - resultcode = self.resultcode - if os.WIFEXITED(resultcode): - return os.WEXITSTATUS(resultcode) - elif os.WIFSIGNALED(resultcode): - return os.WTERMSIG(resultcode) - else: - return None - - else: - try: - popen2.Popen4 - except AttributeError: - # A cribbed Popen4 class, with some retrofitted code from - # the Python 1.5 Popen3 class methods to do certain things - # by hand. 
- class Popen4(popen2.Popen3): - childerr = None - - def __init__(self, cmd, bufsize=-1): - p2cread, p2cwrite = os.pipe() - c2pread, c2pwrite = os.pipe() - self.pid = os.fork() - if self.pid == 0: - # Child - os.dup2(p2cread, 0) - os.dup2(c2pwrite, 1) - os.dup2(c2pwrite, 2) - for i in range(3, popen2.MAXFD): - try: - os.close(i) - except: pass - try: - os.execvp(cmd[0], cmd) - finally: - os._exit(1) - # Shouldn't come here, I guess - os._exit(1) - os.close(p2cread) - self.tochild = os.fdopen(p2cwrite, 'w', bufsize) - os.close(c2pwrite) - self.fromchild = os.fdopen(c2pread, 'r', bufsize) - popen2._active.append(self) - - popen2.Popen4 = Popen4 - - class Popen3(popen2.Popen3, popen2.Popen4): - universal_newlines = 1 - def __init__(self, command, **kw): - if kw.get('stderr') == 'STDOUT': - apply(popen2.Popen4.__init__, (self, command, 1)) - else: - apply(popen2.Popen3.__init__, (self, command, 1)) - self.stdin = self.tochild - self.stdout = self.fromchild - self.stderr = self.childerr - def wait(self, *args, **kw): - resultcode = apply(popen2.Popen3.wait, (self,)+args, kw) - if os.WIFEXITED(resultcode): - return os.WEXITSTATUS(resultcode) - elif os.WIFSIGNALED(resultcode): - return os.WTERMSIG(resultcode) - else: - return None - - subprocess.Popen = Popen3 - - - -# From Josiah Carlson, -# ASPN : Python Cookbook : Module to allow Asynchronous subprocess use on Windows and Posix platforms -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/440554 - -PIPE = subprocess.PIPE - -if subprocess.mswindows: - from win32file import ReadFile, WriteFile - from win32pipe import PeekNamedPipe - import msvcrt -else: - import select - import fcntl - - try: fcntl.F_GETFL - except AttributeError: fcntl.F_GETFL = 3 - - try: fcntl.F_SETFL - except AttributeError: fcntl.F_SETFL = 4 - -class Popen(subprocess.Popen): - def recv(self, maxsize=None): - return self._recv('stdout', maxsize) - - def recv_err(self, maxsize=None): - return self._recv('stderr', maxsize) - - def send_recv(self, input='', maxsize=None): - return self.send(input), self.recv(maxsize), self.recv_err(maxsize) - - def get_conn_maxsize(self, which, maxsize): - if maxsize is None: - maxsize = 1024 - elif maxsize < 1: - maxsize = 1 - return getattr(self, which), maxsize - - def _close(self, which): - getattr(self, which).close() - setattr(self, which, None) - - if subprocess.mswindows: - def send(self, input): - if not self.stdin: - return None - - try: - x = msvcrt.get_osfhandle(self.stdin.fileno()) - (errCode, written) = WriteFile(x, input) - except ValueError: - return self._close('stdin') - except (subprocess.pywintypes.error, Exception), why: - if why[0] in (109, errno.ESHUTDOWN): - return self._close('stdin') - raise - - return written - - def _recv(self, which, maxsize): - conn, maxsize = self.get_conn_maxsize(which, maxsize) - if conn is None: - return None - - try: - x = msvcrt.get_osfhandle(conn.fileno()) - (read, nAvail, nMessage) = PeekNamedPipe(x, 0) - if maxsize < nAvail: - nAvail = maxsize - if nAvail > 0: - (errCode, read) = ReadFile(x, nAvail, None) - except ValueError: - return self._close(which) - except (subprocess.pywintypes.error, Exception), why: - if why[0] in (109, errno.ESHUTDOWN): - return self._close(which) - raise - - #if self.universal_newlines: - # read = self._translate_newlines(read) - return read - - else: - def send(self, input): - if not self.stdin: - return None - - if not select.select([], [self.stdin], [], 0)[1]: - return 0 - - try: - written = os.write(self.stdin.fileno(), input) - except OSError, why: - 
if why[0] == errno.EPIPE: #broken pipe - return self._close('stdin') - raise - - return written - - def _recv(self, which, maxsize): - conn, maxsize = self.get_conn_maxsize(which, maxsize) - if conn is None: - return None - - try: - flags = fcntl.fcntl(conn, fcntl.F_GETFL) - except TypeError: - flags = None - else: - if not conn.closed: - fcntl.fcntl(conn, fcntl.F_SETFL, flags| os.O_NONBLOCK) - - try: - if not select.select([conn], [], [], 0)[0]: - return '' - - r = conn.read(maxsize) - if not r: - return self._close(which) - - #if self.universal_newlines: - # r = self._translate_newlines(r) - return r - finally: - if not conn.closed and not flags is None: - fcntl.fcntl(conn, fcntl.F_SETFL, flags) - -disconnect_message = "Other end disconnected!" - -def recv_some(p, t=.1, e=1, tr=5, stderr=0): - if tr < 1: - tr = 1 - x = time.time()+t - y = [] - r = '' - pr = p.recv - if stderr: - pr = p.recv_err - while time.time() < x or r: - r = pr() - if r is None: - if e: - raise Exception(disconnect_message) - else: - break - elif r: - y.append(r) - else: - time.sleep(max((x-time.time())/tr, 0)) - return ''.join(y) - -# TODO(3.0: rewrite to use memoryview() -def send_all(p, data): - while len(data): - sent = p.send(data) - if sent is None: - raise Exception(disconnect_message) - data = buffer(data, sent) - - - -try: - object -except NameError: - class object: - pass - - - -class TestCmd(object): - """Class TestCmd - """ - - def __init__(self, description = None, - program = None, - interpreter = None, - workdir = None, - subdir = None, - verbose = None, - match = None, - diff = None, - combine = 0, - universal_newlines = 1): - self._cwd = os.getcwd() - self.description_set(description) - self.program_set(program) - self.interpreter_set(interpreter) - if verbose is None: - try: - verbose = max( 0, int(os.environ.get('TESTCMD_VERBOSE', 0)) ) - except ValueError: - verbose = 0 - self.verbose_set(verbose) - self.combine = combine - self.universal_newlines = universal_newlines - if not match is None: - self.match_function = match - else: - self.match_function = match_re - if not diff is None: - self.diff_function = diff - else: - try: - difflib - except NameError: - pass - else: - self.diff_function = simple_diff - #self.diff_function = difflib.context_diff - #self.diff_function = difflib.unified_diff - self._dirlist = [] - self._preserve = {'pass_test': 0, 'fail_test': 0, 'no_result': 0} - if os.environ.has_key('PRESERVE') and not os.environ['PRESERVE'] is '': - self._preserve['pass_test'] = os.environ['PRESERVE'] - self._preserve['fail_test'] = os.environ['PRESERVE'] - self._preserve['no_result'] = os.environ['PRESERVE'] - else: - try: - self._preserve['pass_test'] = os.environ['PRESERVE_PASS'] - except KeyError: - pass - try: - self._preserve['fail_test'] = os.environ['PRESERVE_FAIL'] - except KeyError: - pass - try: - self._preserve['no_result'] = os.environ['PRESERVE_NO_RESULT'] - except KeyError: - pass - self._stdout = [] - self._stderr = [] - self.status = None - self.condition = 'no_result' - self.workdir_set(workdir) - self.subdir(subdir) - - def __del__(self): - self.cleanup() - - def __repr__(self): - return "%x" % id(self) - - banner_char = '=' - banner_width = 80 - - def banner(self, s, width=None): - if width is None: - width = self.banner_width - return s + self.banner_char * (width - len(s)) - - if os.name == 'posix': - - def escape(self, arg): - "escape shell special characters" - slash = '\\' - special = '"$' - - arg = string.replace(arg, slash, slash+slash) - for c in special: - arg = 
string.replace(arg, c, slash+c) - - if re_space.search(arg): - arg = '"' + arg + '"' - return arg - - else: - - # Windows does not allow special characters in file names - # anyway, so no need for an escape function, we will just quote - # the arg. - def escape(self, arg): - if re_space.search(arg): - arg = '"' + arg + '"' - return arg - - def canonicalize(self, path): - if is_List(path): - path = apply(os.path.join, tuple(path)) - if not os.path.isabs(path): - path = os.path.join(self.workdir, path) - return path - - def chmod(self, path, mode): - """Changes permissions on the specified file or directory - path name.""" - path = self.canonicalize(path) - os.chmod(path, mode) - - def cleanup(self, condition = None): - """Removes any temporary working directories for the specified - TestCmd environment. If the environment variable PRESERVE was - set when the TestCmd environment was created, temporary working - directories are not removed. If any of the environment variables - PRESERVE_PASS, PRESERVE_FAIL, or PRESERVE_NO_RESULT were set - when the TestCmd environment was created, then temporary working - directories are not removed if the test passed, failed, or had - no result, respectively. Temporary working directories are also - preserved for conditions specified via the preserve method. - - Typically, this method is not called directly, but is used when - the script exits to clean up temporary working directories as - appropriate for the exit status. - """ - if not self._dirlist: - return - os.chdir(self._cwd) - self.workdir = None - if condition is None: - condition = self.condition - if self._preserve[condition]: - for dir in self._dirlist: - print "Preserved directory", dir - else: - list = self._dirlist[:] - list.reverse() - for dir in list: - self.writable(dir, 1) - shutil.rmtree(dir, ignore_errors = 1) - self._dirlist = [] - - try: - global _Cleanup - _Cleanup.remove(self) - except (AttributeError, ValueError): - pass - - def command_args(self, program = None, - interpreter = None, - arguments = None): - if program: - if type(program) == type('') and not os.path.isabs(program): - program = os.path.join(self._cwd, program) - else: - program = self.program - if not interpreter: - interpreter = self.interpreter - if not type(program) in [type([]), type(())]: - program = [program] - cmd = list(program) - if interpreter: - if not type(interpreter) in [type([]), type(())]: - interpreter = [interpreter] - cmd = list(interpreter) + cmd - if arguments: - if type(arguments) == type(''): - arguments = string.split(arguments) - cmd.extend(arguments) - return cmd - - def description_set(self, description): - """Set the description of the functionality being tested. - """ - self.description = description - - try: - difflib - except NameError: - def diff(self, a, b, name, *args, **kw): - print self.banner('Expected %s' % name) - print a - print self.banner('Actual %s' % name) - print b - else: - def diff(self, a, b, name, *args, **kw): - print self.banner(name) - args = (a.splitlines(), b.splitlines()) + args - lines = apply(self.diff_function, args, kw) - for l in lines: - print l - - def fail_test(self, condition = 1, function = None, skip = 0): - """Cause the test to fail. - """ - if not condition: - return - self.condition = 'fail_test' - fail_test(self = self, - condition = condition, - function = function, - skip = skip) - - def interpreter_set(self, interpreter): - """Set the program to be used to interpret the program - under test as a script. 
- """ - self.interpreter = interpreter - - def match(self, lines, matches): - """Compare actual and expected file contents. - """ - return self.match_function(lines, matches) - - def match_exact(self, lines, matches): - """Compare actual and expected file contents. - """ - return match_exact(lines, matches) - - def match_re(self, lines, res): - """Compare actual and expected file contents. - """ - return match_re(lines, res) - - def match_re_dotall(self, lines, res): - """Compare actual and expected file contents. - """ - return match_re_dotall(lines, res) - - def no_result(self, condition = 1, function = None, skip = 0): - """Report that the test could not be run. - """ - if not condition: - return - self.condition = 'no_result' - no_result(self = self, - condition = condition, - function = function, - skip = skip) - - def pass_test(self, condition = 1, function = None): - """Cause the test to pass. - """ - if not condition: - return - self.condition = 'pass_test' - pass_test(self = self, condition = condition, function = function) - - def preserve(self, *conditions): - """Arrange for the temporary working directories for the - specified TestCmd environment to be preserved for one or more - conditions. If no conditions are specified, arranges for - the temporary working directories to be preserved for all - conditions. - """ - if conditions is (): - conditions = ('pass_test', 'fail_test', 'no_result') - for cond in conditions: - self._preserve[cond] = 1 - - def program_set(self, program): - """Set the executable program or script to be tested. - """ - if program and not os.path.isabs(program): - program = os.path.join(self._cwd, program) - self.program = program - - def read(self, file, mode = 'rb'): - """Reads and returns the contents of the specified file name. - The file name may be a list, in which case the elements are - concatenated with the os.path.join() method. The file is - assumed to be under the temporary working directory unless it - is an absolute path name. The I/O mode for the file may - be specified; it must begin with an 'r'. The default is - 'rb' (binary read). - """ - file = self.canonicalize(file) - if mode[0] != 'r': - raise ValueError, "mode must begin with 'r'" - return open(file, mode).read() - - def rmdir(self, dir): - """Removes the specified dir name. - The dir name may be a list, in which case the elements are - concatenated with the os.path.join() method. The dir is - assumed to be under the temporary working directory unless it - is an absolute path name. - The dir must be empty. - """ - dir = self.canonicalize(dir) - os.rmdir(dir) - - def start(self, program = None, - interpreter = None, - arguments = None, - universal_newlines = None, - **kw): - """ - Starts a program or script for the test environment. - - The specified program will have the original directory - prepended unless it is enclosed in a [list]. - """ - cmd = self.command_args(program, interpreter, arguments) - cmd_string = string.join(map(self.escape, cmd), ' ') - if self.verbose: - sys.stderr.write(cmd_string + "\n") - if universal_newlines is None: - universal_newlines = self.universal_newlines - - # On Windows, if we make stdin a pipe when we plan to send - # no input, and the test program exits before - # Popen calls msvcrt.open_osfhandle, that call will fail. - # So don't use a pipe for stdin if we don't need one. 
- stdin = kw.get('stdin', None) - if stdin is not None: - stdin = subprocess.PIPE - - combine = kw.get('combine', self.combine) - if combine: - stderr_value = subprocess.STDOUT - else: - stderr_value = subprocess.PIPE - - return Popen(cmd, - stdin=stdin, - stdout=subprocess.PIPE, - stderr=stderr_value, - universal_newlines=universal_newlines) - - def finish(self, popen, **kw): - """ - Finishes and waits for the process being run under control of - the specified popen argument, recording the exit status, - standard output and error output. - """ - popen.stdin.close() - self.status = popen.wait() - if not self.status: - self.status = 0 - self._stdout.append(popen.stdout.read()) - if popen.stderr: - stderr = popen.stderr.read() - else: - stderr = '' - self._stderr.append(stderr) - - def run(self, program = None, - interpreter = None, - arguments = None, - chdir = None, - stdin = None, - universal_newlines = None): - """Runs a test of the program or script for the test - environment. Standard output and error output are saved for - future retrieval via the stdout() and stderr() methods. - - The specified program will have the original directory - prepended unless it is enclosed in a [list]. - """ - if chdir: - oldcwd = os.getcwd() - if not os.path.isabs(chdir): - chdir = os.path.join(self.workpath(chdir)) - if self.verbose: - sys.stderr.write("chdir(" + chdir + ")\n") - os.chdir(chdir) - p = self.start(program, - interpreter, - arguments, - universal_newlines, - stdin=stdin) - if stdin: - if is_List(stdin): - for line in stdin: - p.stdin.write(line) - else: - p.stdin.write(stdin) - p.stdin.close() - - out = p.stdout.read() - if p.stderr is None: - err = '' - else: - err = p.stderr.read() - try: - close_output = p.close_output - except AttributeError: - p.stdout.close() - if not p.stderr is None: - p.stderr.close() - else: - close_output() - - self._stdout.append(out) - self._stderr.append(err) - - self.status = p.wait() - if not self.status: - self.status = 0 - - if chdir: - os.chdir(oldcwd) - if self.verbose >= 2: - write = sys.stdout.write - write('============ STATUS: %d\n' % self.status) - out = self.stdout() - if out or self.verbose >= 3: - write('============ BEGIN STDOUT (len=%d):\n' % len(out)) - write(out) - write('============ END STDOUT\n') - err = self.stderr() - if err or self.verbose >= 3: - write('============ BEGIN STDERR (len=%d)\n' % len(err)) - write(err) - write('============ END STDERR\n') - - def sleep(self, seconds = default_sleep_seconds): - """Sleeps at least the specified number of seconds. If no - number is specified, sleeps at least the minimum number of - seconds necessary to advance file time stamps on the current - system. Sleeping more seconds is all right. - """ - time.sleep(seconds) - - def stderr(self, run = None): - """Returns the error output from the specified run number. - If there is no specified run number, then returns the error - output of the last run. If the run number is less than zero, - then returns the error output from that many runs back from the - current run. - """ - if not run: - run = len(self._stderr) - elif run < 0: - run = len(self._stderr) + run - run = run - 1 - return self._stderr[run] - - def stdout(self, run = None): - """Returns the standard output from the specified run number. - If there is no specified run number, then returns the standard - output of the last run. If the run number is less than zero, - then returns the standard output from that many runs back from - the current run. 
- """ - if not run: - run = len(self._stdout) - elif run < 0: - run = len(self._stdout) + run - run = run - 1 - return self._stdout[run] - - def subdir(self, *subdirs): - """Create new subdirectories under the temporary working - directory, one for each argument. An argument may be a list, - in which case the list elements are concatenated using the - os.path.join() method. Subdirectories multiple levels deep - must be created using a separate argument for each level: - - test.subdir('sub', ['sub', 'dir'], ['sub', 'dir', 'ectory']) - - Returns the number of subdirectories actually created. - """ - count = 0 - for sub in subdirs: - if sub is None: - continue - if is_List(sub): - sub = apply(os.path.join, tuple(sub)) - new = os.path.join(self.workdir, sub) - try: - os.mkdir(new) - except OSError: - pass - else: - count = count + 1 - return count - - def symlink(self, target, link): - """Creates a symlink to the specified target. - The link name may be a list, in which case the elements are - concatenated with the os.path.join() method. The link is - assumed to be under the temporary working directory unless it - is an absolute path name. The target is *not* assumed to be - under the temporary working directory. - """ - link = self.canonicalize(link) - os.symlink(target, link) - - def tempdir(self, path=None): - """Creates a temporary directory. - A unique directory name is generated if no path name is specified. - The directory is created, and will be removed when the TestCmd - object is destroyed. - """ - if path is None: - try: - path = tempfile.mktemp(prefix=tempfile.template) - except TypeError: - path = tempfile.mktemp() - os.mkdir(path) - - # Symlinks in the path will report things - # differently from os.getcwd(), so chdir there - # and back to fetch the canonical path. - cwd = os.getcwd() - try: - os.chdir(path) - path = os.getcwd() - finally: - os.chdir(cwd) - - # Uppercase the drive letter since the case of drive - # letters is pretty much random on win32: - drive,rest = os.path.splitdrive(path) - if drive: - path = string.upper(drive) + rest - - # - self._dirlist.append(path) - global _Cleanup - try: - _Cleanup.index(self) - except ValueError: - _Cleanup.append(self) - - return path - - def touch(self, path, mtime=None): - """Updates the modification time on the specified file or - directory path name. The default is to update to the - current time if no explicit modification time is specified. - """ - path = self.canonicalize(path) - atime = os.path.getatime(path) - if mtime is None: - mtime = time.time() - os.utime(path, (atime, mtime)) - - def unlink(self, file): - """Unlinks the specified file name. - The file name may be a list, in which case the elements are - concatenated with the os.path.join() method. The file is - assumed to be under the temporary working directory unless it - is an absolute path name. - """ - file = self.canonicalize(file) - os.unlink(file) - - def verbose_set(self, verbose): - """Set the verbose level. - """ - self.verbose = verbose - - def where_is(self, file, path=None, pathext=None): - """Find an executable file. - """ - if is_List(file): - file = apply(os.path.join, tuple(file)) - if not os.path.isabs(file): - file = where_is(file, path, pathext) - return file - - def workdir_set(self, path): - """Creates a temporary working directory with the specified - path name. If the path is a null string (''), a unique - directory name is created. 
- """ - if (path != None): - if path == '': - path = None - path = self.tempdir(path) - self.workdir = path - - def workpath(self, *args): - """Returns the absolute path name to a subdirectory or file - within the current temporary working directory. Concatenates - the temporary working directory name with the specified - arguments using the os.path.join() method. - """ - return apply(os.path.join, (self.workdir,) + tuple(args)) - - def readable(self, top, read=1): - """Make the specified directory tree readable (read == 1) - or not (read == None). - - This method has no effect on Windows systems, which use a - completely different mechanism to control file readability. - """ - - if sys.platform == 'win32': - return - - if read: - def do_chmod(fname): - try: st = os.stat(fname) - except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IREAD)) - else: - def do_chmod(fname): - try: st = os.stat(fname) - except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IREAD)) - - if os.path.isfile(top): - # If it's a file, that's easy, just chmod it. - do_chmod(top) - elif read: - # It's a directory and we're trying to turn on read - # permission, so it's also pretty easy, just chmod the - # directory and then chmod every entry on our walk down the - # tree. Because os.path.walk() is top-down, we'll enable - # read permission on any directories that have it disabled - # before os.path.walk() tries to list their contents. - do_chmod(top) - - def chmod_entries(arg, dirname, names, do_chmod=do_chmod): - for n in names: - do_chmod(os.path.join(dirname, n)) - - os.path.walk(top, chmod_entries, None) - else: - # It's a directory and we're trying to turn off read - # permission, which means we have to chmod the directoreis - # in the tree bottom-up, lest disabling read permission from - # the top down get in the way of being able to get at lower - # parts of the tree. But os.path.walk() visits things top - # down, so we just use an object to collect a list of all - # of the entries in the tree, reverse the list, and then - # chmod the reversed (bottom-up) list. - col = Collector(top) - os.path.walk(top, col, None) - col.entries.reverse() - for d in col.entries: do_chmod(d) - - def writable(self, top, write=1): - """Make the specified directory tree writable (write == 1) - or not (write == None). - """ - - if sys.platform == 'win32': - - if write: - def do_chmod(fname): - try: os.chmod(fname, stat.S_IWRITE) - except OSError: pass - else: - def do_chmod(fname): - try: os.chmod(fname, stat.S_IREAD) - except OSError: pass - - else: - - if write: - def do_chmod(fname): - try: st = os.stat(fname) - except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|0200)) - else: - def do_chmod(fname): - try: st = os.stat(fname) - except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~0200)) - - if os.path.isfile(top): - do_chmod(top) - else: - col = Collector(top) - os.path.walk(top, col, None) - for d in col.entries: do_chmod(d) - - def executable(self, top, execute=1): - """Make the specified directory tree executable (execute == 1) - or not (execute == None). - - This method has no effect on Windows systems, which use a - completely different mechanism to control file executability. 
- """ - - if sys.platform == 'win32': - return - - if execute: - def do_chmod(fname): - try: st = os.stat(fname) - except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IEXEC)) - else: - def do_chmod(fname): - try: st = os.stat(fname) - except OSError: pass - else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IEXEC)) - - if os.path.isfile(top): - # If it's a file, that's easy, just chmod it. - do_chmod(top) - elif execute: - # It's a directory and we're trying to turn on execute - # permission, so it's also pretty easy, just chmod the - # directory and then chmod every entry on our walk down the - # tree. Because os.path.walk() is top-down, we'll enable - # execute permission on any directories that have it disabled - # before os.path.walk() tries to list their contents. - do_chmod(top) - - def chmod_entries(arg, dirname, names, do_chmod=do_chmod): - for n in names: - do_chmod(os.path.join(dirname, n)) - - os.path.walk(top, chmod_entries, None) - else: - # It's a directory and we're trying to turn off execute - # permission, which means we have to chmod the directories - # in the tree bottom-up, lest disabling execute permission from - # the top down get in the way of being able to get at lower - # parts of the tree. But os.path.walk() visits things top - # down, so we just use an object to collect a list of all - # of the entries in the tree, reverse the list, and then - # chmod the reversed (bottom-up) list. - col = Collector(top) - os.path.walk(top, col, None) - col.entries.reverse() - for d in col.entries: do_chmod(d) - - def write(self, file, content, mode = 'wb'): - """Writes the specified content text (second argument) to the - specified file name (first argument). The file name may be - a list, in which case the elements are concatenated with the - os.path.join() method. The file is created under the temporary - working directory. Any subdirectories in the path must already - exist. The I/O mode for the file may be specified; it must - begin with a 'w'. The default is 'wb' (binary write). - """ - file = self.canonicalize(file) - if mode[0] != 'w': - raise ValueError, "mode must begin with 'w'" - open(file, mode).write(content) - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/third_party/gyp/test/lib/TestCommon.py b/third_party/gyp/test/lib/TestCommon.py deleted file mode 100644 index 4aa7185..0000000 --- a/third_party/gyp/test/lib/TestCommon.py +++ /dev/null @@ -1,581 +0,0 @@ -""" -TestCommon.py: a testing framework for commands and scripts - with commonly useful error handling - -The TestCommon module provides a simple, high-level interface for writing -tests of executable commands and scripts, especially commands and scripts -that interact with the file system. All methods throw exceptions and -exit on failure, with useful error messages. This makes a number of -explicit checks unnecessary, making the test scripts themselves simpler -to write and easier to read. - -The TestCommon class is a subclass of the TestCmd class. In essence, -TestCommon is a wrapper that handles common TestCmd error conditions in -useful ways. You can use TestCommon directly, or subclass it for your -program and add additional (or override) methods to tailor it to your -program's specific needs. Alternatively, the TestCommon class serves -as a useful example of how to define your own TestCmd subclass. 
- -As a subclass of TestCmd, TestCommon provides access to all of the -variables and methods from the TestCmd module. Consequently, you can -use any variable or method documented in the TestCmd module without -having to explicitly import TestCmd. - -A TestCommon environment object is created via the usual invocation: - - import TestCommon - test = TestCommon.TestCommon() - -You can use all of the TestCmd keyword arguments when instantiating a -TestCommon object; see the TestCmd documentation for details. - -Here is an overview of the methods and keyword arguments that are -provided by the TestCommon class: - - test.must_be_writable('file1', ['file2', ...]) - - test.must_contain('file', 'required text\n') - - test.must_contain_all_lines(output, lines, ['title', find]) - - test.must_contain_any_line(output, lines, ['title', find]) - - test.must_exist('file1', ['file2', ...]) - - test.must_match('file', "expected contents\n") - - test.must_not_be_writable('file1', ['file2', ...]) - - test.must_not_contain('file', 'banned text\n') - - test.must_not_contain_any_line(output, lines, ['title', find]) - - test.must_not_exist('file1', ['file2', ...]) - - test.run(options = "options to be prepended to arguments", - stdout = "expected standard output from the program", - stderr = "expected error output from the program", - status = expected_status, - match = match_function) - -The TestCommon module also provides the following variables - - TestCommon.python_executable - TestCommon.exe_suffix - TestCommon.obj_suffix - TestCommon.shobj_prefix - TestCommon.shobj_suffix - TestCommon.lib_prefix - TestCommon.lib_suffix - TestCommon.dll_prefix - TestCommon.dll_suffix - -""" - -# Copyright 2000-2010 Steven Knight -# This module is free software, and you may redistribute it and/or modify -# it under the same terms as Python itself, so long as this copyright message -# and disclaimer are retained in their original form. -# -# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, -# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF -# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH -# DAMAGE. -# -# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, -# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, -# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. - -__author__ = "Steven Knight " -__revision__ = "TestCommon.py 0.37.D001 2010/01/11 16:55:50 knight" -__version__ = "0.37" - -import copy -import os -import os.path -import stat -import string -import sys -import types -import UserList - -from TestCmd import * -from TestCmd import __all__ - -__all__.extend([ 'TestCommon', - 'exe_suffix', - 'obj_suffix', - 'shobj_prefix', - 'shobj_suffix', - 'lib_prefix', - 'lib_suffix', - 'dll_prefix', - 'dll_suffix', - ]) - -# Variables that describe the prefixes and suffixes on this system. 
-if sys.platform == 'win32': - exe_suffix = '.exe' - obj_suffix = '.obj' - shobj_suffix = '.obj' - shobj_prefix = '' - lib_prefix = '' - lib_suffix = '.lib' - dll_prefix = '' - dll_suffix = '.dll' -elif sys.platform == 'cygwin': - exe_suffix = '.exe' - obj_suffix = '.o' - shobj_suffix = '.os' - shobj_prefix = '' - lib_prefix = 'lib' - lib_suffix = '.a' - dll_prefix = '' - dll_suffix = '.dll' -elif string.find(sys.platform, 'irix') != -1: - exe_suffix = '' - obj_suffix = '.o' - shobj_suffix = '.o' - shobj_prefix = '' - lib_prefix = 'lib' - lib_suffix = '.a' - dll_prefix = 'lib' - dll_suffix = '.so' -elif string.find(sys.platform, 'darwin') != -1: - exe_suffix = '' - obj_suffix = '.o' - shobj_suffix = '.os' - shobj_prefix = '' - lib_prefix = 'lib' - lib_suffix = '.a' - dll_prefix = 'lib' - dll_suffix = '.dylib' -elif string.find(sys.platform, 'sunos') != -1: - exe_suffix = '' - obj_suffix = '.o' - shobj_suffix = '.os' - shobj_prefix = 'so_' - lib_prefix = 'lib' - lib_suffix = '.a' - dll_prefix = 'lib' - dll_suffix = '.dylib' -else: - exe_suffix = '' - obj_suffix = '.o' - shobj_suffix = '.os' - shobj_prefix = '' - lib_prefix = 'lib' - lib_suffix = '.a' - dll_prefix = 'lib' - dll_suffix = '.so' - -def is_List(e): - return type(e) is types.ListType \ - or isinstance(e, UserList.UserList) - -def is_writable(f): - mode = os.stat(f)[stat.ST_MODE] - return mode & stat.S_IWUSR - -def separate_files(flist): - existing = [] - missing = [] - for f in flist: - if os.path.exists(f): - existing.append(f) - else: - missing.append(f) - return existing, missing - -if os.name == 'posix': - def _failed(self, status = 0): - if self.status is None or status is None: - return None - return _status(self) != status - def _status(self): - return self.status -elif os.name == 'nt': - def _failed(self, status = 0): - return not (self.status is None or status is None) and \ - self.status != status - def _status(self): - return self.status - -class TestCommon(TestCmd): - - # Additional methods from the Perl Test::Cmd::Common module - # that we may wish to add in the future: - # - # $test->subdir('subdir', ...); - # - # $test->copy('src_file', 'dst_file'); - - def __init__(self, **kw): - """Initialize a new TestCommon instance. This involves just - calling the base class initialization, and then changing directory - to the workdir. - """ - apply(TestCmd.__init__, [self], kw) - os.chdir(self.workdir) - - def must_be_writable(self, *files): - """Ensures that the specified file(s) exist and are writable. - An individual file can be specified as a list of directory names, - in which case the pathname will be constructed by concatenating - them. Exits FAILED if any of the files does not exist or is - not writable. - """ - files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) - existing, missing = separate_files(files) - unwritable = filter(lambda x, iw=is_writable: not iw(x), existing) - if missing: - print "Missing files: `%s'" % string.join(missing, "', `") - if unwritable: - print "Unwritable files: `%s'" % string.join(unwritable, "', `") - self.fail_test(missing + unwritable) - - def must_contain(self, file, required, mode = 'rb'): - """Ensures that the specified file contains the required text. - """ - file_contents = self.read(file, mode) - contains = (string.find(file_contents, required) != -1) - if not contains: - print "File `%s' does not contain required string." 
% file - print self.banner('Required string ') - print required - print self.banner('%s contents ' % file) - print file_contents - self.fail_test(not contains) - - def must_contain_all_lines(self, output, lines, title=None, find=None): - """Ensures that the specified output string (first argument) - contains all of the specified lines (second argument). - - An optional third argument can be used to describe the type - of output being searched, and only shows up in failure output. - - An optional fourth argument can be used to supply a different - function, of the form "find(line, output), to use when searching - for lines in the output. - """ - if find is None: - find = lambda o, l: string.find(o, l) != -1 - missing = [] - for line in lines: - if not find(output, line): - missing.append(line) - - if missing: - if title is None: - title = 'output' - sys.stdout.write("Missing expected lines from %s:\n" % title) - for line in missing: - sys.stdout.write(' ' + repr(line) + '\n') - sys.stdout.write(self.banner(title + ' ')) - sys.stdout.write(output) - self.fail_test() - - def must_contain_any_line(self, output, lines, title=None, find=None): - """Ensures that the specified output string (first argument) - contains at least one of the specified lines (second argument). - - An optional third argument can be used to describe the type - of output being searched, and only shows up in failure output. - - An optional fourth argument can be used to supply a different - function, of the form "find(line, output), to use when searching - for lines in the output. - """ - if find is None: - find = lambda o, l: string.find(o, l) != -1 - for line in lines: - if find(output, line): - return - - if title is None: - title = 'output' - sys.stdout.write("Missing any expected line from %s:\n" % title) - for line in lines: - sys.stdout.write(' ' + repr(line) + '\n') - sys.stdout.write(self.banner(title + ' ')) - sys.stdout.write(output) - self.fail_test() - - def must_contain_lines(self, lines, output, title=None): - # Deprecated; retain for backwards compatibility. - return self.must_contain_all_lines(output, lines, title) - - def must_exist(self, *files): - """Ensures that the specified file(s) must exist. An individual - file be specified as a list of directory names, in which case the - pathname will be constructed by concatenating them. Exits FAILED - if any of the files does not exist. - """ - files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) - missing = filter(lambda x: not os.path.exists(x), files) - if missing: - print "Missing files: `%s'" % string.join(missing, "', `") - self.fail_test(missing) - - def must_match(self, file, expect, mode = 'rb'): - """Matches the contents of the specified file (first argument) - against the expected contents (second argument). The expected - contents are a list of lines or a string which will be split - on newlines. - """ - file_contents = self.read(file, mode) - try: - self.fail_test(not self.match(file_contents, expect)) - except KeyboardInterrupt: - raise - except: - print "Unexpected contents of `%s'" % file - self.diff(expect, file_contents, 'contents ') - raise - - def must_not_contain(self, file, banned, mode = 'rb'): - """Ensures that the specified file doesn't contain the banned text. - """ - file_contents = self.read(file, mode) - contains = (string.find(file_contents, banned) != -1) - if contains: - print "File `%s' contains banned string." 
% file - print self.banner('Banned string ') - print banned - print self.banner('%s contents ' % file) - print file_contents - self.fail_test(contains) - - def must_not_contain_any_line(self, output, lines, title=None, find=None): - """Ensures that the specified output string (first argument) - does not contain any of the specified lines (second argument). - - An optional third argument can be used to describe the type - of output being searched, and only shows up in failure output. - - An optional fourth argument can be used to supply a different - function, of the form "find(line, output), to use when searching - for lines in the output. - """ - if find is None: - find = lambda o, l: string.find(o, l) != -1 - unexpected = [] - for line in lines: - if find(output, line): - unexpected.append(line) - - if unexpected: - if title is None: - title = 'output' - sys.stdout.write("Unexpected lines in %s:\n" % title) - for line in unexpected: - sys.stdout.write(' ' + repr(line) + '\n') - sys.stdout.write(self.banner(title + ' ')) - sys.stdout.write(output) - self.fail_test() - - def must_not_contain_lines(self, lines, output, title=None): - return self.must_not_contain_any_line(output, lines, title) - - def must_not_exist(self, *files): - """Ensures that the specified file(s) must not exist. - An individual file be specified as a list of directory names, in - which case the pathname will be constructed by concatenating them. - Exits FAILED if any of the files exists. - """ - files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) - existing = filter(os.path.exists, files) - if existing: - print "Unexpected files exist: `%s'" % string.join(existing, "', `") - self.fail_test(existing) - - - def must_not_be_writable(self, *files): - """Ensures that the specified file(s) exist and are not writable. - An individual file can be specified as a list of directory names, - in which case the pathname will be constructed by concatenating - them. Exits FAILED if any of the files does not exist or is - writable. - """ - files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) - existing, missing = separate_files(files) - writable = filter(is_writable, existing) - if missing: - print "Missing files: `%s'" % string.join(missing, "', `") - if writable: - print "Writable files: `%s'" % string.join(writable, "', `") - self.fail_test(missing + writable) - - def _complete(self, actual_stdout, expected_stdout, - actual_stderr, expected_stderr, status, match): - """ - Post-processes running a subcommand, checking for failure - status and displaying output appropriately. - """ - if _failed(self, status): - expect = '' - if status != 0: - expect = " (expected %s)" % str(status) - print "%s returned %s%s" % (self.program, str(_status(self)), expect) - print self.banner('STDOUT ') - print actual_stdout - print self.banner('STDERR ') - print actual_stderr - self.fail_test() - if not expected_stdout is None and not match(actual_stdout, expected_stdout): - self.diff(expected_stdout, actual_stdout, 'STDOUT ') - if actual_stderr: - print self.banner('STDERR ') - print actual_stderr - self.fail_test() - if not expected_stderr is None and not match(actual_stderr, expected_stderr): - print self.banner('STDOUT ') - print actual_stdout - self.diff(expected_stderr, actual_stderr, 'STDERR ') - self.fail_test() - - def start(self, program = None, - interpreter = None, - arguments = None, - universal_newlines = None, - **kw): - """ - Starts a program or script for the test environment. 
- - This handles the "options" keyword argument and exceptions. - """ - try: - options = kw['options'] - del kw['options'] - except KeyError: - pass - else: - if options: - if arguments is None: - arguments = options - else: - arguments = options + " " + arguments - try: - return apply(TestCmd.start, - (self, program, interpreter, arguments, universal_newlines), - kw) - except KeyboardInterrupt: - raise - except Exception, e: - print self.banner('STDOUT ') - try: - print self.stdout() - except IndexError: - pass - print self.banner('STDERR ') - try: - print self.stderr() - except IndexError: - pass - cmd_args = self.command_args(program, interpreter, arguments) - sys.stderr.write('Exception trying to execute: %s\n' % cmd_args) - raise e - - def finish(self, popen, stdout = None, stderr = '', status = 0, **kw): - """ - Finishes and waits for the process being run under control of - the specified popen argument. Additional arguments are similar - to those of the run() method: - - stdout The expected standard output from - the command. A value of None means - don't test standard output. - - stderr The expected error output from - the command. A value of None means - don't test error output. - - status The expected exit status from the - command. A value of None means don't - test exit status. - """ - apply(TestCmd.finish, (self, popen,), kw) - match = kw.get('match', self.match) - self._complete(self.stdout(), stdout, - self.stderr(), stderr, status, match) - - def run(self, options = None, arguments = None, - stdout = None, stderr = '', status = 0, **kw): - """Runs the program under test, checking that the test succeeded. - - The arguments are the same as the base TestCmd.run() method, - with the addition of: - - options Extra options that get appended to the beginning - of the arguments. - - stdout The expected standard output from - the command. A value of None means - don't test standard output. - - stderr The expected error output from - the command. A value of None means - don't test error output. - - status The expected exit status from the - command. A value of None means don't - test exit status. - - By default, this expects a successful exit (status = 0), does - not test standard output (stdout = None), and expects that error - output is empty (stderr = ""). - """ - if options: - if arguments is None: - arguments = options - else: - arguments = options + " " + arguments - kw['arguments'] = arguments - try: - match = kw['match'] - del kw['match'] - except KeyError: - match = self.match - apply(TestCmd.run, [self], kw) - self._complete(self.stdout(), stdout, - self.stderr(), stderr, status, match) - - def skip_test(self, message="Skipping test.\n"): - """Skips a test. - - Proper test-skipping behavior is dependent on the external - TESTCOMMON_PASS_SKIPS environment variable. If set, we treat - the skip as a PASS (exit 0), and otherwise treat it as NO RESULT. - In either case, we print the specified message as an indication - that the substance of the test was skipped. - - (This was originally added to support development under Aegis. - Technically, skipping a test is a NO RESULT, but Aegis would - treat that as a test failure and prevent the change from going to - the next step. Since we ddn't want to force anyone using Aegis - to have to install absolutely every tool used by the tests, we - would actually report to Aegis that a skipped test has PASSED - so that the workflow isn't held up.) 
- """ - if message: - sys.stdout.write(message) - sys.stdout.flush() - pass_skips = os.environ.get('TESTCOMMON_PASS_SKIPS') - if pass_skips in [None, 0, '0']: - # skip=1 means skip this function when showing where this - # result came from. They only care about the line where the - # script called test.skip_test(), not the line number where - # we call test.no_result(). - self.no_result(skip=1) - else: - # We're under the development directory for this change, - # so this is an Aegis invocation; pass the test (exit 0). - self.pass_test() - -# Local Variables: -# tab-width:4 -# indent-tabs-mode:nil -# End: -# vim: set expandtab tabstop=4 shiftwidth=4: diff --git a/third_party/gyp/test/lib/TestGyp.py b/third_party/gyp/test/lib/TestGyp.py deleted file mode 100644 index 81e170c..0000000 --- a/third_party/gyp/test/lib/TestGyp.py +++ /dev/null @@ -1,686 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -TestGyp.py: a testing framework for GYP integration tests. -""" - -import os -import re -import shutil -import stat -import sys - -import TestCommon -from TestCommon import __all__ - -__all__.extend([ - 'TestGyp', -]) - - -class TestGypBase(TestCommon.TestCommon): - """ - Class for controlling end-to-end tests of gyp generators. - - Instantiating this class will create a temporary directory and - arrange for its destruction (via the TestCmd superclass) and - copy all of the non-gyptest files in the directory hierarchy of the - executing script. - - The default behavior is to test the 'gyp' or 'gyp.bat' file in the - current directory. An alternative may be specified explicitly on - instantiation, or by setting the TESTGYP_GYP environment variable. - - This class should be subclassed for each supported gyp generator - (format). Various abstract methods below define calling signatures - used by the test scripts to invoke builds on the generated build - configuration and to run executables generated by those builds. - """ - - build_tool = None - build_tool_list = [] - - _exe = TestCommon.exe_suffix - _obj = TestCommon.obj_suffix - shobj_ = TestCommon.shobj_prefix - _shobj = TestCommon.shobj_suffix - lib_ = TestCommon.lib_prefix - _lib = TestCommon.lib_suffix - dll_ = TestCommon.dll_prefix - _dll = TestCommon.dll_suffix - - # Constants to represent different targets. - ALL = '__all__' - DEFAULT = '__default__' - - # Constants for different target types. - EXECUTABLE = '__executable__' - STATIC_LIB = '__static_lib__' - SHARED_LIB = '__shared_lib__' - - def __init__(self, gyp=None, *args, **kw): - self.origin_cwd = os.path.abspath(os.path.dirname(sys.argv[0])) - - if not gyp: - gyp = os.environ.get('TESTGYP_GYP') - if not gyp: - if sys.platform == 'win32': - gyp = 'gyp.bat' - else: - gyp = 'gyp' - self.gyp = os.path.abspath(gyp) - - self.initialize_build_tool() - - if not kw.has_key('match'): - kw['match'] = TestCommon.match_exact - - if not kw.has_key('workdir'): - # Default behavior: the null string causes TestCmd to create - # a temporary directory for us. 
- kw['workdir'] = '' - - formats = kw.get('formats', []) - if kw.has_key('formats'): - del kw['formats'] - - super(TestGypBase, self).__init__(*args, **kw) - - excluded_formats = set([f for f in formats if f[0] == '!']) - included_formats = set(formats) - excluded_formats - if ('!'+self.format in excluded_formats or - included_formats and self.format not in included_formats): - msg = 'Invalid test for %r format; skipping test.\n' - self.skip_test(msg % self.format) - - self.copy_test_configuration(self.origin_cwd, self.workdir) - self.set_configuration(None) - - def built_file_must_exist(self, name, type=None, **kw): - """ - Fails the test if the specified built file name does not exist. - """ - return self.must_exist(self.built_file_path(name, type, **kw)) - - def built_file_must_not_exist(self, name, type=None, **kw): - """ - Fails the test if the specified built file name exists. - """ - return self.must_not_exist(self.built_file_path(name, type, **kw)) - - def built_file_must_match(self, name, contents, **kw): - """ - Fails the test if the contents of the specified built file name - do not match the specified contents. - """ - return self.must_match(self.built_file_path(name, **kw), contents) - - def built_file_must_not_match(self, name, contents, **kw): - """ - Fails the test if the contents of the specified built file name - match the specified contents. - """ - return self.must_not_match(self.built_file_path(name, **kw), contents) - - def copy_test_configuration(self, source_dir, dest_dir): - """ - Copies the test configuration from the specified source_dir - (the directory in which the test script lives) to the - specified dest_dir (a temporary working directory). - - This ignores all files and directories that begin with - the string 'gyptest', and all '.svn' subdirectories. - """ - for root, dirs, files in os.walk(source_dir): - if '.svn' in dirs: - dirs.remove('.svn') - dirs = [ d for d in dirs if not d.startswith('gyptest') ] - files = [ f for f in files if not f.startswith('gyptest') ] - for dirname in dirs: - source = os.path.join(root, dirname) - destination = source.replace(source_dir, dest_dir) - os.mkdir(destination) - if sys.platform != 'win32': - shutil.copystat(source, destination) - for filename in files: - source = os.path.join(root, filename) - destination = source.replace(source_dir, dest_dir) - shutil.copy2(source, destination) - - def initialize_build_tool(self): - """ - Initializes the .build_tool attribute. - - Searches the .build_tool_list for an executable name on the user's - $PATH. The first tool on the list is used as-is if nothing is found - on the current $PATH. - """ - for build_tool in self.build_tool_list: - if not build_tool: - continue - if os.path.isabs(build_tool): - self.build_tool = build_tool - return - build_tool = self.where_is(build_tool) - if build_tool: - self.build_tool = build_tool - return - - if self.build_tool_list: - self.build_tool = self.build_tool_list[0] - - def relocate(self, source, destination): - """ - Renames (relocates) the specified source (usually a directory) - to the specified destination, creating the destination directory - first if necessary. - - Note: Don't use this as a generic "rename" operation. In the - future, "relocating" parts of a GYP tree may affect the state of - the test to modify the behavior of later method calls. 
- """ - destination_dir = os.path.dirname(destination) - if not os.path.exists(destination_dir): - self.subdir(destination_dir) - os.rename(source, destination) - - def report_not_up_to_date(self): - """ - Reports that a build is not up-to-date. - - This provides common reporting for formats that have complicated - conditions for checking whether a build is up-to-date. Formats - that expect exact output from the command (make, scons) can - just set stdout= when they call the run_build() method. - """ - print "Build is not up-to-date:" - print self.banner('STDOUT ') - print self.stdout() - stderr = self.stderr() - if stderr: - print self.banner('STDERR ') - print stderr - - def run_gyp(self, gyp_file, *args, **kw): - """ - Runs gyp against the specified gyp_file with the specified args. - """ - # TODO: --depth=. works around Chromium-specific tree climbing. - args = ('--depth=.', '--format='+self.format, gyp_file) + args - return self.run(program=self.gyp, arguments=args, **kw) - - def run(self, *args, **kw): - """ - Executes a program by calling the superclass .run() method. - - This exists to provide a common place to filter out keyword - arguments implemented in this layer, without having to update - the tool-specific subclasses or clutter the tests themselves - with platform-specific code. - """ - if kw.has_key('SYMROOT'): - del kw['SYMROOT'] - super(TestGypBase, self).run(*args, **kw) - - def set_configuration(self, configuration): - """ - Sets the configuration, to be used for invoking the build - tool and testing potential built output. - """ - self.configuration = configuration - - def configuration_dirname(self): - if self.configuration: - return self.configuration.split('|')[0] - else: - return 'Default' - - def configuration_buildname(self): - if self.configuration: - return self.configuration - else: - return 'Default' - - # - # Abstract methods to be defined by format-specific subclasses. - # - - def build(self, gyp_file, target=None, **kw): - """ - Runs a build of the specified target against the configuration - generated from the specified gyp_file. - - A 'target' argument of None or the special value TestGyp.DEFAULT - specifies the default argument for the underlying build tool. - A 'target' argument of TestGyp.ALL specifies the 'all' target - (if any) of the underlying build tool. - """ - raise NotImplementedError - - def built_file_path(self, name, type=None, **kw): - """ - Returns a path to the specified file name, of the specified type. - """ - raise NotImplementedError - - def built_file_basename(self, name, type=None, **kw): - """ - Returns the base name of the specified file name, of the specified type. - - A bare=True keyword argument specifies that prefixes and suffixes shouldn't - be applied. - """ - if not kw.get('bare'): - if type == self.EXECUTABLE: - name = name + self._exe - elif type == self.STATIC_LIB: - name = self.lib_ + name + self._lib - elif type == self.SHARED_LIB: - name = self.dll_ + name + self._dll - return name - - def run_built_executable(self, name, *args, **kw): - """ - Runs an executable program built from a gyp-generated configuration. - - The specified name should be independent of any particular generator. - Subclasses should find the output executable in the appropriate - output build directory, tack on any necessary executable suffix, etc. - """ - raise NotImplementedError - - def up_to_date(self, gyp_file, target=None, **kw): - """ - Verifies that a build of the specified target is up to date. 
- - The subclass should implement this by calling build() - (or a reasonable equivalent), checking whatever conditions - will tell it the build was an "up to date" null build, and - failing if it isn't. - """ - raise NotImplementedError - - -class TestGypGypd(TestGypBase): - """ - Subclass for testing the GYP 'gypd' generator (spit out the - internal data structure as pretty-printed Python). - """ - format = 'gypd' - - -class TestGypMake(TestGypBase): - """ - Subclass for testing the GYP Make generator. - """ - format = 'make' - build_tool_list = ['make'] - ALL = 'all' - def build(self, gyp_file, target=None, **kw): - """ - Runs a Make build using the Makefiles generated from the specified - gyp_file. - """ - arguments = kw.get('arguments', [])[:] - if self.configuration: - arguments.append('BUILDTYPE=' + self.configuration) - if target not in (None, self.DEFAULT): - arguments.append(target) - # Sub-directory builds provide per-gyp Makefiles (i.e. - # Makefile.gyp_filename), so use that if there is no Makefile. - chdir = kw.get('chdir', '') - if not os.path.exists(os.path.join(chdir, 'Makefile')): - print "NO Makefile in " + os.path.join(chdir, 'Makefile') - arguments.insert(0, '-f') - arguments.insert(1, os.path.splitext(gyp_file)[0] + '.Makefile') - kw['arguments'] = arguments - return self.run(program=self.build_tool, **kw) - def up_to_date(self, gyp_file, target=None, **kw): - """ - Verifies that a build of the specified Make target is up to date. - """ - if target in (None, self.DEFAULT): - message_target = 'all' - else: - message_target = target - kw['stdout'] = "make: Nothing to be done for `%s'.\n" % message_target - return self.build(gyp_file, target, **kw) - def run_built_executable(self, name, *args, **kw): - """ - Runs an executable built by Make. - """ - configuration = self.configuration_dirname() - libdir = os.path.join('out', configuration, 'lib') - # TODO(piman): when everything is cross-compile safe, remove lib.target - os.environ['LD_LIBRARY_PATH'] = libdir + '.host:' + libdir + '.target' - # Enclosing the name in a list avoids prepending the original dir. - program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)] - return self.run(program=program, *args, **kw) - def built_file_path(self, name, type=None, **kw): - """ - Returns a path to the specified file name, of the specified type, - as built by Make. - - Built files are in the subdirectory 'out/{configuration}'. - The default is 'out/Default'. - - A chdir= keyword argument specifies the source directory - relative to which the output subdirectory can be found. - - "type" values of STATIC_LIB or SHARED_LIB append the necessary - prefixes and suffixes to a platform-independent library base name. - - A libdir= keyword argument specifies a library subdirectory other - than the default 'obj.target'. - """ - result = [] - chdir = kw.get('chdir') - if chdir: - result.append(chdir) - configuration = self.configuration_dirname() - result.extend(['out', configuration]) - if type == self.STATIC_LIB: - result.append(kw.get('libdir', 'obj.target')) - elif type == self.SHARED_LIB: - result.append(kw.get('libdir', 'lib.target')) - result.append(self.built_file_basename(name, type, **kw)) - return self.workpath(*result) - - -class TestGypMSVS(TestGypBase): - """ - Subclass for testing the GYP Visual Studio generator. 
- """ - format = 'msvs' - - u = r'=== Build: 0 succeeded, 0 failed, (\d+) up-to-date, 0 skipped ===' - up_to_date_re = re.compile(u, re.M) - - # Initial None element will indicate to our .initialize_build_tool() - # method below that 'devenv' was not found on %PATH%. - # - # Note: we must use devenv.com to be able to capture build output. - # Directly executing devenv.exe only sends output to BuildLog.htm. - build_tool_list = [None, 'devenv.com'] - - def initialize_build_tool(self): - """ - Initializes the Visual Studio .build_tool parameter, searching %PATH% - and %PATHEXT% for a devenv.{exe,bat,...} executable, and falling - back to a hard-coded default (on the current drive) if necessary. - """ - super(TestGypMSVS, self).initialize_build_tool() - if not self.build_tool: - # We didn't find 'devenv' on the path. Just hard-code a default, - # and revisit this if it becomes important. - possible = [ - # Note: if you're using this, set GYP_MSVS_VERSION=2008 - # to get the tests to pass. - ('C:\\Program Files (x86)', - 'Microsoft Visual Studio 9.0', 'Common7', 'IDE', 'devenv.com'), - ('C:\\Program Files', - 'Microsoft Visual Studio 9.0', 'Common7', 'IDE', 'devenv.com'), - ('C:\\Program Files (x86)', - 'Microsoft Visual Studio 8', 'Common7', 'IDE', 'devenv.com'), - ('C:\\Program Files', - 'Microsoft Visual Studio 8', 'Common7', 'IDE', 'devenv.com'), - ] - for build_tool in possible: - bt = os.path.join(*build_tool) - if os.path.exists(bt): - self.build_tool = bt - break - def build(self, gyp_file, target=None, rebuild=False, **kw): - """ - Runs a Visual Studio build using the configuration generated - from the specified gyp_file. - """ - configuration = self.configuration_buildname() - if rebuild: - build = '/Rebuild' - else: - build = '/Build' - arguments = kw.get('arguments', [])[:] - arguments.extend([gyp_file.replace('.gyp', '.sln'), - build, configuration]) - # Note: the Visual Studio generator doesn't add an explicit 'all' - # target, so we just treat it the same as the default. - if target not in (None, self.ALL, self.DEFAULT): - arguments.extend(['/Project', target]) - if self.configuration: - arguments.extend(['/ProjectConfig', self.configuration]) - kw['arguments'] = arguments - return self.run(program=self.build_tool, **kw) - def up_to_date(self, gyp_file, target=None, **kw): - """ - Verifies that a build of the specified Visual Studio target is up to date. - """ - result = self.build(gyp_file, target, **kw) - if not result: - stdout = self.stdout() - m = self.up_to_date_re.search(stdout) - if not m or m.group(1) == '0': - self.report_not_up_to_date() - self.fail_test() - return result - def run_built_executable(self, name, *args, **kw): - """ - Runs an executable built by Visual Studio. - """ - configuration = self.configuration_dirname() - # Enclosing the name in a list avoids prepending the original dir. - program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)] - return self.run(program=program, *args, **kw) - def built_file_path(self, name, type=None, **kw): - """ - Returns a path to the specified file name, of the specified type, - as built by Visual Studio. - - Built files are in a subdirectory that matches the configuration - name. The default is 'Default'. - - A chdir= keyword argument specifies the source directory - relative to which the output subdirectory can be found. - - "type" values of STATIC_LIB or SHARED_LIB append the necessary - prefixes and suffixes to a platform-independent library base name. 
- """ - result = [] - chdir = kw.get('chdir') - if chdir: - result.append(chdir) - result.append(self.configuration_dirname()) - if type == self.STATIC_LIB: - result.append('lib') - result.append(self.built_file_basename(name, type, **kw)) - return self.workpath(*result) - - -class TestGypSCons(TestGypBase): - """ - Subclass for testing the GYP SCons generator. - """ - format = 'scons' - build_tool_list = ['scons', 'scons.py'] - ALL = 'all' - def build(self, gyp_file, target=None, **kw): - """ - Runs a scons build using the SCons configuration generated from the - specified gyp_file. - """ - arguments = kw.get('arguments', [])[:] - dirname = os.path.dirname(gyp_file) - if dirname: - arguments.extend(['-C', dirname]) - if self.configuration: - arguments.append('--mode=' + self.configuration) - if target not in (None, self.DEFAULT): - arguments.append(target) - kw['arguments'] = arguments - return self.run(program=self.build_tool, **kw) - def up_to_date(self, gyp_file, target=None, **kw): - """ - Verifies that a build of the specified SCons target is up to date. - """ - if target in (None, self.DEFAULT): - up_to_date_targets = 'all' - else: - up_to_date_targets = target - up_to_date_lines = [] - for arg in up_to_date_targets.split(): - up_to_date_lines.append("scons: `%s' is up to date.\n" % arg) - kw['stdout'] = ''.join(up_to_date_lines) - arguments = kw.get('arguments', [])[:] - arguments.append('-Q') - kw['arguments'] = arguments - return self.build(gyp_file, target, **kw) - def run_built_executable(self, name, *args, **kw): - """ - Runs an executable built by scons. - """ - configuration = self.configuration_dirname() - os.environ['LD_LIBRARY_PATH'] = os.path.join(configuration, 'lib') - # Enclosing the name in a list avoids prepending the original dir. - program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)] - return self.run(program=program, *args, **kw) - def built_file_path(self, name, type=None, **kw): - """ - Returns a path to the specified file name, of the specified type, - as built by Scons. - - Built files are in a subdirectory that matches the configuration - name. The default is 'Default'. - - A chdir= keyword argument specifies the source directory - relative to which the output subdirectory can be found. - - "type" values of STATIC_LIB or SHARED_LIB append the necessary - prefixes and suffixes to a platform-independent library base name. - """ - result = [] - chdir = kw.get('chdir') - if chdir: - result.append(chdir) - result.append(self.configuration_dirname()) - if type in (self.STATIC_LIB, self.SHARED_LIB): - result.append('lib') - result.append(self.built_file_basename(name, type, **kw)) - return self.workpath(*result) - - -class TestGypXcode(TestGypBase): - """ - Subclass for testing the GYP Xcode generator. - """ - format = 'xcode' - build_tool_list = ['xcodebuild'] - - phase_script_execution = ("\n" - "PhaseScriptExecution /\\S+/Script-[0-9A-F]+\\.sh\n" - " cd /\\S+\n" - " /bin/sh -c /\\S+/Script-[0-9A-F]+\\.sh\n" - "(make: Nothing to be done for `all'\\.\n)?") - - strip_up_to_date_expressions = [ - # Various actions or rules can run even when the overall build target - # is up to date. Strip those phases' GYP-generated output. - re.compile(phase_script_execution, re.S), - - # The message from distcc_pump can trail the "BUILD SUCCEEDED" - # message, so strip that, too. 
- re.compile('__________Shutting down distcc-pump include server\n', re.S), - ] - - up_to_date_ending = 'Checking Dependencies...\n** BUILD SUCCEEDED **\n' - - def build(self, gyp_file, target=None, **kw): - """ - Runs an xcodebuild using the .xcodeproj generated from the specified - gyp_file. - """ - # Be sure we're working with a copy of 'arguments' since we modify it. - # The caller may not be expecting it to be modified. - arguments = kw.get('arguments', [])[:] - arguments.extend(['-project', gyp_file.replace('.gyp', '.xcodeproj')]) - if target == self.ALL: - arguments.append('-alltargets',) - elif target not in (None, self.DEFAULT): - arguments.extend(['-target', target]) - if self.configuration: - arguments.extend(['-configuration', self.configuration]) - symroot = kw.get('SYMROOT', '$SRCROOT/build') - if symroot: - arguments.append('SYMROOT='+symroot) - kw['arguments'] = arguments - return self.run(program=self.build_tool, **kw) - def up_to_date(self, gyp_file, target=None, **kw): - """ - Verifies that a build of the specified Xcode target is up to date. - """ - result = self.build(gyp_file, target, **kw) - if not result: - output = self.stdout() - for expression in self.strip_up_to_date_expressions: - output = expression.sub('', output) - if not output.endswith(self.up_to_date_ending): - self.report_not_up_to_date() - self.fail_test() - return result - def run_built_executable(self, name, *args, **kw): - """ - Runs an executable built by xcodebuild. - """ - configuration = self.configuration_dirname() - os.environ['DYLD_LIBRARY_PATH'] = os.path.join('build', configuration) - # Enclosing the name in a list avoids prepending the original dir. - program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)] - return self.run(program=program, *args, **kw) - def built_file_path(self, name, type=None, **kw): - """ - Returns a path to the specified file name, of the specified type, - as built by Xcode. - - Built files are in the subdirectory 'build/{configuration}'. - The default is 'build/Default'. - - A chdir= keyword argument specifies the source directory - relative to which the output subdirectory can be found. - - "type" values of STATIC_LIB or SHARED_LIB append the necessary - prefixes and suffixes to a platform-independent library base name. - """ - result = [] - chdir = kw.get('chdir') - if chdir: - result.append(chdir) - configuration = self.configuration_dirname() - result.extend(['build', configuration]) - result.append(self.built_file_basename(name, type, **kw)) - return self.workpath(*result) - - -format_class_list = [ - TestGypGypd, - TestGypMake, - TestGypMSVS, - TestGypSCons, - TestGypXcode, -] - -def TestGyp(*args, **kw): - """ - Returns an appropriate TestGyp* instance for a specified GYP format. - """ - format = kw.get('format') - if format: - del kw['format'] - else: - format = os.environ.get('TESTGYP_FORMAT') - for format_class in format_class_list: - if format == format_class.format: - return format_class(*args, **kw) - raise Exception, "unknown format %r" % format diff --git a/third_party/gyp/test/library/gyptest-shared.py b/third_party/gyp/test/library/gyptest-shared.py deleted file mode 100644 index a1d2985..0000000 --- a/third_party/gyp/test/library/gyptest-shared.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple build of a "Hello, world!" 
program with shared libraries, -including verifying that libraries are rebuilt correctly when functions -move between libraries. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('library.gyp', - '-Dlibrary=shared_library', - '-Dmoveable_function=lib1', - chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('library.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from program.c -Hello from lib1.c -Hello from lib2.c -Hello from lib1_moveable.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.run_gyp('library.gyp', - '-Dlibrary=shared_library', - '-Dmoveable_function=lib2', - chdir='relocate/src') - -# Update program.c to force a rebuild. -test.sleep() -contents = test.read('relocate/src/program.c') -contents = contents.replace('Hello', 'Hello again') -test.write('relocate/src/program.c', contents) - -test.build('library.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello again from program.c -Hello from lib1.c -Hello from lib2.c -Hello from lib2_moveable.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.run_gyp('library.gyp', - '-Dlibrary=shared_library', - '-Dmoveable_function=lib1', - chdir='relocate/src') - -# Update program.c to force a rebuild. -test.sleep() -contents = test.read('relocate/src/program.c') -contents = contents.replace('again', 'again again') -test.write('relocate/src/program.c', contents) - -# TODO(sgk): we have to force a rebuild of lib2 so that it weeds out -# the "moved" module. This should be done in gyp by adding a dependency -# on the generated .vcproj file itself. -test.touch('relocate/src/lib2.c') - -test.build('library.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello again again from program.c -Hello from lib1.c -Hello from lib2.c -Hello from lib1_moveable.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.pass_test() diff --git a/third_party/gyp/test/library/gyptest-static.py b/third_party/gyp/test/library/gyptest-static.py deleted file mode 100644 index 4bc71c4..0000000 --- a/third_party/gyp/test/library/gyptest-static.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple build of a "Hello, world!" program with static libraries, -including verifying that libraries are rebuilt correctly when functions -move between libraries. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('library.gyp', - '-Dlibrary=static_library', - '-Dmoveable_function=lib1', - chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('library.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from program.c -Hello from lib1.c -Hello from lib2.c -Hello from lib1_moveable.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.run_gyp('library.gyp', - '-Dlibrary=static_library', - '-Dmoveable_function=lib2', - chdir='relocate/src') - -# Update program.c to force a rebuild. 
-test.sleep() -contents = test.read('relocate/src/program.c') -contents = contents.replace('Hello', 'Hello again') -test.write('relocate/src/program.c', contents) - -test.build('library.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello again from program.c -Hello from lib1.c -Hello from lib2.c -Hello from lib2_moveable.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.run_gyp('library.gyp', - '-Dlibrary=static_library', - '-Dmoveable_function=lib1', - chdir='relocate/src') - -# Update program.c and lib2.c to force a rebuild. -test.sleep() -contents = test.read('relocate/src/program.c') -contents = contents.replace('again', 'again again') -test.write('relocate/src/program.c', contents) - -# TODO(sgk): we have to force a rebuild of lib2 so that it weeds out -# the "moved" module. This should be done in gyp by adding a dependency -# on the generated .vcproj file itself. -test.touch('relocate/src/lib2.c') - -test.build('library.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello again again from program.c -Hello from lib1.c -Hello from lib2.c -Hello from lib1_moveable.c -""" -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - - -test.pass_test() diff --git a/third_party/gyp/test/library/src/lib1.c b/third_party/gyp/test/library/src/lib1.c deleted file mode 100644 index 3866b1b..0000000 --- a/third_party/gyp/test/library/src/lib1.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#ifdef _WIN32 -__declspec(dllexport) -#endif -void lib1_function(void) -{ - fprintf(stdout, "Hello from lib1.c\n"); - fflush(stdout); -} diff --git a/third_party/gyp/test/library/src/lib1_moveable.c b/third_party/gyp/test/library/src/lib1_moveable.c deleted file mode 100644 index 5d3cc1d..0000000 --- a/third_party/gyp/test/library/src/lib1_moveable.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#ifdef _WIN32 -__declspec(dllexport) -#endif -void moveable_function(void) -{ - fprintf(stdout, "Hello from lib1_moveable.c\n"); - fflush(stdout); -} diff --git a/third_party/gyp/test/library/src/lib2.c b/third_party/gyp/test/library/src/lib2.c deleted file mode 100644 index 21dda72..0000000 --- a/third_party/gyp/test/library/src/lib2.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#ifdef _WIN32 -__declspec(dllexport) -#endif -void lib2_function(void) -{ - fprintf(stdout, "Hello from lib2.c\n"); - fflush(stdout); -} diff --git a/third_party/gyp/test/library/src/lib2_moveable.c b/third_party/gyp/test/library/src/lib2_moveable.c deleted file mode 100644 index f645071..0000000 --- a/third_party/gyp/test/library/src/lib2_moveable.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#ifdef _WIN32 -__declspec(dllexport) -#endif -void moveable_function(void) -{ - fprintf(stdout, "Hello from lib2_moveable.c\n"); - fflush(stdout); -} diff --git a/third_party/gyp/test/library/src/library.gyp b/third_party/gyp/test/library/src/library.gyp deleted file mode 100644 index bc35516..0000000 --- a/third_party/gyp/test/library/src/library.gyp +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'variables': { - 'moveable_function%': 0, - }, - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'dependencies': [ - 'lib1', - 'lib2', - ], - 'sources': [ - 'program.c', - ], - }, - { - 'target_name': 'lib1', - 'type': '<(library)', - 'sources': [ - 'lib1.c', - ], - 'conditions': [ - ['moveable_function=="lib1"', { - 'sources': [ - 'lib1_moveable.c', - ], - }], - ], - }, - { - 'target_name': 'lib2', - 'type': '<(library)', - 'sources': [ - 'lib2.c', - ], - 'conditions': [ - ['moveable_function=="lib2"', { - 'sources': [ - 'lib2_moveable.c', - ], - }], - ], - }, - ], - 'conditions': [ - ['OS=="linux"', { - 'target_defaults': { - # Support 64-bit shared libs (also works fine for 32-bit). - 'cflags': ['-fPIC'], - }, - }], - ], -} diff --git a/third_party/gyp/test/library/src/program.c b/third_party/gyp/test/library/src/program.c deleted file mode 100644 index d7712cc..0000000 --- a/third_party/gyp/test/library/src/program.c +++ /dev/null @@ -1,15 +0,0 @@ -#include - -extern void lib1_function(void); -extern void lib2_function(void); -extern void moveable_function(void); - -int main(int argc, char *argv[]) -{ - fprintf(stdout, "Hello from program.c\n"); - fflush(stdout); - lib1_function(); - lib2_function(); - moveable_function(); - return 0; -} diff --git a/third_party/gyp/test/module/gyptest-default.py b/third_party/gyp/test/module/gyptest-default.py deleted file mode 100644 index 6b1c9b6..0000000 --- a/third_party/gyp/test/module/gyptest-default.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple build of a "Hello, world!" program with loadable modules. The -default for all platforms should be to output the loadable modules to the same -path as the executable. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('module.gyp', chdir='src') - -test.build('module.gyp', test.ALL, chdir='src') - -expect = """\ -Hello from program.c -Hello from lib1.c -Hello from lib2.c -""" -test.run_built_executable('program', chdir='src', stdout=expect) - -test.pass_test() diff --git a/third_party/gyp/test/module/src/lib1.c b/third_party/gyp/test/module/src/lib1.c deleted file mode 100644 index 8de0e94..0000000 --- a/third_party/gyp/test/module/src/lib1.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#ifdef _WIN32 -__declspec(dllexport) -#endif -void module_main(void) -{ - fprintf(stdout, "Hello from lib1.c\n"); - fflush(stdout); -} diff --git a/third_party/gyp/test/module/src/lib2.c b/third_party/gyp/test/module/src/lib2.c deleted file mode 100644 index 266396d..0000000 --- a/third_party/gyp/test/module/src/lib2.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -#ifdef _WIN32 -__declspec(dllexport) -#endif -void module_main(void) -{ - fprintf(stdout, "Hello from lib2.c\n"); - fflush(stdout); -} diff --git a/third_party/gyp/test/module/src/module.gyp b/third_party/gyp/test/module/src/module.gyp deleted file mode 100644 index bb43c30..0000000 --- a/third_party/gyp/test/module/src/module.gyp +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'target_defaults': { - 'conditions': [ - ['OS=="win"', { - 'defines': ['PLATFORM_WIN'], - }], - ['OS=="mac"', { - 'defines': ['PLATFORM_MAC'], - }], - ['OS=="linux"', { - 'defines': ['PLATFORM_LINUX'], - # Support 64-bit shared libs (also works fine for 32-bit). - 'cflags': ['-fPIC'], - 'ldflags': ['-ldl'], - }], - ], - }, - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'dependencies': [ - 'lib1', - 'lib2', - ], - 'sources': [ - 'program.c', - ], - }, - { - 'target_name': 'lib1', - 'type': 'loadable_module', - 'product_name': 'lib1', - 'product_prefix': '', - 'xcode_settings': {'OTHER_LDFLAGS': ['-dynamiclib'], 'MACH_O_TYPE': ''}, - 'sources': [ - 'lib1.c', - ], - }, - { - 'target_name': 'lib2', - 'product_name': 'lib2', - 'product_prefix': '', - 'type': 'loadable_module', - 'xcode_settings': {'OTHER_LDFLAGS': ['-dynamiclib'], 'MACH_O_TYPE': ''}, - 'sources': [ - 'lib2.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/module/src/program.c b/third_party/gyp/test/module/src/program.c deleted file mode 100644 index a48f940..0000000 --- a/third_party/gyp/test/module/src/program.c +++ /dev/null @@ -1,111 +0,0 @@ -#include -#include - -#if defined(PLATFORM_WIN) -#include -#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX) -#include -#include -#include -#include -#define MAX_PATH PATH_MAX -#endif - -#if defined(PLATFORM_WIN) -#define MODULE_SUFFIX ".dll" -#elif defined(PLATFORM_MAC) -#define MODULE_SUFFIX ".dylib" -#elif defined(PLATFORM_LINUX) -#define MODULE_SUFFIX ".so" -#endif - -typedef void (*module_symbol)(void); -char bin_path[MAX_PATH + 1]; - - -void CallModule(const char* module) { - char module_path[MAX_PATH + 1]; - const char* module_function = "module_main"; - module_symbol funcptr; -#if defined(PLATFORM_WIN) - HMODULE dl; - char drive[_MAX_DRIVE]; - char dir[_MAX_DIR]; - - if (_splitpath_s(bin_path, drive, _MAX_DRIVE, dir, _MAX_DIR, - NULL, 0, NULL, 0)) { - fprintf(stderr, "Failed to split executable path.\n"); - return; - } - if (_makepath_s(module_path, MAX_PATH, drive, dir, module, MODULE_SUFFIX)) { - fprintf(stderr, "Failed to calculate module path.\n"); - return; - } - - dl = LoadLibrary(module_path); - if (!dl) { - fprintf(stderr, "Failed to open module: %s\n", module_path); - return; - } - - funcptr = (module_symbol) GetProcAddress(dl, module_function); - if (!funcptr) { - fprintf(stderr, "Failed to find symbol: %s\n", module_function); - return; - } - funcptr(); - - FreeLibrary(dl); -#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX) - void* dl; - char* path_copy = strdup(bin_path); - char* bin_dir = dirname(path_copy); - int path_size = snprintf(module_path, MAX_PATH, "%s/%s%s", bin_dir, module, - MODULE_SUFFIX); - free(path_copy); - if (path_size < 0 || path_size > MAX_PATH) { - fprintf(stderr, "Failed to calculate module path.\n"); - return; - } - module_path[path_size] = 0; - - dl = dlopen(module_path, RTLD_LAZY); - if (!dl) { - fprintf(stderr, "Failed to open module: %s\n", module_path); - return; - } - - funcptr = dlsym(dl, module_function); - if (!funcptr) { - fprintf(stderr, "Failed to find symbol: %s\n", module_function); - return; - } - funcptr(); - - dlclose(dl); -#endif -} - -int main(int argc, char *argv[]) -{ - fprintf(stdout, "Hello from program.c\n"); - fflush(stdout); - -#if defined(PLATFORM_WIN) - if (!GetModuleFileName(NULL, bin_path, MAX_PATH)) { - fprintf(stderr, "Failed to determine executable path.\n"); - return; - } -#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX) - // Using argv[0] should be OK 
here since we control how the tests run, and - // can avoid exec and such issues that make it unreliable. - if (!realpath(argv[0], bin_path)) { - fprintf(stderr, "Failed to determine executable path (%s).\n", argv[0]); - return; - } -#endif - - CallModule("lib1"); - CallModule("lib2"); - return 0; -} diff --git a/third_party/gyp/test/msvs/express/base/base.gyp b/third_party/gyp/test/msvs/express/base/base.gyp deleted file mode 100644 index b7c9fc6..0000000 --- a/third_party/gyp/test/msvs/express/base/base.gyp +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'a', - 'type': 'static_library', - 'sources': [ - 'a.c', - ], - }, - { - 'target_name': 'b', - 'type': 'static_library', - 'sources': [ - 'b.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/msvs/express/express.gyp b/third_party/gyp/test/msvs/express/express.gyp deleted file mode 100644 index 917abe2..0000000 --- a/third_party/gyp/test/msvs/express/express.gyp +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'express', - 'type': 'executable', - 'dependencies': [ - 'base/base.gyp:a', - 'base/base.gyp:b', - ], - 'sources': [ - 'main.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/msvs/express/gyptest-express.py b/third_party/gyp/test/msvs/express/gyptest-express.py deleted file mode 100644 index 54c06f6..0000000 --- a/third_party/gyp/test/msvs/express/gyptest-express.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that flat solutions get generated for Express versions of -Visual Studio. -""" - -import TestGyp - -test = TestGyp.TestGyp(formats=['msvs']) - -test.run_gyp('express.gyp', '-G', 'msvs_version=2005') -test.must_contain('express.sln', '(base)') - -test.run_gyp('express.gyp', '-G', 'msvs_version=2008') -test.must_contain('express.sln', '(base)') - -test.run_gyp('express.gyp', '-G', 'msvs_version=2005e') -test.must_not_contain('express.sln', '(base)') - -test.run_gyp('express.gyp', '-G', 'msvs_version=2008e') -test.must_not_contain('express.sln', '(base)') - - -test.pass_test() diff --git a/third_party/gyp/test/multiple-targets/gyptest-all.py b/third_party/gyp/test/multiple-targets/gyptest-all.py deleted file mode 100644 index 9f157c4..0000000 --- a/third_party/gyp/test/multiple-targets/gyptest-all.py +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -""" -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('multiple.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -# TODO(sgk): remove stderr=None when the --generator-output= support -# gets rid of the scons warning -test.build('multiple.gyp', test.ALL, chdir='relocate/src', stderr=None) - -expect1 = """\ -hello from prog1.c -hello from common.c -""" - -expect2 = """\ -hello from prog2.c -hello from common.c -""" - -test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src') -test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src') - -test.pass_test() diff --git a/third_party/gyp/test/multiple-targets/gyptest-default.py b/third_party/gyp/test/multiple-targets/gyptest-default.py deleted file mode 100644 index 8d5072d..0000000 --- a/third_party/gyp/test/multiple-targets/gyptest-default.py +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('multiple.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -# TODO(sgk): remove stderr=None when the --generator-output= support -# gets rid of the scons warning -test.build('multiple.gyp', chdir='relocate/src', stderr=None) - -expect1 = """\ -hello from prog1.c -hello from common.c -""" - -expect2 = """\ -hello from prog2.c -hello from common.c -""" - -test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src') -test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src') - -test.pass_test() diff --git a/third_party/gyp/test/multiple-targets/src/common.c b/third_party/gyp/test/multiple-targets/src/common.c deleted file mode 100644 index f1df7c1..0000000 --- a/third_party/gyp/test/multiple-targets/src/common.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -void common(void) -{ - printf("hello from common.c\n"); - return; -} diff --git a/third_party/gyp/test/multiple-targets/src/multiple.gyp b/third_party/gyp/test/multiple-targets/src/multiple.gyp deleted file mode 100644 index 3db4ea3..0000000 --- a/third_party/gyp/test/multiple-targets/src/multiple.gyp +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'prog1', - 'type': 'executable', - 'sources': [ - 'prog1.c', - 'common.c', - ], - }, - { - 'target_name': 'prog2', - 'type': 'executable', - 'sources': [ - 'prog2.c', - 'common.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/multiple-targets/src/prog1.c b/third_party/gyp/test/multiple-targets/src/prog1.c deleted file mode 100644 index d55f8af..0000000 --- a/third_party/gyp/test/multiple-targets/src/prog1.c +++ /dev/null @@ -1,10 +0,0 @@ -#include <stdio.h> - -extern void common(void); - -int main(int argc, char *argv[]) -{ - printf("hello from prog1.c\n"); - common(); - return 0; -} diff --git a/third_party/gyp/test/multiple-targets/src/prog2.c b/third_party/gyp/test/multiple-targets/src/prog2.c deleted file mode 100644 index 760590e..0000000 --- a/third_party/gyp/test/multiple-targets/src/prog2.c +++ /dev/null @@ -1,10 +0,0 @@ -#include <stdio.h> - -extern void common(void); - -int main(int argc, char *argv[]) -{ - printf("hello from prog2.c\n"); - common(); - return 0; -} diff --git a/third_party/gyp/test/no-output/gyptest-no-output.py b/third_party/gyp/test/no-output/gyptest-no-output.py deleted file mode 100644 index 8431241..0000000 --- a/third_party/gyp/test/no-output/gyptest-no-output.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verified things don't explode when there are targets without outputs. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('nooutput.gyp', chdir='src') -test.relocate('src', 'relocate/src') -test.build('nooutput.gyp', chdir='relocate/src') - -test.pass_test() diff --git a/third_party/gyp/test/no-output/src/nooutput.gyp b/third_party/gyp/test/no-output/src/nooutput.gyp deleted file mode 100644 index c40124e..0000000 --- a/third_party/gyp/test/no-output/src/nooutput.gyp +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'no_output', - 'type': 'none', - 'direct_dependent_settings': { - 'defines': [ - 'NADA', - ], - }, - }, - ], -} diff --git a/third_party/gyp/test/product/gyptest-product.py b/third_party/gyp/test/product/gyptest-product.py deleted file mode 100644 index e9790f3..0000000 --- a/third_party/gyp/test/product/gyptest-product.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simplest-possible build of a "Hello, world!" program -using the default build target.
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('product.gyp') -test.build('product.gyp') - -# executables -test.built_file_must_exist('alt1' + test._exe, test.EXECUTABLE, bare=True) -test.built_file_must_exist('hello2.stuff', test.EXECUTABLE, bare=True) -test.built_file_must_exist('yoalt3.stuff', test.EXECUTABLE, bare=True) - -# shared libraries -test.built_file_must_exist(test.dll_ + 'alt4' + test._dll, - test.SHARED_LIB, bare=True) -test.built_file_must_exist(test.dll_ + 'hello5.stuff', - test.SHARED_LIB, bare=True) -test.built_file_must_exist('yoalt6.stuff', test.SHARED_LIB, bare=True) - -# static libraries -test.built_file_must_exist(test.lib_ + 'alt7' + test._lib, - test.STATIC_LIB, bare=True) -test.built_file_must_exist(test.lib_ + 'hello8.stuff', - test.STATIC_LIB, bare=True) -test.built_file_must_exist('yoalt9.stuff', test.STATIC_LIB, bare=True) - -# alternate product_dir -test.built_file_must_exist('bob/yoalt10.stuff', test.EXECUTABLE, bare=True) -test.built_file_must_exist('bob/yoalt11.stuff', test.EXECUTABLE, bare=True) -test.built_file_must_exist('bob/yoalt12.stuff', test.EXECUTABLE, bare=True) - -test.pass_test() diff --git a/third_party/gyp/test/product/hello.c b/third_party/gyp/test/product/hello.c deleted file mode 100644 index 94798f3..0000000 --- a/third_party/gyp/test/product/hello.c +++ /dev/null @@ -1,15 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -#include - -int func1(void) { - return 42; -} - -int main(int argc, char *argv[]) { - printf("Hello, world!\n"); - printf("%d\n", func1()); - return 0; -} diff --git a/third_party/gyp/test/product/product.gyp b/third_party/gyp/test/product/product.gyp deleted file mode 100644 index c25eaaa..0000000 --- a/third_party/gyp/test/product/product.gyp +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'hello1', - 'product_name': 'alt1', - 'type': 'executable', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello2', - 'product_extension': 'stuff', - 'type': 'executable', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello3', - 'product_name': 'alt3', - 'product_extension': 'stuff', - 'product_prefix': 'yo', - 'type': 'executable', - 'sources': [ - 'hello.c', - ], - }, - - { - 'target_name': 'hello4', - 'product_name': 'alt4', - 'type': 'shared_library', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello5', - 'product_extension': 'stuff', - 'type': 'shared_library', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello6', - 'product_name': 'alt6', - 'product_extension': 'stuff', - 'product_prefix': 'yo', - 'type': 'shared_library', - 'sources': [ - 'hello.c', - ], - }, - - { - 'target_name': 'hello7', - 'product_name': 'alt7', - 'type': 'static_library', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello8', - 'product_extension': 'stuff', - 'type': 'static_library', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello9', - 'product_name': 'alt9', - 'product_extension': 'stuff', - 'product_prefix': 'yo', - 'type': 'static_library', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello10', - 'product_name': 'alt10', - 'product_extension': 'stuff', - 'product_prefix': 'yo', - 'product_dir': '<(PRODUCT_DIR)/bob', - 'type': 'executable', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello11', - 'product_name': 'alt11', - 'product_extension': 'stuff', - 'product_prefix': 'yo', - 'product_dir': '<(PRODUCT_DIR)/bob', - 'type': 'shared_library', - 'sources': [ - 'hello.c', - ], - }, - { - 'target_name': 'hello12', - 'product_name': 'alt12', - 'product_extension': 'stuff', - 'product_prefix': 'yo', - 'product_dir': '<(PRODUCT_DIR)/bob', - 'type': 'static_library', - 'sources': [ - 'hello.c', - ], - }, - ], - 'conditions': [ - ['OS=="linux"', { - 'target_defaults': { - 'cflags': ['-fPIC'], - }, - }], - ], -} diff --git a/third_party/gyp/test/rules/gyptest-all.py b/third_party/gyp/test/rules/gyptest-all.py deleted file mode 100644 index 46cfeef..0000000 --- a/third_party/gyp/test/rules/gyptest-all.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple rules when using an explicit build target of 'all'. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('actions.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('actions.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from program.c -Hello from function1.in -Hello from function2.in -""" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir1' -else: - chdir = 'relocate/src' -test.run_built_executable('program', chdir=chdir, stdout=expect) - -expect = """\ -Hello from program.c -Hello from function3.in -""" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir3' -else: - chdir = 'relocate/src' -test.run_built_executable('program2', chdir=chdir, stdout=expect) - -test.must_match('relocate/src/subdir2/file1.out', "Hello from file1.in\n") -test.must_match('relocate/src/subdir2/file2.out', "Hello from file2.in\n") - -test.pass_test() diff --git a/third_party/gyp/test/rules/gyptest-default.py b/third_party/gyp/test/rules/gyptest-default.py deleted file mode 100644 index f9c7906..0000000 --- a/third_party/gyp/test/rules/gyptest-default.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies simple rules when using an explicit build target of 'all'. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('actions.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('actions.gyp', chdir='relocate/src') - -expect = """\ -Hello from program.c -Hello from function1.in -Hello from function2.in -""" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir1' -else: - chdir = 'relocate/src' -test.run_built_executable('program', chdir=chdir, stdout=expect) - -expect = """\ -Hello from program.c -Hello from function3.in -""" - -if test.format == 'xcode': - chdir = 'relocate/src/subdir3' -else: - chdir = 'relocate/src' -test.run_built_executable('program2', chdir=chdir, stdout=expect) - -test.must_match('relocate/src/subdir2/file1.out', "Hello from file1.in\n") -test.must_match('relocate/src/subdir2/file2.out', "Hello from file2.in\n") - -test.pass_test() diff --git a/third_party/gyp/test/rules/src/actions.gyp b/third_party/gyp/test/rules/src/actions.gyp deleted file mode 100644 index 3e9f8b5..0000000 --- a/third_party/gyp/test/rules/src/actions.gyp +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'pull_in_all_actions', - 'type': 'none', - 'dependencies': [ - 'subdir1/executable.gyp:*', - 'subdir2/none.gyp:*', - 'subdir3/executable2.gyp:*', - ], - }, - ], -} diff --git a/third_party/gyp/test/rules/src/copy-file.py b/third_party/gyp/test/rules/src/copy-file.py deleted file mode 100644 index 5a5feae..0000000 --- a/third_party/gyp/test/rules/src/copy-file.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
-import sys - -contents = open(sys.argv[1], 'r').read() -open(sys.argv[2], 'wb').write(contents) - -sys.exit(0) diff --git a/third_party/gyp/test/rules/src/subdir1/executable.gyp b/third_party/gyp/test/rules/src/subdir1/executable.gyp deleted file mode 100644 index 3028577..0000000 --- a/third_party/gyp/test/rules/src/subdir1/executable.gyp +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'program.c', - 'function1.in', - 'function2.in', - ], - 'rules': [ - { - 'rule_name': 'copy_file', - 'extension': 'in', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - # TODO: fix SCons and Make to support generated files not - # in a variable-named path like <(INTERMEDIATE_DIR) - #'<(RULE_INPUT_ROOT).c', - '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/rules/src/subdir1/function1.in b/third_party/gyp/test/rules/src/subdir1/function1.in deleted file mode 100644 index 60ff289..0000000 --- a/third_party/gyp/test/rules/src/subdir1/function1.in +++ /dev/null @@ -1,6 +0,0 @@ -#include <stdio.h> - -void function1(void) -{ - printf("Hello from function1.in\n"); -} diff --git a/third_party/gyp/test/rules/src/subdir1/function2.in b/third_party/gyp/test/rules/src/subdir1/function2.in deleted file mode 100644 index 0fcfc03..0000000 --- a/third_party/gyp/test/rules/src/subdir1/function2.in +++ /dev/null @@ -1,6 +0,0 @@ -#include <stdio.h> - -void function2(void) -{ - printf("Hello from function2.in\n"); -} diff --git a/third_party/gyp/test/rules/src/subdir1/program.c b/third_party/gyp/test/rules/src/subdir1/program.c deleted file mode 100644 index 258d7f9..0000000 --- a/third_party/gyp/test/rules/src/subdir1/program.c +++ /dev/null @@ -1,12 +0,0 @@ -#include <stdio.h> - -extern void function1(void); -extern void function2(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from program.c\n"); - function1(); - function2(); - return 0; -} diff --git a/third_party/gyp/test/rules/src/subdir2/file1.in b/third_party/gyp/test/rules/src/subdir2/file1.in deleted file mode 100644 index 86ac3ad..0000000 --- a/third_party/gyp/test/rules/src/subdir2/file1.in +++ /dev/null @@ -1 +0,0 @@ -Hello from file1.in diff --git a/third_party/gyp/test/rules/src/subdir2/file2.in b/third_party/gyp/test/rules/src/subdir2/file2.in deleted file mode 100644 index bf83d8e..0000000 --- a/third_party/gyp/test/rules/src/subdir2/file2.in +++ /dev/null @@ -1 +0,0 @@ -Hello from file2.in diff --git a/third_party/gyp/test/rules/src/subdir2/none.gyp b/third_party/gyp/test/rules/src/subdir2/none.gyp deleted file mode 100644 index 38bcdab..0000000 --- a/third_party/gyp/test/rules/src/subdir2/none.gyp +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file.
- -{ - 'targets': [ - { - 'target_name': 'files', - 'type': 'none', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'file1.in', - 'file2.in', - ], - 'rules': [ - { - 'rule_name': 'copy_file', - 'extension': 'in', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - '<(RULE_INPUT_ROOT).out', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/rules/src/subdir3/executable2.gyp b/third_party/gyp/test/rules/src/subdir3/executable2.gyp deleted file mode 100644 index a2a528f..0000000 --- a/third_party/gyp/test/rules/src/subdir3/executable2.gyp +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This one tests that rules are properly written if extensions are different -# between the target's sources (program.c) and the generated files -# (function3.cc) - -{ - 'targets': [ - { - 'target_name': 'program2', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'program.c', - 'function3.in', - ], - 'rules': [ - { - 'rule_name': 'copy_file', - 'extension': 'in', - 'inputs': [ - '../copy-file.py', - ], - 'outputs': [ - '<(SHARED_INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).cc', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/rules/src/subdir3/function3.in b/third_party/gyp/test/rules/src/subdir3/function3.in deleted file mode 100644 index 99f46ab..0000000 --- a/third_party/gyp/test/rules/src/subdir3/function3.in +++ /dev/null @@ -1,6 +0,0 @@ -#include - -extern "C" void function3(void) -{ - printf("Hello from function3.in\n"); -} diff --git a/third_party/gyp/test/rules/src/subdir3/program.c b/third_party/gyp/test/rules/src/subdir3/program.c deleted file mode 100644 index 94f6c50..0000000 --- a/third_party/gyp/test/rules/src/subdir3/program.c +++ /dev/null @@ -1,10 +0,0 @@ -#include - -extern void function3(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from program.c\n"); - function3(); - return 0; -} diff --git a/third_party/gyp/test/rules-rebuild/gyptest-all.py b/third_party/gyp/test/rules-rebuild/gyptest-all.py deleted file mode 100644 index 549d48b..0000000 --- a/third_party/gyp/test/rules-rebuild/gyptest-all.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that a rule that generates multiple outputs rebuilds -correctly when the inputs change. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -if test.format == 'msvs': - msg = 'TODO: issue 120: disabled on MSVS due to test execution problems.\n' - test.skip_test(msg) - -test.run_gyp('same_target.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - - -test.build('same_target.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from main.c -Hello from prog1.in! -Hello from prog2.in! 
-""" - -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - -test.up_to_date('same_target.gyp', 'program', chdir='relocate/src') - - -test.sleep() -contents = test.read(['relocate', 'src', 'prog1.in']) -contents = contents.replace('!', ' AGAIN!') -test.write(['relocate', 'src', 'prog1.in'], contents) - -test.build('same_target.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from main.c -Hello from prog1.in AGAIN! -Hello from prog2.in! -""" - -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - -test.up_to_date('same_target.gyp', 'program', chdir='relocate/src') - - -test.sleep() -contents = test.read(['relocate', 'src', 'prog2.in']) -contents = contents.replace('!', ' AGAIN!') -test.write(['relocate', 'src', 'prog2.in'], contents) - -test.build('same_target.gyp', test.ALL, chdir='relocate/src') - -expect = """\ -Hello from main.c -Hello from prog1.in AGAIN! -Hello from prog2.in AGAIN! -""" - -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - -test.up_to_date('same_target.gyp', 'program', chdir='relocate/src') - - -test.pass_test() diff --git a/third_party/gyp/test/rules-rebuild/gyptest-default.py b/third_party/gyp/test/rules-rebuild/gyptest-default.py deleted file mode 100644 index a669105..0000000 --- a/third_party/gyp/test/rules-rebuild/gyptest-default.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that a rule that generates multiple outputs rebuilds -correctly when the inputs change. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -if test.format == 'msvs': - msg = 'TODO: issue 120: disabled on MSVS due to test execution problems.\n' - test.skip_test(msg) - -test.run_gyp('same_target.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - - -test.build('same_target.gyp', chdir='relocate/src') - -expect = """\ -Hello from main.c -Hello from prog1.in! -Hello from prog2.in! -""" - -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - -test.up_to_date('same_target.gyp', 'program', chdir='relocate/src') - - -test.sleep() -contents = test.read(['relocate', 'src', 'prog1.in']) -contents = contents.replace('!', ' AGAIN!') -test.write(['relocate', 'src', 'prog1.in'], contents) - -test.build('same_target.gyp', chdir='relocate/src') - -expect = """\ -Hello from main.c -Hello from prog1.in AGAIN! -Hello from prog2.in! -""" - -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - -test.up_to_date('same_target.gyp', 'program', chdir='relocate/src') - - -test.sleep() -contents = test.read(['relocate', 'src', 'prog2.in']) -contents = contents.replace('!', ' AGAIN!') -test.write(['relocate', 'src', 'prog2.in'], contents) - -test.build('same_target.gyp', chdir='relocate/src') - -expect = """\ -Hello from main.c -Hello from prog1.in AGAIN! -Hello from prog2.in AGAIN! 
-""" - -test.run_built_executable('program', chdir='relocate/src', stdout=expect) - -test.up_to_date('same_target.gyp', 'program', chdir='relocate/src') - - -test.pass_test() diff --git a/third_party/gyp/test/rules-rebuild/src/main.c b/third_party/gyp/test/rules-rebuild/src/main.c deleted file mode 100644 index bdc5ec8..0000000 --- a/third_party/gyp/test/rules-rebuild/src/main.c +++ /dev/null @@ -1,12 +0,0 @@ -#include - -extern void prog1(void); -extern void prog2(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from main.c\n"); - prog1(); - prog2(); - return 0; -} diff --git a/third_party/gyp/test/rules-rebuild/src/make-sources.py b/third_party/gyp/test/rules-rebuild/src/make-sources.py deleted file mode 100644 index 6fce558..0000000 --- a/third_party/gyp/test/rules-rebuild/src/make-sources.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python -import sys - -assert len(sys.argv) == 4, sys.argv - -(in_file, c_file, h_file) = sys.argv[1:] - -def write_file(filename, contents): - open(filename, 'wb').write(contents) - -write_file(c_file, open(in_file, 'rb').read()) - -write_file(h_file, '#define NAME "%s"\n' % in_file) - -sys.exit(0) diff --git a/third_party/gyp/test/rules-rebuild/src/prog1.in b/third_party/gyp/test/rules-rebuild/src/prog1.in deleted file mode 100644 index 191b00e..0000000 --- a/third_party/gyp/test/rules-rebuild/src/prog1.in +++ /dev/null @@ -1,7 +0,0 @@ -#include -#include "prog1.h" - -void prog1(void) -{ - printf("Hello from %s!\n", NAME); -} diff --git a/third_party/gyp/test/rules-rebuild/src/prog2.in b/third_party/gyp/test/rules-rebuild/src/prog2.in deleted file mode 100644 index 7bfac51..0000000 --- a/third_party/gyp/test/rules-rebuild/src/prog2.in +++ /dev/null @@ -1,7 +0,0 @@ -#include -#include "prog2.h" - -void prog2(void) -{ - printf("Hello from %s!\n", NAME); -} diff --git a/third_party/gyp/test/rules-rebuild/src/same_target.gyp b/third_party/gyp/test/rules-rebuild/src/same_target.gyp deleted file mode 100644 index 148287f..0000000 --- a/third_party/gyp/test/rules-rebuild/src/same_target.gyp +++ /dev/null @@ -1,32 +0,0 @@ -{ - 'targets': [ - { - 'target_name': 'program', - 'type': 'executable', - 'msvs_cygwin_shell': 0, - 'sources': [ - 'main.c', - 'prog1.in', - 'prog2.in', - ], - 'rules': [ - { - 'rule_name': 'make_sources', - 'extension': 'in', - 'msvs_external_rule': 1, - 'inputs': [ - 'make-sources.py', - ], - 'outputs': [ - '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c', - '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h', - ], - 'action': [ - 'python', '<(_inputs)', '<(RULE_INPUT_NAME)', '<@(_outputs)', - ], - 'process_outputs_as_sources': 1, - }, - ], - }, - ], -} diff --git a/third_party/gyp/test/same-gyp-name/gyptest-all.py b/third_party/gyp/test/same-gyp-name/gyptest-all.py deleted file mode 100644 index 7645688..0000000 --- a/third_party/gyp/test/same-gyp-name/gyptest-all.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Build a .gyp that depends on 2 gyp files with the same name. -""" - -import TestGyp - -# This causes a problem on XCode (duplicate ID). 
-# See http://code.google.com/p/gyp/issues/detail?id=114 -test = TestGyp.TestGyp(formats=['msvs', 'scons', 'make']) - -test.run_gyp('all.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('all.gyp', test.ALL, chdir='relocate/src') - -expect1 = """\ -Hello from main1.cc -""" - -expect2 = """\ -Hello from main2.cc -""" - -test.run_built_executable('program1', chdir='relocate/src', stdout=expect1) -test.run_built_executable('program2', chdir='relocate/src', stdout=expect2) - -test.pass_test() diff --git a/third_party/gyp/test/same-gyp-name/gyptest-default.py b/third_party/gyp/test/same-gyp-name/gyptest-default.py deleted file mode 100644 index c1031f8..0000000 --- a/third_party/gyp/test/same-gyp-name/gyptest-default.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Build a .gyp that depends on 2 gyp files with the same name. -""" - -import TestGyp - -# This causes a problem on XCode (duplicate ID). -# See http://code.google.com/p/gyp/issues/detail?id=114 -test = TestGyp.TestGyp(formats=['msvs', 'scons', 'make']) - -test.run_gyp('all.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('all.gyp', chdir='relocate/src') - -expect1 = """\ -Hello from main1.cc -""" - -expect2 = """\ -Hello from main2.cc -""" - -test.run_built_executable('program1', chdir='relocate/src', stdout=expect1) -test.run_built_executable('program2', chdir='relocate/src', stdout=expect2) - -test.pass_test() diff --git a/third_party/gyp/test/same-gyp-name/src/all.gyp b/third_party/gyp/test/same-gyp-name/src/all.gyp deleted file mode 100644 index 229f02e..0000000 --- a/third_party/gyp/test/same-gyp-name/src/all.gyp +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'all_exes', - 'type': 'none', - 'dependencies': [ - 'subdir1/executable.gyp:*', - 'subdir2/executable.gyp:*', - ], - }, - ], -} diff --git a/third_party/gyp/test/same-gyp-name/src/subdir1/executable.gyp b/third_party/gyp/test/same-gyp-name/src/subdir1/executable.gyp deleted file mode 100644 index 82483b4..0000000 --- a/third_party/gyp/test/same-gyp-name/src/subdir1/executable.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'program1', - 'type': 'executable', - 'sources': [ - 'main1.cc', - ], - }, - ], -} diff --git a/third_party/gyp/test/same-gyp-name/src/subdir1/main1.cc b/third_party/gyp/test/same-gyp-name/src/subdir1/main1.cc deleted file mode 100644 index 3645558..0000000 --- a/third_party/gyp/test/same-gyp-name/src/subdir1/main1.cc +++ /dev/null @@ -1,6 +0,0 @@ -#include - -int main() { - printf("Hello from main1.cc\n"); - return 0; -} diff --git a/third_party/gyp/test/same-gyp-name/src/subdir2/executable.gyp b/third_party/gyp/test/same-gyp-name/src/subdir2/executable.gyp deleted file mode 100644 index e353701..0000000 --- a/third_party/gyp/test/same-gyp-name/src/subdir2/executable.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'program2', - 'type': 'executable', - 'sources': [ - 'main2.cc', - ], - }, - ], -} diff --git a/third_party/gyp/test/same-gyp-name/src/subdir2/main2.cc b/third_party/gyp/test/same-gyp-name/src/subdir2/main2.cc deleted file mode 100644 index 0c724de..0000000 --- a/third_party/gyp/test/same-gyp-name/src/subdir2/main2.cc +++ /dev/null @@ -1,6 +0,0 @@ -#include - -int main() { - printf("Hello from main2.cc\n"); - return 0; -} diff --git a/third_party/gyp/test/same-name/gyptest-all.py b/third_party/gyp/test/same-name/gyptest-all.py deleted file mode 100644 index 4c21502..0000000 --- a/third_party/gyp/test/same-name/gyptest-all.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Build a .gyp with two targets that share a common .c source file. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('all.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('all.gyp', test.ALL, chdir='relocate/src') - -expect1 = """\ -Hello from prog1.c -Hello prog1 from func.c -""" - -expect2 = """\ -Hello from prog2.c -Hello prog2 from func.c -""" - -test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1) -test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2) - -test.pass_test() diff --git a/third_party/gyp/test/same-name/gyptest-default.py b/third_party/gyp/test/same-name/gyptest-default.py deleted file mode 100644 index 98757c2..0000000 --- a/third_party/gyp/test/same-name/gyptest-default.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Build a .gyp with two targets that share a common .c source file. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('all.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('all.gyp', chdir='relocate/src') - -expect1 = """\ -Hello from prog1.c -Hello prog1 from func.c -""" - -expect2 = """\ -Hello from prog2.c -Hello prog2 from func.c -""" - -test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1) -test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2) - -test.pass_test() diff --git a/third_party/gyp/test/same-name/src/all.gyp b/third_party/gyp/test/same-name/src/all.gyp deleted file mode 100644 index 44e1049..0000000 --- a/third_party/gyp/test/same-name/src/all.gyp +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'prog1', - 'type': 'executable', - 'defines': [ - 'PROG="prog1"', - ], - 'sources': [ - 'prog1.c', - 'func.c', - # Uncomment to test same-named files in different directories, - # which Visual Studio doesn't support. - #'subdir1/func.c', - #'subdir2/func.c', - ], - }, - { - 'target_name': 'prog2', - 'type': 'executable', - 'defines': [ - 'PROG="prog2"', - ], - 'sources': [ - 'prog2.c', - 'func.c', - # Uncomment to test same-named files in different directories, - # which Visual Studio doesn't support. 
- #'subdir1/func.c', - #'subdir2/func.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/same-name/src/func.c b/third_party/gyp/test/same-name/src/func.c deleted file mode 100644 index e069c69..0000000 --- a/third_party/gyp/test/same-name/src/func.c +++ /dev/null @@ -1,6 +0,0 @@ -#include <stdio.h> - -void func(void) -{ - printf("Hello %s from func.c\n", PROG); -} diff --git a/third_party/gyp/test/same-name/src/prog1.c b/third_party/gyp/test/same-name/src/prog1.c deleted file mode 100644 index c8940fe..0000000 --- a/third_party/gyp/test/same-name/src/prog1.c +++ /dev/null @@ -1,16 +0,0 @@ -#include <stdio.h> - -extern void func(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from prog1.c\n"); - func(); - /* - * Uncomment to test same-named files in different directories, - * which Visual Studio doesn't support. - subdir1_func(); - subdir2_func(); - */ - return 0; -} diff --git a/third_party/gyp/test/same-name/src/prog2.c b/third_party/gyp/test/same-name/src/prog2.c deleted file mode 100644 index e6605c2..0000000 --- a/third_party/gyp/test/same-name/src/prog2.c +++ /dev/null @@ -1,16 +0,0 @@ -#include <stdio.h> - -extern void func(void); - -int main(int argc, char *argv[]) -{ - printf("Hello from prog2.c\n"); - func(); - /* - * Uncomment to test same-named files in different directories, - * which Visual Studio doesn't support. - subdir1_func(); - subdir2_func(); - */ - return 0; -} diff --git a/third_party/gyp/test/same-name/src/subdir1/func.c b/third_party/gyp/test/same-name/src/subdir1/func.c deleted file mode 100644 index b73450d..0000000 --- a/third_party/gyp/test/same-name/src/subdir1/func.c +++ /dev/null @@ -1,6 +0,0 @@ -#include <stdio.h> - -void subdir1_func(void) -{ - printf("Hello %s from subdir1/func.c\n", PROG); -} diff --git a/third_party/gyp/test/same-name/src/subdir2/func.c b/third_party/gyp/test/same-name/src/subdir2/func.c deleted file mode 100644 index 0248b57..0000000 --- a/third_party/gyp/test/same-name/src/subdir2/func.c +++ /dev/null @@ -1,6 +0,0 @@ -#include <stdio.h> - -void subdir2_func(void) -{ - printf("Hello %s from subdir2/func.c\n", PROG); -} diff --git a/third_party/gyp/test/scons_tools/gyptest-tools.py b/third_party/gyp/test/scons_tools/gyptest-tools.py deleted file mode 100755 index e97f5e6..0000000 --- a/third_party/gyp/test/scons_tools/gyptest-tools.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that a scons build picks up tools modules specified -via 'scons_tools' in the 'scons_settings' dictionary. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('tools.gyp') - -test.build('tools.gyp', test.ALL) - -if test.format == 'scons': - expect = "Hello, world!\n" -else: - expect = "" -test.run_built_executable('tools', stdout=expect) - -test.pass_test() diff --git a/third_party/gyp/test/scons_tools/site_scons/site_tools/this_tool.py b/third_party/gyp/test/scons_tools/site_scons/site_tools/this_tool.py deleted file mode 100644 index 10c8947..0000000 --- a/third_party/gyp/test/scons_tools/site_scons/site_tools/this_tool.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# SCons "tool" module that simply sets a -D value.
-def generate(env): - env['CPPDEFINES'] = ['THIS_TOOL'] - -def exists(env): - pass diff --git a/third_party/gyp/test/scons_tools/tools.c b/third_party/gyp/test/scons_tools/tools.c deleted file mode 100644 index 78dc0e3..0000000 --- a/third_party/gyp/test/scons_tools/tools.c +++ /dev/null @@ -1,13 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -#include - -int main(int argc, char *argv[]) -{ -#ifdef THIS_TOOL - printf("Hello, world!\n"); -#endif - return 0; -} diff --git a/third_party/gyp/test/scons_tools/tools.gyp b/third_party/gyp/test/scons_tools/tools.gyp deleted file mode 100644 index 736ba3f..0000000 --- a/third_party/gyp/test/scons_tools/tools.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'tools', - 'type': 'executable', - 'sources': [ - 'tools.c', - ], - }, - ], - 'scons_settings': { - 'tools': ['default', 'this_tool'], - }, -} diff --git a/third_party/gyp/test/sibling/gyptest-all.py b/third_party/gyp/test/sibling/gyptest-all.py deleted file mode 100644 index c04c2d4..0000000 --- a/third_party/gyp/test/sibling/gyptest-all.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('build/all.gyp', chdir='src') - -test.build('build/all.gyp', test.ALL, chdir='src') - -chdir = 'src/build' - -# The top-level Makefile is in the directory where gyp was run. -# TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp -# file? What about when passing in multiple .gyp files? Would sub-project -# Makefiles (see http://codereview.chromium.org/340008 comments) solve this? -if test.format == 'make': - chdir = 'src' - -if test.format == 'xcode': - chdir = 'src/prog1' -test.run_built_executable('prog1', - chdir=chdir, - stdout="Hello from prog1.c\n") - -if test.format == 'xcode': - chdir = 'src/prog2' -test.run_built_executable('prog2', - chdir=chdir, - stdout="Hello from prog2.c\n") - -test.pass_test() diff --git a/third_party/gyp/test/sibling/gyptest-relocate.py b/third_party/gyp/test/sibling/gyptest-relocate.py deleted file mode 100644 index 176545f..0000000 --- a/third_party/gyp/test/sibling/gyptest-relocate.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('build/all.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('build/all.gyp', test.ALL, chdir='relocate/src') - -chdir = 'relocate/src/build' - -# The top-level Makefile is in the directory where gyp was run. -# TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp -# file? What about when passing in multiple .gyp files? Would sub-project -# Makefiles (see http://codereview.chromium.org/340008 comments) solve this? 
-if test.format == 'make': - chdir = 'relocate/src' - -if test.format == 'xcode': - chdir = 'relocate/src/prog1' -test.run_built_executable('prog1', - chdir=chdir, - stdout="Hello from prog1.c\n") - -if test.format == 'xcode': - chdir = 'relocate/src/prog2' -test.run_built_executable('prog2', - chdir=chdir, - stdout="Hello from prog2.c\n") - -test.pass_test() diff --git a/third_party/gyp/test/sibling/src/build/all.gyp b/third_party/gyp/test/sibling/src/build/all.gyp deleted file mode 100644 index 6eafdf9..0000000 --- a/third_party/gyp/test/sibling/src/build/all.gyp +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - # TODO(sgk): a target name of 'all' leads to a scons dependency cycle - 'target_name': 'All', - 'type': 'none', - 'dependencies': [ - '../prog1/prog1.gyp:*', - '../prog2/prog2.gyp:*', - ], - }, - ], -} diff --git a/third_party/gyp/test/sibling/src/prog1/prog1.c b/third_party/gyp/test/sibling/src/prog1/prog1.c deleted file mode 100644 index 161ae8a..0000000 --- a/third_party/gyp/test/sibling/src/prog1/prog1.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from prog1.c\n"); - return 0; -} diff --git a/third_party/gyp/test/sibling/src/prog1/prog1.gyp b/third_party/gyp/test/sibling/src/prog1/prog1.gyp deleted file mode 100644 index fbe38b9..0000000 --- a/third_party/gyp/test/sibling/src/prog1/prog1.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'prog1', - 'type': 'executable', - 'sources': [ - 'prog1.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/sibling/src/prog2/prog2.c b/third_party/gyp/test/sibling/src/prog2/prog2.c deleted file mode 100644 index 7635ae8..0000000 --- a/third_party/gyp/test/sibling/src/prog2/prog2.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from prog2.c\n"); - return 0; -} diff --git a/third_party/gyp/test/sibling/src/prog2/prog2.gyp b/third_party/gyp/test/sibling/src/prog2/prog2.gyp deleted file mode 100644 index 5934548..0000000 --- a/third_party/gyp/test/sibling/src/prog2/prog2.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'prog2', - 'type': 'executable', - 'sources': [ - 'prog2.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/subdirectory/gyptest-SYMROOT-all.py b/third_party/gyp/test/subdirectory/gyptest-SYMROOT-all.py deleted file mode 100644 index b750904..0000000 --- a/third_party/gyp/test/subdirectory/gyptest-SYMROOT-all.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a target and a subsidiary dependent target from a -.gyp file in a subdirectory, without specifying an explicit output build -directory, and using the generated solution or project file at the top -of the tree as the entry point. 
- -The configuration sets the Xcode SYMROOT variable and uses --depth= -to make Xcode behave like the other build tools--that is, put all -built targets in a single output build directory at the top of the tree. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src') - -test.relocate('src', 'relocate/src') - -# Suppress the test infrastructure's setting SYMROOT on the command line. -test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src') - -test.run_built_executable('prog1', - stdout="Hello from prog1.c\n", - chdir='relocate/src') -test.run_built_executable('prog2', - stdout="Hello from prog2.c\n", - chdir='relocate/src') - -test.pass_test() diff --git a/third_party/gyp/test/subdirectory/gyptest-SYMROOT-default.py b/third_party/gyp/test/subdirectory/gyptest-SYMROOT-default.py deleted file mode 100644 index c64ae7d..0000000 --- a/third_party/gyp/test/subdirectory/gyptest-SYMROOT-default.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a target and a subsidiary dependent target from a -.gyp file in a subdirectory, without specifying an explicit output build -directory, and using the generated solution or project file at the top -of the tree as the entry point. - -The configuration sets the Xcode SYMROOT variable and uses --depth= -to make Xcode behave like the other build tools--that is, put all -built targets in a single output build directory at the top of the tree. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src') - -test.relocate('src', 'relocate/src') - -# Suppress the test infrastructure's setting SYMROOT on the command line. -test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src') - -test.run_built_executable('prog1', - stdout="Hello from prog1.c\n", - chdir='relocate/src') - -test.run_built_executable('prog2', - stdout="Hello from prog2.c\n", - chdir='relocate/src') - -test.pass_test() diff --git a/third_party/gyp/test/subdirectory/gyptest-subdir-all.py b/third_party/gyp/test/subdirectory/gyptest-subdir-all.py deleted file mode 100644 index fbaef32..0000000 --- a/third_party/gyp/test/subdirectory/gyptest-subdir-all.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a subsidiary dependent target from a .gyp file in a -subdirectory, without specifying an explicit output build directory, -and using the subdirectory's solution or project file as the entry point. 
-""" - -import TestGyp -import errno - -test = TestGyp.TestGyp() - -test.run_gyp('prog1.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -chdir = 'relocate/src/subdir' -target = test.ALL - -test.build('prog2.gyp', target, chdir=chdir) - -test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir) - -test.run_built_executable('prog2', - chdir=chdir, - stdout="Hello from prog2.c\n") - -test.pass_test() diff --git a/third_party/gyp/test/subdirectory/gyptest-subdir-default.py b/third_party/gyp/test/subdirectory/gyptest-subdir-default.py deleted file mode 100644 index 6372ea2..0000000 --- a/third_party/gyp/test/subdirectory/gyptest-subdir-default.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a subsidiary dependent target from a .gyp file in a -subdirectory, without specifying an explicit output build directory, -and using the subdirectory's solution or project file as the entry point. -""" - -import TestGyp -import errno - -test = TestGyp.TestGyp() - -test.run_gyp('prog1.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -chdir = 'relocate/src/subdir' - -test.build('prog2.gyp', chdir=chdir) - -test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir) - -test.run_built_executable('prog2', - chdir=chdir, - stdout="Hello from prog2.c\n") - -test.pass_test() diff --git a/third_party/gyp/test/subdirectory/gyptest-subdir2-deep.py b/third_party/gyp/test/subdirectory/gyptest-subdir2-deep.py deleted file mode 100644 index 4854898..0000000 --- a/third_party/gyp/test/subdirectory/gyptest-subdir2-deep.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a project rooted several layers under src_dir works. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('prog3.gyp', chdir='src/subdir/subdir2') - -test.relocate('src', 'relocate/src') - -test.build('prog3.gyp', test.ALL, chdir='relocate/src/subdir/subdir2') - -test.run_built_executable('prog3', - chdir='relocate/src/subdir/subdir2', - stdout="Hello from prog3.c\n") - -test.pass_test() diff --git a/third_party/gyp/test/subdirectory/gyptest-top-all.py b/third_party/gyp/test/subdirectory/gyptest-top-all.py deleted file mode 100644 index a29a41b..0000000 --- a/third_party/gyp/test/subdirectory/gyptest-top-all.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a target and a subsidiary dependent target from a -.gyp file in a subdirectory, without specifying an explicit output build -directory, and using the generated solution or project file at the top -of the tree as the entry point. - -There is a difference here in the default behavior of the underlying -build tools. Specifically, when building the entire "solution", Xcode -puts the output of each project relative to the .xcodeproj directory, -while Visual Studio (and our implementations of SCons and Make) put it -in a build directory relative to the "solution"--that is, the entry-point -from which you built the entire tree. 
-""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('prog1.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('prog1.gyp', test.ALL, chdir='relocate/src') - -test.run_built_executable('prog1', - stdout="Hello from prog1.c\n", - chdir='relocate/src') - -if test.format == 'xcode': - chdir = 'relocate/src/subdir' -else: - chdir = 'relocate/src' -test.run_built_executable('prog2', - chdir=chdir, - stdout="Hello from prog2.c\n") - -test.pass_test() diff --git a/third_party/gyp/test/subdirectory/gyptest-top-default.py b/third_party/gyp/test/subdirectory/gyptest-top-default.py deleted file mode 100644 index ac5f60d..0000000 --- a/third_party/gyp/test/subdirectory/gyptest-top-default.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a target and a subsidiary dependent target from a -.gyp file in a subdirectory, without specifying an explicit output build -directory, and using the generated solution or project file at the top -of the tree as the entry point. - -There is a difference here in the default behavior of the underlying -build tools. Specifically, when building the entire "solution", Xcode -puts the output of each project relative to the .xcodeproj directory, -while Visual Studio (and our implementations of SCons and Make) put it -in a build directory relative to the "solution"--that is, the entry-point -from which you built the entire tree. -""" - -import TestGyp - -test = TestGyp.TestGyp() - -test.run_gyp('prog1.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('prog1.gyp', chdir='relocate/src') - -test.run_built_executable('prog1', - stdout="Hello from prog1.c\n", - chdir='relocate/src') - -if test.format == 'xcode': - chdir = 'relocate/src/subdir' -else: - chdir = 'relocate/src' -test.run_built_executable('prog2', - chdir=chdir, - stdout="Hello from prog2.c\n") - -test.pass_test() diff --git a/third_party/gyp/test/subdirectory/src/prog1.c b/third_party/gyp/test/subdirectory/src/prog1.c deleted file mode 100644 index 161ae8a..0000000 --- a/third_party/gyp/test/subdirectory/src/prog1.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from prog1.c\n"); - return 0; -} diff --git a/third_party/gyp/test/subdirectory/src/prog1.gyp b/third_party/gyp/test/subdirectory/src/prog1.gyp deleted file mode 100644 index 2aa66ce..0000000 --- a/third_party/gyp/test/subdirectory/src/prog1.gyp +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'includes': [ - 'symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'prog1', - 'type': 'executable', - 'dependencies': [ - 'subdir/prog2.gyp:prog2', - ], - 'sources': [ - 'prog1.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/subdirectory/src/subdir/prog2.c b/third_party/gyp/test/subdirectory/src/subdir/prog2.c deleted file mode 100644 index 7635ae8..0000000 --- a/third_party/gyp/test/subdirectory/src/subdir/prog2.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from prog2.c\n"); - return 0; -} diff --git a/third_party/gyp/test/subdirectory/src/subdir/prog2.gyp b/third_party/gyp/test/subdirectory/src/subdir/prog2.gyp deleted file mode 100644 index c6cd35f..0000000 --- a/third_party/gyp/test/subdirectory/src/subdir/prog2.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'prog2', - 'type': 'executable', - 'sources': [ - 'prog2.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/subdirectory/src/subdir/subdir2/prog3.c b/third_party/gyp/test/subdirectory/src/subdir/subdir2/prog3.c deleted file mode 100644 index 7cfb0fa..0000000 --- a/third_party/gyp/test/subdirectory/src/subdir/subdir2/prog3.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from prog3.c\n"); - return 0; -} diff --git a/third_party/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp b/third_party/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp deleted file mode 100644 index b49fb59..0000000 --- a/third_party/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'includes': [ - '../../symroot.gypi', - ], - 'targets': [ - { - 'target_name': 'prog3', - 'type': 'executable', - 'sources': [ - 'prog3.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/subdirectory/src/symroot.gypi b/third_party/gyp/test/subdirectory/src/symroot.gypi deleted file mode 100644 index 5199164..0000000 --- a/third_party/gyp/test/subdirectory/src/symroot.gypi +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'variables': { - 'set_symroot%': 0, - }, - 'conditions': [ - ['set_symroot == 1', { - 'xcode_settings': { - 'SYMROOT': '<(DEPTH)/build', - }, - }], - ], -} diff --git a/third_party/gyp/test/toolsets/gyptest-toolsets.py b/third_party/gyp/test/toolsets/gyptest-toolsets.py deleted file mode 100644 index 19737f8..0000000 --- a/third_party/gyp/test/toolsets/gyptest-toolsets.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies that toolsets are correctly applied -""" - -import TestGyp - -# Multiple toolsets are currently only supported by the make generator. 
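Note: the prog1.gyp/prog2.gyp/prog3.gyp files removed above use gyp's 'path/to/file.gyp:target' dependency notation, which is resolved relative to the .gyp file that declares it; a bare target name refers to a target in the same file. The 'set_symroot%' key in symroot.gypi uses gyp's '%' suffix, meaning the value is only a default that can be overridden. A rough sketch of how such a dependency string resolves (illustration only, not gyp's actual loader):

    import posixpath

    # Resolve a 'subdir/prog2.gyp:prog2' style dependency relative to the
    # .gyp file that declares it (simplified illustration).
    def resolve_dependency(referring_gyp, dependency):
        if ':' in dependency:
            build_file, target = dependency.split(':', 1)
            build_file = posixpath.normpath(
                posixpath.join(posixpath.dirname(referring_gyp), build_file))
        else:
            build_file, target = referring_gyp, dependency  # same-file target
        return build_file, target

    print(resolve_dependency('src/prog1.gyp', 'subdir/prog2.gyp:prog2'))
    # ('src/subdir/prog2.gyp', 'prog2')
    print(resolve_dependency('toolsets.gyp', 'toolsets'))
    # ('toolsets.gyp', 'toolsets')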
-test = TestGyp.TestGyp(formats=['make']) - -test.run_gyp('toolsets.gyp') - -test.build('toolsets.gyp', test.ALL) - -test.run_built_executable('host-main', stdout="Host\n") -test.run_built_executable('target-main', stdout="Target\n") - -test.pass_test() diff --git a/third_party/gyp/test/toolsets/main.cc b/third_party/gyp/test/toolsets/main.cc deleted file mode 100644 index 0f353ae..0000000 --- a/third_party/gyp/test/toolsets/main.cc +++ /dev/null @@ -1,11 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -#include - -const char *GetToolset(); - -int main(int argc, char *argv[]) { - printf("%s\n", GetToolset()); -} diff --git a/third_party/gyp/test/toolsets/toolsets.cc b/third_party/gyp/test/toolsets/toolsets.cc deleted file mode 100644 index a45fa02..0000000 --- a/third_party/gyp/test/toolsets/toolsets.cc +++ /dev/null @@ -1,11 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -const char *GetToolset() { -#ifdef TARGET - return "Target"; -#else - return "Host"; -#endif -} diff --git a/third_party/gyp/test/toolsets/toolsets.gyp b/third_party/gyp/test/toolsets/toolsets.gyp deleted file mode 100644 index e41b928..0000000 --- a/third_party/gyp/test/toolsets/toolsets.gyp +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'target_defaults': { - 'target_conditions': [ - ['_toolset=="target"', {'defines': ['TARGET']}] - ] - }, - 'targets': [ - { - 'target_name': 'toolsets', - 'type': 'static_library', - 'toolsets': ['target', 'host'], - 'sources': [ - 'toolsets.cc', - ], - }, - { - 'target_name': 'host-main', - 'type': 'executable', - 'toolsets': ['host'], - 'dependencies': ['toolsets'], - 'sources': [ - 'main.cc', - ], - }, - { - 'target_name': 'target-main', - 'type': 'executable', - 'dependencies': ['toolsets'], - 'sources': [ - 'main.cc', - ], - }, - ], -} diff --git a/third_party/gyp/test/toplevel-dir/gyptest-toplevel-dir.py b/third_party/gyp/test/toplevel-dir/gyptest-toplevel-dir.py deleted file mode 100644 index 61986cd..0000000 --- a/third_party/gyp/test/toplevel-dir/gyptest-toplevel-dir.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verifies building a subsidiary dependent target from a .gyp file in a -subdirectory, without specifying an explicit output build directory, -and using the subdirectory's solution or project file as the entry point. -""" - -import TestGyp -import errno - -test = TestGyp.TestGyp(formats=['make']) - -# We want our Makefile to be one dir up from main.gyp. 
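Note: stepping back to toolsets.gyp above, its target_conditions entry keys off the built-in _toolset variable, and gyp evaluates such condition strings as Python expressions against the current variable set. A minimal illustration of that idea; this is not gyp's actual evaluator:

    # Evaluate a gyp-style condition string against a variables dict
    # (simplified; gyp's real evaluator does much more).
    def evaluate_condition(condition, variables):
        return bool(eval(condition, {'__builtins__': None}, dict(variables)))

    for toolset in ('host', 'target'):
        defines = ['TARGET'] if evaluate_condition('_toolset=="target"',
                                                   {'_toolset': toolset}) else []
        print('%s -> defines %s' % (toolset, defines))
    # host -> defines []
    # target -> defines ['TARGET']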
-test.run_gyp('main.gyp', '--toplevel-dir=..', chdir='src/sub1') - -toplevel_dir = 'src' - -test.build('all', chdir=toplevel_dir) - -test.built_file_must_exist('prog1', type=test.EXECUTABLE, chdir=toplevel_dir) - -test.run_built_executable('prog1', - chdir=toplevel_dir, - stdout="Hello from prog1.c\n") - -test.pass_test() diff --git a/third_party/gyp/test/toplevel-dir/src/sub1/main.gyp b/third_party/gyp/test/toplevel-dir/src/sub1/main.gyp deleted file mode 100644 index 3321901..0000000 --- a/third_party/gyp/test/toplevel-dir/src/sub1/main.gyp +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'prog1', - 'type': 'executable', - 'dependencies': [ - '<(DEPTH)/../sub2/prog2.gyp:prog2', - ], - 'sources': [ - 'prog1.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/toplevel-dir/src/sub1/prog1.c b/third_party/gyp/test/toplevel-dir/src/sub1/prog1.c deleted file mode 100644 index 161ae8a..0000000 --- a/third_party/gyp/test/toplevel-dir/src/sub1/prog1.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from prog1.c\n"); - return 0; -} diff --git a/third_party/gyp/test/toplevel-dir/src/sub2/prog2.c b/third_party/gyp/test/toplevel-dir/src/sub2/prog2.c deleted file mode 100644 index 7635ae8..0000000 --- a/third_party/gyp/test/toplevel-dir/src/sub2/prog2.c +++ /dev/null @@ -1,7 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ - printf("Hello from prog2.c\n"); - return 0; -} diff --git a/third_party/gyp/test/toplevel-dir/src/sub2/prog2.gyp b/third_party/gyp/test/toplevel-dir/src/sub2/prog2.gyp deleted file mode 100644 index 5934548..0000000 --- a/third_party/gyp/test/toplevel-dir/src/sub2/prog2.gyp +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'targets': [ - { - 'target_name': 'prog2', - 'type': 'executable', - 'sources': [ - 'prog2.c', - ], - }, - ], -} diff --git a/third_party/gyp/test/variables/commands/commands-repeated.gyp b/third_party/gyp/test/variables/commands/commands-repeated.gyp deleted file mode 100644 index 822ae4f..0000000 --- a/third_party/gyp/test/variables/commands/commands-repeated.gyp +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This is a simple test file to make sure that variable substitution -# happens correctly. Run "run_tests.py" using python to generate the -# output from this gyp file. 
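Note: the commands-repeated.gyp variables block that follows exercises gyp's '<(var)' and '<@(list)' substitutions. A simplified sketch of that expansion, for orientation only; gyp's real implementation also handles '<!(command)', '<|(listfile ...)' and nested expansion:

    import re

    # Expand '<(var)' and '<@(list)' references from a variables dict
    # (simplified illustration, not gyp's expander).
    VAR_RE = re.compile(r'<(?P<is_array>@?)\((?P<name>[^()]+)\)')

    def expand(value, variables):
        def repl(match):
            replacement = variables[match.group('name')]
            if match.group('is_array') and isinstance(replacement, list):
                return ' '.join(str(item) for item in replacement)
            return str(replacement)
        return VAR_RE.sub(repl, value)

    variables = {'letters_list': 'ABCD',
                 'names': ['John', 'Jacob', 'Jingleheimer', 'Schmidt']}
    print(expand('<(letters_list)EFG', variables))   # ABCDEFG
    print(expand('names.txt <@(names)', variables))  # names.txt John Jacob Jingleheimer Schmidt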
- -{ - 'variables': { - 'pi': 'import math; print math.pi', - 'third_letters': "<(other_letters)HIJK", - 'letters_list': 'ABCD', - 'other_letters': '<(letters_list)EFG', - 'check_included': '<(included_variable)', - 'check_lists': [ - '<(included_variable)', - '<(third_letters)', - ], - 'check_int': 5, - 'check_str_int': '6', - 'check_list_int': [ - 7, - '8', - 9, - ], - 'not_int_1': ' 10', - 'not_int_2': '11 ', - 'not_int_3': '012', - 'not_int_4': '13.0', - 'not_int_5': '+14', - 'negative_int': '-15', - 'zero_int': '0', - }, - 'includes': [ - 'commands.gypi', - ], - 'targets': [ - { - 'target_name': 'foo', - 'type': 'none', - 'variables': { - 'var1': ' commands.gyp.stdout -python ../../../gyp --ignore-environment --debug variables --debug general --format gypd --depth . commands.gyp > commands.gyp.ignore-env.stdout -cp -f commands.gypd commands.gypd.golden -python ../../../gyp --debug variables --debug general --format gypd --depth . commands-repeated.gyp > commands-repeated.gyp.stdout -cp -f commands-repeated.gypd commands-repeated.gypd.golden diff --git a/third_party/gyp/test/variables/filelist/filelist.gyp.stdout b/third_party/gyp/test/variables/filelist/filelist.gyp.stdout deleted file mode 100644 index f541267..0000000 --- a/third_party/gyp/test/variables/filelist/filelist.gyp.stdout +++ /dev/null @@ -1,174 +0,0 @@ -GENERAL: running with these options: -GENERAL: check: None -GENERAL: circular_check: True -GENERAL: debug: ['variables', 'general'] -GENERAL: defines: None -GENERAL: depth: '.' -GENERAL: formats: ['gypd'] -GENERAL: generator_flags: [] -GENERAL: generator_output: None -GENERAL: includes: None -GENERAL: msvs_version: None -GENERAL: suffix: '' -GENERAL: toplevel_dir: None -GENERAL: use_environment: True -GENERAL: cmdline_default_variables: {} -GENERAL: generator_flags: {} -VARIABLES: Expanding 'exclude' to 'exclude' -VARIABLES: Expanding 'Sch.*' to 'Sch.*' -VARIABLES: Expanding 'include' to 'include' -VARIABLES: Expanding '.*dt' to '.*dt' -VARIABLES: Expanding 'exclude' to 'exclude' -VARIABLES: Expanding 'Jer.*' to 'Jer.*' -VARIABLES: Expanding 'John' to 'John' -VARIABLES: Expanding 'Jacob' to 'Jacob' -VARIABLES: Expanding 'Astor' to 'Astor' -VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer' -VARIABLES: Expanding 'Jerome' to 'Jerome' -VARIABLES: Expanding 'Schmidt' to 'Schmidt' -VARIABLES: Expanding 'Schultz' to 'Schultz' -VARIABLES: Expanding 'Astor' to 'Astor' -VARIABLES: Expanding '.' to '.' -VARIABLES: Matches: {'content': 'names.txt <@(names', 'is_array': '', 'type': '<|', 'replace': '<|(names.txt <@(names)'} -VARIABLES: Matches: {'content': 'names', 'is_array': '', 'type': '<@', 'replace': '<@(names)'} -VARIABLES: Expanding 'names' to 'names' -VARIABLES: Expanding 'John' to 'John' -VARIABLES: Expanding 'Jacob' to 'Jacob' -VARIABLES: Expanding 'Jingleheimer' to 'Jingleheimer' -VARIABLES: Expanding 'Schmidt' to 'Schmidt' -VARIABLES: Found output 'names.txt John Jacob Jingleheimer Schmidt', recursing. -VARIABLES: Expanding 'names.txt John Jacob Jingleheimer Schmidt' to 'names.txt John Jacob Jingleheimer Schmidt' -VARIABLES: Expanding 'names.txt <@(names)' to 'names.txt John Jacob Jingleheimer Schmidt' -VARIABLES: Found output 'names.txt', recursing. 
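Note: the VARIABLES trace above shows '<|(names.txt <@(names))' collapsing to just 'names.txt': the listfile operator writes the expanded contents to the named file and the whole expression is replaced by that filename. A rough sketch of that step; the one-item-per-line file format here is an assumption:

    # '<|(names.txt John Jacob Jingleheimer Schmidt)' -> 'names.txt',
    # with the items written into names.txt (illustration only).
    def expand_listfile(expression_body):
        filename, _, contents = expression_body.partition(' ')
        with open(filename, 'w') as listfile:
            listfile.write('\n'.join(contents.split()) + '\n')  # assumed format
        return filename

    print(expand_listfile('names.txt John Jacob Jingleheimer Schmidt'))  # names.txt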
-VARIABLES: Expanding 'names.txt' to 'names.txt' -VARIABLES: Expanding '<|(names.txt <@(names))' to 'names.txt' -VARIABLES: Expanding 'foo' to 'foo' -VARIABLES: Expanding 'target' to 'target' -VARIABLES: Expanding 'none' to 'none' -VARIABLES: Expanding 'test_action' to 'test_action' -VARIABLES: Expanding 'python' to 'python' -VARIABLES: Expanding 'dummy.py' to 'dummy.py' -VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'} -VARIABLES: Expanding 'names_listfile' to 'names_listfile' -VARIABLES: Found output 'names.txt', recursing. -VARIABLES: Expanding 'names.txt' to 'names.txt' -VARIABLES: Expanding '<(names_listfile)' to 'names.txt' -VARIABLES: Matches: {'content': 'names_listfile', 'is_array': '', 'type': '<', 'replace': '<(names_listfile)'} -VARIABLES: Expanding 'names_listfile' to 'names_listfile' -VARIABLES: Found output 'names.txt', recursing. -VARIABLES: Expanding 'names.txt' to 'names.txt' -VARIABLES: Expanding '<(names_listfile)' to 'names.txt' -VARIABLES: Matches: {'content': 'cat <(names_listfile', 'is_array': '', 'type': ' filelist.gyp.stdout -cp -f src/filelist.gypd filelist.gypd.golden diff --git a/third_party/gyp/test/variants/gyptest-variants.py b/third_party/gyp/test/variants/gyptest-variants.py deleted file mode 100644 index ce2455f..0000000 --- a/third_party/gyp/test/variants/gyptest-variants.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -Verify handling of build variants. - -TODO: Right now, only the SCons generator supports this, so the -test case is SCons-specific. In particular, it relise on SCons' -ability to rebuild in response to changes on the command line. It -may be simpler to just drop this feature if the other generators -can't be made to behave the same way. -""" - -import TestGyp - -test = TestGyp.TestGyp(formats=['scons']) - -test.run_gyp('variants.gyp', chdir='src') - -test.relocate('src', 'relocate/src') - -test.build('variants.gyp', chdir='relocate/src') - -test.run_built_executable('variants', - chdir='relocate/src', - stdout="Hello, world!\n") - -test.sleep() -test.build('variants.gyp', 'VARIANT1=1', chdir='relocate/src') - -test.run_built_executable('variants', - chdir='relocate/src', - stdout="Hello from VARIANT1\n") - -test.sleep() -test.build('variants.gyp', 'VARIANT2=1', chdir='relocate/src') - -test.run_built_executable('variants', - chdir='relocate/src', - stdout="Hello from VARIANT2\n") - -test.pass_test() diff --git a/third_party/gyp/test/variants/src/variants.c b/third_party/gyp/test/variants/src/variants.c deleted file mode 100644 index 3018e40..0000000 --- a/third_party/gyp/test/variants/src/variants.c +++ /dev/null @@ -1,13 +0,0 @@ -#include - -int main(int argc, char *argv[]) -{ -#if defined(VARIANT1) - printf("Hello from VARIANT1\n"); -#elif defined(VARIANT2) - printf("Hello from VARIANT2\n"); -#else - printf("Hello, world!\n"); -#endif - return 0; -} diff --git a/third_party/gyp/test/variants/src/variants.gyp b/third_party/gyp/test/variants/src/variants.gyp deleted file mode 100644 index 0305ca7..0000000 --- a/third_party/gyp/test/variants/src/variants.gyp +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -{ - 'targets': [ - { - 'target_name': 'variants', - 'type': 'executable', - 'sources': [ - 'variants.c', - ], - 'variants': { - 'variant1' : { - 'defines': [ - 'VARIANT1', - ], - }, - 'variant2' : { - 'defines': [ - 'VARIANT2', - ], - }, - }, - }, - ], -} diff --git a/third_party/gyp/tools/README b/third_party/gyp/tools/README deleted file mode 100644 index 712e4ef..0000000 --- a/third_party/gyp/tools/README +++ /dev/null @@ -1,15 +0,0 @@ -pretty_vcproj: - Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2] - - They key/value pair are used to resolve vsprops name. - - For example, if I want to diff the base.vcproj project: - - pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > orignal.txt - pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt - - And you can use your favorite diff tool to see the changes. - - Note: In the case of base.vcproj, the original vcproj is one level up the generated one. - I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt - before you perform the diff. \ No newline at end of file diff --git a/third_party/gyp/tools/pretty_gyp.py b/third_party/gyp/tools/pretty_gyp.py deleted file mode 100644 index 04c7901..0000000 --- a/third_party/gyp/tools/pretty_gyp.py +++ /dev/null @@ -1,142 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2009 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# This file pretty-prints the contents of a GYP file. - -import sys -import re - -input = [] -if len(sys.argv) > 1: - input_file = open(sys.argv[1]) - input = input_file.read().splitlines() - input_file.close() -else: - input = sys.stdin.read().splitlines() - -# This is used to remove comments when we're counting braces. -comment_re = re.compile(r'\s*#.*') - -# This is used to remove quoted strings when we're counting braces. -# It takes into account quoted quotes, and makes sure that the quotes -# match. -# NOTE: It does not handle quotes that span more than one line, or -# cases where an escaped quote is preceeded by an escaped backslash. -quote_re_str = r'(?P[\'"])(.*?)(? 0: - after = True - - # This catches the special case of a closing brace having something - # other than just whitespace ahead of it -- we don't want to - # unindent that until after this line is printed so it stays with - # the previous indentation level. - if cnt < 0 and closing_prefix_re.match(stripline): - after = True - return (cnt, after) - -# This does the main work of indenting the input based on the brace counts. -def prettyprint_input(lines): - indent = 0 - basic_offset = 2 - last_line = "" - for line in lines: - if comment_re.match(line): - print line - else: - line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix. - if len(line) > 0: - (brace_diff, after) = count_braces(line) - if brace_diff != 0: - if after: - print " " * (basic_offset * indent) + line - indent += brace_diff - else: - indent += brace_diff - print " " * (basic_offset * indent) + line - else: - print " " * (basic_offset * indent) + line - else: - print "" - last_line = line - -# Split up the double braces. -lines = split_double_braces(input) - -# Indent and print the output. 
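Note: pretty_gyp.py above indents by counting braces, which is why it first strips comments and quoted strings; literal braces inside either must not change the indent level. A condensed illustration of that counting step, not the full script:

    import re

    # Count net brace/bracket depth of a line after dropping comments and
    # quoted strings, as pretty_gyp.py does (simplified).
    comment_re = re.compile(r'\s*#.*')
    quote_re = re.compile(r'(?P<q>[\'"]).*?(?<!\\)(?P=q)')

    def brace_delta(line):
        stripped = comment_re.sub('', quote_re.sub('', line))
        return (stripped.count('{') + stripped.count('[')
                - stripped.count('}') - stripped.count(']'))

    print(brace_delta("'targets': [{  # {not counted}"))   # 2
    print(brace_delta("}],"))                              # -2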
-prettyprint_input(lines) diff --git a/third_party/gyp/tools/pretty_sln.py b/third_party/gyp/tools/pretty_sln.py deleted file mode 100755 index 0741fff..0000000 --- a/third_party/gyp/tools/pretty_sln.py +++ /dev/null @@ -1,167 +0,0 @@ -#!/usr/bin/python2.5 - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Prints the information in a sln file in a diffable way. - - It first outputs each projects in alphabetical order with their - dependencies. - - Then it outputs a possible build order. -""" - -__author__ = 'nsylvain (Nicolas Sylvain)' - -import os -import re -import sys -import pretty_vcproj - -def BuildProject(project, built, projects, deps): - # if all dependencies are done, we can build it, otherwise we try to build the - # dependency. - # This is not infinite-recursion proof. - for dep in deps[project]: - if dep not in built: - BuildProject(dep, built, projects, deps) - print project - built.append(project) - -def ParseSolution(solution_file): - # All projects, their clsid and paths. - projects = dict() - - # A list of dependencies associated with a project. - dependencies = dict() - - # Regular expressions that matches the SLN format. - # The first line of a project definition. - begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942' - '}"\) = "(.*)", "(.*)", "(.*)"$')) - # The last line of a project definition. - end_project = re.compile('^EndProject$') - # The first line of a dependency list. - begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$') - # The last line of a dependency list. - end_dep = re.compile('EndProjectSection$') - # A line describing a dependency. - dep_line = re.compile(' *({.*}) = ({.*})$') - - in_deps = False - solution = open(solution_file) - for line in solution: - results = begin_project.search(line) - if results: - # Hack to remove icu because the diff is too different. - if results.group(1).find('icu') != -1: - continue - # We remove "_gyp" from the names because it helps to diff them. - current_project = results.group(1).replace('_gyp', '') - projects[current_project] = [results.group(2).replace('_gyp', ''), - results.group(3), - results.group(2)] - dependencies[current_project] = [] - continue - - results = end_project.search(line) - if results: - current_project = None - continue - - results = begin_dep.search(line) - if results: - in_deps = True - continue - - results = end_dep.search(line) - if results: - in_deps = False - continue - - results = dep_line.search(line) - if results and in_deps and current_project: - dependencies[current_project].append(results.group(1)) - continue - - # Change all dependencies clsid to name instead. 
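Note: BuildProject() above prints each project only after its dependencies, giving the "possible build order" the docstring promises; like the original, it does not guard against cycles. A toy run of the same idea with made-up project names:

    # Toy run of the dependency-first ordering BuildProject() produces.
    deps = {'app': ['net', 'base'], 'net': ['base'], 'base': []}
    built = []

    def build(project):
        for dep in deps[project]:
            if dep not in built:
                build(dep)
        print(project)
        built.append(project)

    for project in sorted(deps):
        if project not in built:
            build(project)
    # Prints: base, net, app -- every dependency before its dependents.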
- for project in dependencies: - # For each dependencies in this project - new_dep_array = [] - for dep in dependencies[project]: - # Look for the project name matching this cldis - for project_info in projects: - if projects[project_info][1] == dep: - new_dep_array.append(project_info) - dependencies[project] = sorted(new_dep_array) - - return (projects, dependencies) - -def PrintDependencies(projects, deps): - print "---------------------------------------" - print "Dependencies for all projects" - print "---------------------------------------" - print "-- --" - - for (project, dep_list) in sorted(deps.items()): - print "Project : %s" % project - print "Path : %s" % projects[project][0] - if dep_list: - for dep in dep_list: - print " - %s" % dep - print "" - - print "-- --" - -def PrintBuildOrder(projects, deps): - print "---------------------------------------" - print "Build order " - print "---------------------------------------" - print "-- --" - - built = [] - for (project, dep_list) in sorted(deps.items()): - if project not in built: - BuildProject(project, built, projects, deps) - - print "-- --" - -def PrintVCProj(projects): - - for project in projects: - print "-------------------------------------" - print "-------------------------------------" - print project - print project - print project - print "-------------------------------------" - print "-------------------------------------" - - project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]), - projects[project][2])) - - pretty = pretty_vcproj - argv = [ '', - project_path, - '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]), - ] - argv.extend(sys.argv[3:]) - pretty.main(argv) - -def main(): - # check if we have exactly 1 parameter. - if len(sys.argv) < 2: - print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0] - return - - (projects, deps) = ParseSolution(sys.argv[1]) - PrintDependencies(projects, deps) - PrintBuildOrder(projects, deps) - - if '--recursive' in sys.argv: - PrintVCProj(projects) - -if __name__ == '__main__': - main() - diff --git a/third_party/gyp/tools/pretty_vcproj.py b/third_party/gyp/tools/pretty_vcproj.py deleted file mode 100755 index 292a39f..0000000 --- a/third_party/gyp/tools/pretty_vcproj.py +++ /dev/null @@ -1,316 +0,0 @@ -#!/usr/bin/python2.5 - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Make the format of a vcproj really pretty. - - This script normalize and sort an xml. It also fetches all the properties - inside linked vsprops and include them explicitly in the vcproj. - - It outputs the resulting xml to stdout. -""" - -__author__ = 'nsylvain (Nicolas Sylvain)' - -import os -import sys - -from xml.dom.minidom import parse -from xml.dom.minidom import Node - -REPLACEMENTS = dict() -ARGUMENTS = None - -class CmpTuple: - """Compare function between 2 tuple.""" - def __call__(self, x, y): - (key1, value1) = x - (key2, value2) = y - return cmp(key1, key2) - -class CmpNode: - """Compare function between 2 xml nodes.""" - - def get_string(self, node): - node_string = "node" - node_string += node.nodeName - if node.nodeValue: - node_string += node.nodeValue - - if node.attributes: - # We first sort by name, if present. 
- node_string += node.getAttribute("Name") - - all_nodes = [] - for (name, value) in node.attributes.items(): - all_nodes.append((name, value)) - - all_nodes.sort(CmpTuple()) - for (name, value) in all_nodes: - node_string += name - node_string += value - - return node_string - - def __call__(self, x, y): - return cmp(self.get_string(x), self.get_string(y)) - -def PrettyPrintNode(node, indent=0): - if node.nodeType == Node.TEXT_NODE: - if node.data.strip(): - print '%s%s' % (' '*indent, node.data.strip()) - return - - if node.childNodes: - node.normalize() - # Get the number of attributes - attr_count = 0 - if node.attributes: - attr_count = node.attributes.length - - # Print the main tag - if attr_count == 0: - print '%s<%s>' % (' '*indent, node.nodeName) - else: - print '%s<%s' % (' '*indent, node.nodeName) - - all_attributes = [] - for (name, value) in node.attributes.items(): - all_attributes.append((name, value)) - all_attributes.sort(CmpTuple()) - for (name, value) in all_attributes: - print '%s %s="%s"' % (' '*indent, name, value) - print '%s>' % (' '*indent) - if node.nodeValue: - print '%s %s' % (' '*indent, node.nodeValue) - - for sub_node in node.childNodes: - PrettyPrintNode(sub_node, indent=indent+2) - print '%s' % (' '*indent, node.nodeName) - -def FlattenFilter(node): - """Returns a list of all the node and sub nodes.""" - node_list = [] - - if (node.attributes and - node.getAttribute('Name') == '_excluded_files'): - # We don't add the "_excluded_files" filter. - return [] - - for current in node.childNodes: - if current.nodeName == 'Filter': - node_list.extend(FlattenFilter(current)) - else: - node_list.append(current) - - return node_list - -def FixFilenames(filenames, current_directory): - new_list = [] - for filename in filenames: - if filename: - for key in REPLACEMENTS: - filename = filename.replace(key, REPLACEMENTS[key]) - os.chdir(current_directory) - filename = filename.strip('"\' ') - if filename.startswith('$'): - new_list.append(filename) - else: - new_list.append(os.path.abspath(filename)) - return new_list - -def AbsoluteNode(node): - # Make all the properties we know about in this node absolute. - if node.attributes: - for (name, value) in node.attributes.items(): - if name in ['InheritedPropertySheets', 'RelativePath', - 'AdditionalIncludeDirectories', - 'IntermediateDirectory', 'OutputDirectory', - 'AdditionalLibraryDirectories']: - # We want to fix up these paths - path_list = value.split(';') - new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1])) - node.setAttribute(name, ';'.join(new_list)) - if not value: - node.removeAttribute(name) - -def CleanupVcproj(node): - # For each sub node, we call recursively this function. - for sub_node in node.childNodes: - AbsoluteNode(sub_node) - CleanupVcproj(sub_node) - - # Normalize the node, and remove all extranous whitespaces. - for sub_node in node.childNodes: - if sub_node.nodeType == Node.TEXT_NODE: - sub_node.data = sub_node.data.replace("\r", "") - sub_node.data = sub_node.data.replace("\n", "") - sub_node.data = sub_node.data.rstrip() - - # Fix all the semicolon separated attributes to be sorted, and we also - # remove the dups. 
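Note: the attribute cleanup described above, condensed: each semicolon-separated attribute value is split, sorted, and de-duplicated, which is what the loop at the start of the next hunk does. With a made-up value:

    value = '_DEBUG;WIN32;ZLIB;_DEBUG'
    print(';'.join(sorted(set(value.split(';')))))   # WIN32;ZLIB;_DEBUG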
- if node.attributes: - for (name, value) in node.attributes.items(): - sorted_list = sorted(value.split(';')) - unique_list = [] - [unique_list.append(i) for i in sorted_list if not unique_list.count(i)] - node.setAttribute(name, ';'.join(unique_list)) - if not value: - node.removeAttribute(name) - - if node.childNodes: - node.normalize() - - # For each node, take a copy, and remove it from the list. - node_array = [] - while node.childNodes and node.childNodes[0]: - # Take a copy of the node and remove it from the list. - current = node.childNodes[0] - node.removeChild(current) - - # If the child is a filter, we want to append all its children - # to this same list. - if current.nodeName == 'Filter': - node_array.extend(FlattenFilter(current)) - else: - node_array.append(current) - - - # Sort the list. - node_array.sort(CmpNode()) - - # Insert the nodes in the correct order. - for new_node in node_array: - # But don't append empty tool node. - if new_node.nodeName == 'Tool': - if new_node.attributes and new_node.attributes.length == 1: - # This one was empty. - continue - if new_node.nodeName == 'UserMacro': - continue - node.appendChild(new_node) - -def GetConfiguationNodes(vcproj): - #TODO(nsylvain): Find a better way to navigate the xml. - nodes = [] - for node in vcproj.childNodes: - if node.nodeName == "Configurations": - for sub_node in node.childNodes: - if sub_node.nodeName == "Configuration": - nodes.append(sub_node) - - return nodes - -def GetChildrenVsprops(filename): - dom = parse(filename) - if dom.documentElement.attributes: - vsprops = dom.documentElement.getAttribute('InheritedPropertySheets') - return FixFilenames(vsprops.split(';'), os.path.dirname(filename)) - return [] - -def SeekToNode(node1, child2): - # A text node does not have properties. - if child2.nodeType == Node.TEXT_NODE: - return None - - # Get the name of the current node. - current_name = child2.getAttribute("Name") - if not current_name: - # There is no name. We don't know how to merge. - return None - - # Look through all the nodes to find a match. - for sub_node in node1.childNodes: - if sub_node.nodeName == child2.nodeName: - name = sub_node.getAttribute("Name") - if name == current_name: - return sub_node - - # No match. We give up. - return None - -def MergeAttributes(node1, node2): - # No attributes to merge? - if not node2.attributes: - return - - for (name, value2) in node2.attributes.items(): - # Don't merge the 'Name' attribute. - if name == 'Name': - continue - value1 = node1.getAttribute(name) - if value1: - # The attribute exist in the main node. If it's equal, we leave it - # untouched, otherwise we concatenate it. - if value1 != value2: - node1.setAttribute(name, ';'.join([value1, value2])) - else: - # The attribute does nto exist in the main node. We append this one. - node1.setAttribute(name, value2) - - # If the attribute was a property sheet attributes, we remove it, since - # they are useless. - if name == 'InheritedPropertySheets': - node1.removeAttribute(name) - -def MergeProperties(node1, node2): - MergeAttributes(node1, node2) - for child2 in node2.childNodes: - child1 = SeekToNode(node1, child2) - if child1: - MergeProperties(child1, child2) - else: - node1.appendChild(child2.cloneNode(True)) - -def main(argv): - global REPLACEMENTS - global ARGUMENTS - ARGUMENTS = argv - """Main function of this vcproj prettifier.""" - - # check if we have exactly 1 parameter. 
- if len(argv) < 2: - print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' - '[key2=value2]' % argv[0]) - return - - # Parse the keys - for i in range(2, len(argv)): - (key, value) = argv[i].split('=') - REPLACEMENTS[key] = value - - # Open the vcproj and parse the xml. - dom = parse(argv[1]) - - # First thing we need to do is find the Configuration Node and merge them - # with the vsprops they include. - for configuration_node in GetConfiguationNodes(dom.documentElement): - # Get the property sheets associated with this configuration. - vsprops = configuration_node.getAttribute('InheritedPropertySheets') - - # Fix the filenames to be absolute. - vsprops_list = FixFilenames(vsprops.strip().split(';'), - os.path.dirname(argv[1])) - - # Extend the list of vsprops with all vsprops contained in the current - # vsprops. - for current_vsprops in vsprops_list: - vsprops_list.extend(GetChildrenVsprops(current_vsprops)) - - # Now that we have all the vsprops, we need to merge them. - for current_vsprops in vsprops_list: - MergeProperties(configuration_node, - parse(current_vsprops).documentElement) - - # Now that everything is merged, we need to cleanup the xml. - CleanupVcproj(dom.documentElement) - - # Finally, we use the prett xml function to print the vcproj back to the - # user. - #print dom.toprettyxml(newl="\n") - PrettyPrintNode(dom.documentElement) - -if __name__ == '__main__': - main(sys.argv) diff --git a/unix/ibus/mozc_engine.cc b/unix/ibus/mozc_engine.cc index 1ac7309..33e8893 100755 --- a/unix/ibus/mozc_engine.cc +++ b/unix/ibus/mozc_engine.cc @@ -65,8 +65,7 @@ const char kMozcSectionName[] = "engine/Mozc"; // Icon path for MozcTool -// TODO(taku): currently, unknown icon is displayed. -const char kMozcToolIconPath[] = "unknown.ico"; +const char kMozcToolIconPath[] = "tool.png"; // for every 5 minutes, call SyncData const uint64 kSyncDataInterval = 5 * 60; @@ -239,14 +238,14 @@ IBusPropList *sub_prop_list = ibus_prop_list_new(); // Create items for the radio menu. - IBusText *label_for_panel = NULL; // e.g. Hiragana letter A. + string icon_path_for_panel; for (size_t i = 0; i < kMozcEnginePropertiesSize; ++i) { const MozcEngineProperty &entry = kMozcEngineProperties[i]; IBusText *label = ibus_text_new_from_static_string(entry.label); IBusPropState state = PROP_STATE_UNCHECKED; if (entry.composition_mode == kMozcEngineInitialCompositionMode) { state = PROP_STATE_CHECKED; - label_for_panel = ibus_text_new_from_static_string(entry.label_for_panel); + icon_path_for_panel = GetIconPath(entry.icon); } IBusProperty *item = ibus_property_new(entry.key, PROP_TYPE_RADIO, @@ -261,13 +260,13 @@ ibus_prop_list_append(sub_prop_list, item); // |sub_prop_list| owns |item| by calling g_object_ref_sink for the |item|. } - DCHECK(label_for_panel) << "All items are disabled by default"; + DCHECK(!icon_path_for_panel.empty()); // The label of |prop_composition_mode_| is shown in the language panel. prop_composition_mode_ = ibus_property_new("CompositionMode", PROP_TYPE_MENU, - label_for_panel, - NULL /* icon */, + NULL /* label */, + icon_path_for_panel.c_str(), NULL /* tooltip */, TRUE /* sensitive */, TRUE /* visible */, @@ -287,11 +286,12 @@ for (size_t i = 0; i < kMozcEngineToolPropertiesSize; ++i) { const MozcEngineToolProperty &entry = kMozcEngineToolProperties[i]; IBusText *label = ibus_text_new_from_static_string(entry.label); + // TODO(yusukes): It would be better to use entry.icon here? 
IBusProperty *item = ibus_property_new(entry.mode, PROP_TYPE_NORMAL, label, - NULL, - NULL, + NULL /* icon */, + NULL /* tooltip */, TRUE, TRUE, PROP_STATE_UNCHECKED, @@ -381,13 +381,19 @@ } void MozcEngine::Disable(IBusEngine *engine) { - // TODO(mazda): Implement this. + RevertSession(engine); } void MozcEngine::Enable(IBusEngine *engine) { // Launch mozc_server session_->EnsureConnection(); UpdatePreeditMethod(); + + // When ibus-mozc is disabled by the "next input method" hot key, ibus-daemon + // does not call MozcEngine::Disable(). Call RevertSession() here so the + // mozc_server could discard a preedit string before the hot key is pressed + // (crosbug.com/4596). + RevertSession(engine); } void MozcEngine::FocusIn(IBusEngine *engine) { @@ -395,11 +401,7 @@ } void MozcEngine::FocusOut(IBusEngine *engine) { - commands::SessionCommand command; - command.set_type(commands::SessionCommand::REVERT); - commands::Output output; - session_->SendCommand(command, &output); - UpdateAll(engine, output); + RevertSession(engine); SyncData(false); } @@ -471,7 +473,7 @@ // TODO(mazda): Check if this code is necessary // if (!consumed) { // ibus_engine_forward_key_event(engine, keyval, keycode, modifiers); - // } + // } return consumed ? TRUE : FALSE; } @@ -499,17 +501,18 @@ IBusProperty *prop = NULL; #ifndef OS_CHROMEOS - DCHECK(prop_mozc_tool_); - while (prop = ibus_prop_list_get(prop_mozc_tool_->sub_props, i++)) { - if (!g_strcmp0(property_name, prop->key)) { - const MozcEngineToolProperty *entry = - reinterpret_cast( - g_object_get_data(G_OBJECT(prop), kGObjectDataKey)); - DCHECK(entry->mode); - if (!session_->LaunchTool(entry->mode, "")) { - LOG(ERROR) << "cannot launch: " << entry->mode; + if (prop_mozc_tool_) { + while (prop = ibus_prop_list_get(prop_mozc_tool_->sub_props, i++)) { + if (!g_strcmp0(property_name, prop->key)) { + const MozcEngineToolProperty *entry = + reinterpret_cast( + g_object_get_data(G_OBJECT(prop), kGObjectDataKey)); + DCHECK(entry->mode); + if (!session_->LaunchTool(entry->mode, "")) { + LOG(ERROR) << "cannot launch: " << entry->mode; + } + return; } - return; } } #endif @@ -529,9 +532,8 @@ // Update Mozc state. SetCompositionMode(engine, entry->composition_mode); // Update the language panel. - ibus_property_set_label( - prop_composition_mode_, - ibus_text_new_from_static_string(entry->label_for_panel)); + ibus_property_set_icon(prop_composition_mode_, + GetIconPath(entry->icon).c_str()); } // Update the radio menu item. ibus_property_set_state(prop, PROP_STATE_CHECKED); @@ -554,7 +556,7 @@ } void MozcEngine::Reset(IBusEngine *engine) { - // TODO(mazda): Implement this. + RevertSession(engine); } void MozcEngine::SetCapabilities(IBusEngine *engine, @@ -850,5 +852,16 @@ } } +void MozcEngine::RevertSession(IBusEngine *engine) { + commands::SessionCommand command; + command.set_type(commands::SessionCommand::REVERT); + commands::Output output; + if (!session_->SendCommand(command, &output)) { + LOG(ERROR) << "RevertSession() failed"; + return; + } + UpdateAll(engine, output); +} + } // namespace ibus } // namespace mozc diff --git a/unix/ibus/mozc_engine.h b/unix/ibus/mozc_engine.h index cd2a1bc..5467800 100755 --- a/unix/ibus/mozc_engine.h +++ b/unix/ibus/mozc_engine.h @@ -130,6 +130,10 @@ // always calls SyncData. void SyncData(bool force); + // Reverts internal state of mozc_server by sending SessionCommand::REVERT IPC + // message, then hides a preedit string and the candidate window. 
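Note: the mozc_engine.cc changes above replace per-handler REVERT code with a single RevertSession() helper that Disable(), Enable(), FocusOut() and Reset() all call. A language-neutral sketch of that control flow in Python; the class and method names are stand-ins, not mozc's real API:

    # Control-flow sketch of the RevertSession() refactoring (illustration
    # only; the session and engine objects are made-up stand-ins).
    class EngineSketch(object):
        def __init__(self, session):
            self.session = session

        def revert_session(self, engine):
            output = self.session.send_command('REVERT')
            if output is None:               # SendCommand() failed
                print('RevertSession() failed')
                return
            self.update_all(engine, output)  # hides preedit and candidates

        # Disable, Enable, FocusOut and Reset now all reuse the helper.
        def disable(self, engine):
            self.revert_session(engine)

        def focus_out(self, engine):
            self.revert_session(engine)
            self.sync_data(force=False)

        def update_all(self, engine, output):
            pass                             # stand-in for the real UI update

        def sync_data(self, force):
            pass                             # stand-in for SyncData()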
+ void RevertSession(IBusEngine *engine); + uint64 last_sync_time_; scoped_ptr key_translator_; scoped_ptr session_; diff --git a/unix/ibus/mozc_engine_property.h b/unix/ibus/mozc_engine_property.h index b4b14ff..178da9f 100755 --- a/unix/ibus/mozc_engine_property.h +++ b/unix/ibus/mozc_engine_property.h @@ -44,42 +44,49 @@ const char *key; // IBus property key for the mode. const char *label; // text for the radio menu (ibus-anthy compatible). const char *label_for_panel; // text for the language panel. + const char *icon; } kMozcEngineProperties[] = { { commands::DIRECT, "CompositionMode.Direct", "Direct input", "A", + "direct.png", }, { commands::HIRAGANA, "CompositionMode.Hiragana", "Hiragana", "\xe3\x81\x82", // Hiragana letter A + "hiragana.png", }, { commands::FULL_KATAKANA, "CompositionMode.Katakana", "Katakana", "\xe3\x82\xa2", // Katakana letter A + "katakana_full.png", }, { commands::HALF_ASCII, "CompositionMode.Latin", "Latin", "_A", + "alpha_half.png", }, { commands::FULL_ASCII, "CompositionMode.WideLatin", "Wide Latin", "\xef\xbc\xa1", // Full width ASCII letter A + "alpha_full.png", }, { commands::HALF_KATAKANA, "CompositionMode.HalfWidthKatakana", "Half width katakana", "_\xef\xbd\xb1", // Half width Katakana letter A + "katakana_half.png", }, }; @@ -93,21 +100,25 @@ const char *key; // IBus property key for the MozcTool. const char *mode; // command line passed as --mode= const char *label; // text for the menu. + const char *icon; // icon } kMozcEngineToolProperties[] = { { "Tool.ConfigDialog", "config_dialog", - "property" + "Property", + "properties.png", }, { "Tool.DictionaryTool", "dictionary_tool", - "dictionary tool" + "Dictionary tool", + "dictionary.png", }, { "Tool.AboutDialog", "about_dialog", - "About Mozc" + "About Mozc", + NULL, }, }; diff --git a/unix/scim/scim_mozc.cc b/unix/scim/scim_mozc.cc index daad3d7..e908da5 100755 --- a/unix/scim/scim_mozc.cc +++ b/unix/scim/scim_mozc.cc @@ -49,9 +49,9 @@ const char kPropToolDictionary[] = "/Mozc/Tool/dictionary"; const char kPropToolDictionaryIcon[] = SCIM_ICONDIR "/scim-mozc-dictionary.png"; const char kPropToolProperty[] = "/Mozc/Tool/property"; -const char kPropToolPropertyIcon[] = SCIM_ICONDIR "/scim-mozc-property.png"; - -const char kPropCompositionModeIcon[] = "/Mozc/CompositionMode"; +const char kPropToolPropertyIcon[] = SCIM_ICONDIR "/scim-mozc-properties.png"; + +const char kPropCompositionMode[] = "/Mozc/CompositionMode"; const struct CompositionMode { const char *icon; @@ -61,37 +61,37 @@ mozc::commands::CompositionMode mode; } kPropCompositionModes[] = { { - "", // TODO(yusukes): use icons. + SCIM_ICONDIR "/scim-mozc-direct.png", "A", "/Mozc/CompositionMode/direct", "Direct", mozc::commands::DIRECT, }, { - "", + SCIM_ICONDIR "/scim-mozc-hiragana.png", "\xe3\x81\x82", // Hiragana letter A in UTF-8. "/Mozc/CompositionMode/hiragana", "Hiragana", mozc::commands::HIRAGANA, }, { - "", + SCIM_ICONDIR "/scim-mozc-katakana_full.png", "\xe3\x82\xa2", // Katakana letter A. "/Mozc/CompositionMode/full_katakana", "Full Katakana", mozc::commands::FULL_KATAKANA, }, { - "", + SCIM_ICONDIR "/scim-mozc-alpha_half.png", "_A", "/Mozc/CompositionMode/half_ascii", "Half ASCII", mozc::commands::HALF_ASCII, }, { - "", + SCIM_ICONDIR "/scim-mozc-alpha_full.png", "\xef\xbc\xa1", // Full width ASCII letter A. "/Mozc/CompositionMode/full_ascii", "Full ASCII", mozc::commands::FULL_ASCII, }, { - "", + SCIM_ICONDIR "/scim-mozc-katakana_half.png", "_\xef\xbd\xb1", // Half width Katakana letter A. 
"/Mozc/CompositionMode/half_katakana", "Half Katakana", @@ -301,9 +301,8 @@ composition_mode_ = mode; // Update the bar. const char *icon = GetCurrentCompositionModeIcon(); - const char *label = GetCurrentCompositionModeLabel(); scim::Property p = scim::Property( - kPropCompositionModeIcon, label, icon, "Composition mode"); + kPropCompositionMode, "", icon, "Composition mode"); update_property(p); } @@ -366,13 +365,12 @@ void ScimMozc::InitializeBar() { VLOG(1) << "Registering properties"; - // TODO(yusukes): L10N needed for "Tool", "Dictionary", and "Property". + // TODO(yusukes): L10N needed for "Tool", "Dictionary tool", and "Property". scim::PropertyList prop_list; const char *icon = GetCurrentCompositionModeIcon(); - const char *label = GetCurrentCompositionModeLabel(); scim::Property p = scim::Property( - kPropCompositionModeIcon, label, icon, "Composition mode"); + kPropCompositionMode, "", icon, "Composition mode"); prop_list.push_back(p); for (size_t i = 0; i < kNumCompositionModes; ++i) { p = scim::Property(kPropCompositionModes[i].config_path, @@ -388,7 +386,7 @@ p = scim::Property(kPropTool, "", kPropToolIcon, "Tool"); prop_list.push_back(p); p = scim::Property( - kPropToolDictionary, "Dictionary", kPropToolDictionaryIcon); + kPropToolDictionary, "Dictionary tool", kPropToolDictionaryIcon); prop_list.push_back(p); p = scim::Property(kPropToolProperty, "Property", kPropToolPropertyIcon); prop_list.push_back(p); @@ -405,12 +403,4 @@ return ""; } -const char *ScimMozc::GetCurrentCompositionModeLabel() const { - DCHECK(composition_mode_ < kNumCompositionModes); - if (composition_mode_ < kNumCompositionModes) { - return kPropCompositionModes[composition_mode_].label; - } - return ""; -} - } // namespace mozc_unix_scim diff --git a/upstream-debian/changelog b/upstream-debian/changelog index 9fdfb7d..fc2ea31 100755 --- a/upstream-debian/changelog +++ b/upstream-debian/changelog @@ -1,3 +1,9 @@ +mozc (0.12.422.102) lucid; urgency=low + + * Ver. 0.12.422.102 on 2010-07-23 + + -- Yasuhiro Matsuda Fri, 23 Jul 2010 14:33:29 +0900 + mozc (0.12.410.102) lucid; urgency=low * Ver. 
0.12.410.102 on 2010-07-12 diff --git a/upstream-debian/rules b/upstream-debian/rules index da6cf4d..74b8f6b 100755 --- a/upstream-debian/rules +++ b/upstream-debian/rules @@ -53,11 +53,33 @@ cp -p $(BUILD_DIR)/Release/ibus_mozc $(CURDIR)/debian/ibus-mozc/usr/lib/ibus-mozc/ibus-engine-mozc mkdir -p $(CURDIR)/debian/ibus-mozc/usr/share/ibus/component/ sed 's|/usr/libexec/ibus-engine-mozc|/usr/lib/ibus-mozc/ibus-engine-mozc|' < $(BUILD_DIR)/Release/obj/gen/unix/ibus/mozc.xml > $(CURDIR)/debian/ibus-mozc/usr/share/ibus/component/mozc.xml + mkdir -p $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/ + cp -p data/images/unix/ime_product_icon_opensource-32.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/product_icon.png + cp -p data/images/unix/ui-tool.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/tool.png + cp -p data/images/unix/ui-properties.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/properties.png + cp -p data/images/unix/ui-dictionary.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/dictionary.png + cp -p data/images/unix/ui-direct.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/direct.png + cp -p data/images/unix/ui-hiragana.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/hiragana.png + cp -p data/images/unix/ui-katakana_half.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/katakana_half.png + cp -p data/images/unix/ui-katakana_full.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/katakana_full.png + cp -p data/images/unix/ui-alpha_half.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/alpha_half.png + cp -p data/images/unix/ui-alpha_full.png $(CURDIR)/debian/ibus-mozc/usr/share/ibus-mozc/alpha_full.png mkdir -p $(CURDIR)/debian/scim-mozc`pkg-config --variable=moduledir scim`/IMEngine/ cp -p $(BUILD_DIR)/Release/lib.target/libscim_mozc.so $(CURDIR)/debian/scim-mozc`pkg-config --variable=moduledir scim`/IMEngine/mozc.so mkdir -p $(CURDIR)/debian/scim-mozc`pkg-config --variable=moduledir scim`/SetupUI/ cp -p $(BUILD_DIR)/Release/lib.target/libscim_mozc_setup.so $(CURDIR)/debian/scim-mozc`pkg-config --variable=moduledir scim`/SetupUI/mozc-setup.so + mkdir -p $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/ + cp -p data/images/unix/ime_product_icon_opensource-32.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc.png + cp -p data/images/unix/ui-tool.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-tool.png + cp -p data/images/unix/ui-properties.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-properties.png + cp -p data/images/unix/ui-dictionary.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-dictionary.png + cp -p data/images/unix/ui-direct.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-direct.png + cp -p data/images/unix/ui-hiragana.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-hiragana.png + cp -p data/images/unix/ui-katakana_half.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-katakana_half.png + cp -p data/images/unix/ui-katakana_full.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-katakana_full.png + cp -p data/images/unix/ui-alpha_half.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-alpha_half.png + cp -p data/images/unix/ui-alpha_full.png $(CURDIR)/debian/scim-mozc`pkg-config --variable=icondir scim`/scim-mozc-alpha_full.png mkdir -p $(CURDIR)/debian/mozc-server/usr/lib/mozc cp -p 
$(BUILD_DIR)/Release/mozc_server $(CURDIR)/debian/mozc-server/usr/lib/mozc/ diff --git a/usage_stats/upload_util.cc b/usage_stats/upload_util.cc index c0534ea..7e5281b 100755 --- a/usage_stats/upload_util.cc +++ b/usage_stats/upload_util.cc @@ -36,7 +36,7 @@ namespace usage_stats { namespace { const char kStatServerAddress[] = - "http://client4.google.com/tbproxy/usagestats"; + "http://clients4.google.com/tbproxy/usagestats"; const char kStatServerSourceId[] = "sourceid=ime"; const char kStatServerAddedSendHeader[] = "Content-Type: application/x-www-form-urlencoded"; diff --git a/usage_stats/upload_util_test.cc b/usage_stats/upload_util_test.cc index 3c51828..175f320 100755 --- a/usage_stats/upload_util_test.cc +++ b/usage_stats/upload_util_test.cc @@ -102,7 +102,7 @@ TEST(UploadUtilTest, UploadTest) { TestHTTPClient client; HTTPClient::SetHTTPClientHandler(&client); - const string base_url = "http://client4.google.com/tbproxy/usagestats"; + const string base_url = "http://clients4.google.com/tbproxy/usagestats"; { TestHTTPClient::Result result; result.expected_url = base_url + "?sourceid=ime&hl=ja&v=test"; diff --git a/usage_stats/usage_stats_test.cc b/usage_stats/usage_stats_test.cc index 918e233..d17058c 100755 --- a/usage_stats/usage_stats_test.cc +++ b/usage_stats/usage_stats_test.cc @@ -150,7 +150,7 @@ bool val_; }; -const char kBaseUrl[] = "http://client4.google.com/tbproxy/usagestats"; +const char kBaseUrl[] = "http://clients4.google.com/tbproxy/usagestats"; const char kTestClientId[] = "TestClientId";
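Note: for reference, the icon plumbing added by the hunks above fits together as follows: debian/rules installs each data/images/unix/ui-*.png twice, once under the short name ibus-mozc loads via GetIconPath() and once under the scim-mozc-*.png name built from SCIM_ICONDIR in scim_mozc.cc. A small sketch of that mapping:

    # Source icon -> installed names, as wired up in debian/rules above.
    for name in ('tool', 'properties', 'dictionary', 'direct', 'hiragana',
                 'katakana_half', 'katakana_full', 'alpha_half', 'alpha_full'):
        source = 'data/images/unix/ui-%s.png' % name
        ibus_name = '/usr/share/ibus-mozc/%s.png' % name
        scim_name = 'scim-mozc-%s.png' % name   # under `pkg-config --variable=icondir scim`
        print('%s -> %s, %s' % (source, ibus_name, scim_name))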